From 8a23c7a8fb57082787c2821992a07968fe1e9cc1 Mon Sep 17 00:00:00 2001 From: streamer45 Date: Wed, 28 Jan 2026 15:14:14 +0100 Subject: [PATCH 1/6] feat: plugin marketplace --- Cargo.lock | 79 +- apps/skit/Cargo.toml | 8 +- apps/skit/src/config.rs | 99 +- apps/skit/src/lib.rs | 5 + apps/skit/src/main.rs | 5 + apps/skit/src/marketplace.rs | 856 +++++++ apps/skit/src/marketplace_installer.rs | 2244 +++++++++++++++++ apps/skit/src/marketplace_security.rs | 354 +++ apps/skit/src/plugin_paths.rs | 115 + apps/skit/src/plugin_records.rs | 45 + apps/skit/src/plugins.rs | 180 +- apps/skit/src/server.rs | 658 ++++- apps/skit/src/state.rs | 2 + apps/skit/tests/plugin_integration_test.rs | 132 +- crates/engine/src/dynamic_actor.rs | 16 +- crates/engine/src/lib.rs | 12 +- crates/engine/src/tests/connection_types.rs | 3 +- docs/astro.config.mjs | 3 + .../content/docs/guides/creating-pipelines.md | 2 +- .../content/docs/guides/installing-plugins.md | 124 + .../content/docs/guides/model-downloads.md | 73 + .../content/docs/guides/publishing-plugins.md | 124 + docs/src/content/docs/guides/web-ui.md | 2 +- .../content/docs/guides/writing-plugins.md | 6 + .../docs/reference/configuration-generated.md | 2201 ++++++++-------- .../content/docs/reference/configuration.md | 13 + docs/src/content/docs/reference/http-api.md | 44 + .../docs/reference/nodes/audio-gain.md | 14 +- .../docs/reference/nodes/audio-mixer.md | 110 +- .../reference/nodes/audio-opus-encoder.md | 14 +- .../docs/reference/nodes/audio-pacer.md | 48 +- .../docs/reference/nodes/audio-resampler.md | 26 +- .../reference/nodes/containers-ogg-muxer.md | 48 +- .../reference/nodes/containers-webm-muxer.md | 62 +- .../docs/reference/nodes/core-file-reader.md | 18 +- .../docs/reference/nodes/core-file-writer.md | 18 +- .../reference/nodes/core-json-serialize.md | 20 +- .../docs/reference/nodes/core-pacer.md | 26 +- .../docs/reference/nodes/core-script.md | 96 +- .../reference/nodes/core-telemetry-out.md | 36 +- 
.../reference/nodes/core-telemetry-tap.md | 46 +- .../docs/reference/nodes/core-text-chunker.md | 60 +- .../reference/nodes/streamkit-http-input.md | 56 +- .../reference/nodes/transport-http-fetcher.md | 18 +- .../reference/nodes/transport-moq-peer.md | 42 +- .../nodes/transport-moq-publisher.md | 46 +- .../nodes/transport-moq-subscriber.md | 36 +- .../content/docs/reference/packets/custom.md | 100 +- .../docs/reference/packets/raw-audio.md | 38 +- .../docs/reference/packets/transcription.md | 170 +- .../content/docs/reference/plugins/index.md | 3 +- .../plugins/plugin-native-pocket-tts.md | 138 + .../plugins/plugin-native-whisper.md | 6 +- justfile | 14 +- marketplace/official-plugins.json | 14 + plugins/native/helsinki/plugin.yml | 2 + plugins/native/kokoro/plugin.yml | 1 + plugins/native/matcha/plugin.yml | 1 + plugins/native/nllb/plugin.yml | 1 + plugins/native/piper/plugin.yml | 2 + plugins/native/sensevoice/plugin.yml | 2 + plugins/native/vad/plugin.yml | 1 + plugins/native/whisper/README.md | 36 +- plugins/native/whisper/plugin.yml | 4 + plugins/native/whisper/src/lib.rs | 4 +- samples/pipelines/dynamic/VOICE_AGENT.md | 2 +- samples/pipelines/dynamic/WEATHER_AGENT.md | 2 +- .../dynamic/speech-translate-en-es.yaml | 4 +- .../speech-translate-helsinki-en-es.yaml | 4 +- .../pipelines/dynamic/voice-agent-openai.yaml | 4 +- .../dynamic/voice-weather-open-meteo.yaml | 4 +- samples/pipelines/oneshot/speech_to_text.yml | 2 +- samples/skit.toml | 40 +- scripts/marketplace/upload_models_to_hf.py | 208 ++ ui/src/App.tsx | 3 + ui/src/Layout.tsx | 2 +- ui/src/services/marketplace.test.ts | 113 + ui/src/services/marketplace.ts | 109 + ui/src/types/marketplace.ts | 138 + ui/src/types/types.ts | 2 + ui/src/views/PluginsView.styles.ts | 403 +++ ui/src/views/PluginsView.tsx | 75 + ui/src/views/TokensView.tsx | 3 + ui/src/views/admin/AdminNav.styles.ts | 35 + ui/src/views/admin/AdminNav.tsx | 22 + ui/src/views/plugins/InstalledPluginsTab.tsx | 178 ++ 
ui/src/views/plugins/MarketplaceJobPanel.tsx | 86 + .../plugins/MarketplaceModelsSection.tsx | 122 + ui/src/views/plugins/MarketplacePanels.tsx | 440 ++++ ui/src/views/plugins/MarketplaceTab.tsx | 498 ++++ ui/src/views/plugins/marketplaceFormatters.ts | 61 + ui/src/views/plugins/marketplaceHooks.ts | 236 ++ 92 files changed, 9893 insertions(+), 1685 deletions(-) create mode 100644 apps/skit/src/marketplace.rs create mode 100644 apps/skit/src/marketplace_installer.rs create mode 100644 apps/skit/src/marketplace_security.rs create mode 100644 apps/skit/src/plugin_paths.rs create mode 100644 apps/skit/src/plugin_records.rs create mode 100644 docs/src/content/docs/guides/installing-plugins.md create mode 100644 docs/src/content/docs/guides/model-downloads.md create mode 100644 docs/src/content/docs/guides/publishing-plugins.md create mode 100644 docs/src/content/docs/reference/plugins/plugin-native-pocket-tts.md create mode 100644 scripts/marketplace/upload_models_to_hf.py create mode 100644 ui/src/services/marketplace.test.ts create mode 100644 ui/src/services/marketplace.ts create mode 100644 ui/src/types/marketplace.ts create mode 100644 ui/src/views/PluginsView.styles.ts create mode 100644 ui/src/views/PluginsView.tsx create mode 100644 ui/src/views/admin/AdminNav.styles.ts create mode 100644 ui/src/views/admin/AdminNav.tsx create mode 100644 ui/src/views/plugins/InstalledPluginsTab.tsx create mode 100644 ui/src/views/plugins/MarketplaceJobPanel.tsx create mode 100644 ui/src/views/plugins/MarketplaceModelsSection.tsx create mode 100644 ui/src/views/plugins/MarketplacePanels.tsx create mode 100644 ui/src/views/plugins/MarketplaceTab.tsx create mode 100644 ui/src/views/plugins/marketplaceFormatters.ts create mode 100644 ui/src/views/plugins/marketplaceHooks.ts diff --git a/Cargo.lock b/Cargo.lock index 059e010f..58516420 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -466,6 +466,15 @@ dependencies = [ "typenum", ] +[[package]] +name = "blake2" +version = "0.10.6" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" +dependencies = [ + "digest", +] + [[package]] name = "block-buffer" version = "0.10.4" @@ -523,6 +532,25 @@ dependencies = [ "bytes", ] +[[package]] +name = "bzip2" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49ecfb22d906f800d4fe833b6282cf4dc1c298f5057ca0b5445e5c209735ca47" +dependencies = [ + "bzip2-sys", +] + +[[package]] +name = "bzip2-sys" +version = "0.1.13+1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "225bff33b2141874fe80d71e07d6eec4f85c5c216453dd96388240f96e1acc14" +dependencies = [ + "cc", + "pkg-config", +] + [[package]] name = "cap-fs-ext" version = "3.4.5" @@ -1157,6 +1185,7 @@ checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "crypto-common", + "subtle", ] [[package]] @@ -1363,6 +1392,17 @@ dependencies = [ "version_check", ] +[[package]] +name = "filetime" +version = "0.2.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f98844151eee8917efc50bd9e8318cb963ae8b297431495d3f758616ea5c57db" +dependencies = [ + "cfg-if", + "libc", + "libredox", +] + [[package]] name = "find-msvc-tools" version = "0.1.5" @@ -2314,6 +2354,7 @@ checksum = "df15f6eac291ed1cf25865b1ee60399f57e7c227e7f51bdbd4c5270396a9ed50" dependencies = [ "bitflags 2.10.0", "libc", + "redox_syscall 0.6.0", ] [[package]] @@ -3017,7 +3058,7 @@ dependencies = [ "cfg-if", "libc", "petgraph", - "redox_syscall", + "redox_syscall 0.5.18", "smallvec 1.15.1", "windows-link 0.2.1", ] @@ -3573,6 +3614,15 @@ dependencies = [ "bitflags 2.10.0", ] +[[package]] +name = "redox_syscall" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec96166dafa0886eb81fe1c0a388bece180fbef2135f97c1e2cf8302e74b43b5" +dependencies = [ + "bitflags 
2.10.0", +] + [[package]] name = "redox_users" version = "0.4.6" @@ -4709,11 +4759,14 @@ dependencies = [ "axum", "axum-server", "base64 0.22.1", + "blake2", "bytes", + "bzip2", "clap", "console-subscriber", "dhat", "figment", + "flate2", "futures", "futures-util", "getrandom 0.3.4", @@ -4738,6 +4791,7 @@ dependencies = [ "rust-embed", "rustls", "schemars 1.2.0", + "semver", "serde", "serde-saphyr 0.0.15", "serde_json", @@ -4749,6 +4803,7 @@ dependencies = [ "streamkit-plugin-native", "streamkit-plugin-wasm", "sysinfo", + "tar", "tempfile", "thiserror 2.0.17", "tikv-jemallocator", @@ -4767,6 +4822,7 @@ dependencies = [ "tracing-subscriber", "urlencoding", "uuid", + "zstd", ] [[package]] @@ -5001,6 +5057,17 @@ dependencies = [ "winx", ] +[[package]] +name = "tar" +version = "0.4.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a" +dependencies = [ + "filetime", + "libc", + "xattr", +] + [[package]] name = "target-lexicon" version = "0.13.4" @@ -7068,6 +7135,16 @@ dependencies = [ "time", ] +[[package]] +name = "xattr" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32e45ad4206f6d2479085147f02bc2ef834ac85886624a23575ae137c8aa8156" +dependencies = [ + "libc", + "rustix 1.1.3", +] + [[package]] name = "yansi" version = "1.0.1" diff --git a/apps/skit/Cargo.toml b/apps/skit/Cargo.toml index 3d48b198..8dca0def 100644 --- a/apps/skit/Cargo.toml +++ b/apps/skit/Cargo.toml @@ -35,6 +35,8 @@ serde = { workspace = true, features = ["derive"] } serde-saphyr = { workspace = true } serde_json = "1.0" schemars = "1.2.0" +base64 = "0.22" +semver = "1.0" # For serializing the default config into TOML format toml = "0.9.11" @@ -61,6 +63,10 @@ futures = { workspace = true } uuid = { version = "1.19", features = ["v4", "serde"] } http-body-util = "0.1" multer = "3.1" +tar = "0.4" +zstd = "0.13" +flate2 = "1.1" +bzip2 = "0.5" # For embedding 
static files rust-embed = "8.11" @@ -119,13 +125,13 @@ glob = "0.3" jsonwebtoken = { version = "10.2.0", default-features = false, features = ["aws_lc_rs"] } sha2 = "0.10" hex = "0.4" -base64 = "0.22" thiserror = "2.0" getrandom = "0.3" aws-lc-rs = "1" # For MoQ auth path matching (optional, with moq feature) moq-lite = { version = "0.11.0", optional = true } +blake2 = "0.10.6" [features] default = ["script"] diff --git a/apps/skit/src/config.rs b/apps/skit/src/config.rs index 4766088d..09f95a9a 100644 --- a/apps/skit/src/config.rs +++ b/apps/skit/src/config.rs @@ -340,6 +340,26 @@ impl Default for ServerConfig { #[derive(Deserialize, Serialize, Debug, Clone, JsonSchema)] pub struct PluginConfig { pub directory: String, + #[serde(flatten, default)] + pub http_management: PluginHttpConfig, + #[serde(flatten, default)] + pub marketplace: PluginMarketplaceConfig, + /// Minisign public keys (contents of `.pub` files) trusted for marketplace manifests. + #[serde(default)] + pub trusted_pubkeys: Vec, + /// Registry index URLs (e.g., `https://example.com/index.json`). + #[serde(default)] + pub registries: Vec, + /// Optional directory to store downloaded models (defaults to `models` when unset). + #[serde(default)] + pub models_dir: Option, + /// Optional Hugging Face token for gated model downloads. + #[serde(default)] + pub huggingface_token: Option, +} + +#[derive(Deserialize, Serialize, Debug, Clone, Default, JsonSchema)] +pub struct PluginHttpConfig { /// Controls whether runtime plugin upload/delete is allowed via the public APIs. /// /// Default is false to avoid accidental exposure when running without an auth layer. @@ -347,12 +367,89 @@ pub struct PluginConfig { pub allow_http_management: bool, } +#[derive(Deserialize, Serialize, Debug, Clone, Default, JsonSchema)] +pub struct PluginMarketplaceConfig { + /// Enables the plugin marketplace API and UI (default: false). 
+ #[serde(default)] + pub marketplace_enabled: bool, + /// Allows native plugins to be installed from a marketplace (default: false). + /// + /// Native plugins run in-process and are unsafe without full trust. + #[serde(default)] + pub allow_native_marketplace: bool, + #[serde(flatten, default)] + pub security: PluginMarketplaceSecurityConfig, +} + +#[derive(Deserialize, Serialize, Debug, Clone, Copy, JsonSchema, Default)] +#[serde(rename_all = "snake_case")] +pub enum MarketplaceSchemePolicy { + #[default] + HttpsOnly, + AllowHttp, +} + +#[derive(Deserialize, Serialize, Debug, Clone, Copy, JsonSchema, Default)] +#[serde(rename_all = "snake_case")] +pub enum MarketplaceHostPolicy { + #[default] + PublicOnly, + AllowPrivate, +} + +#[derive(Deserialize, Serialize, Debug, Clone, JsonSchema)] +pub struct PluginMarketplaceSecurityConfig { + /// Allow direct URL model downloads from manifests (default: false). + #[serde(default)] + pub allow_model_urls: bool, + /// Require marketplace URLs to share origin with the registry (default: false). + #[serde(default = "default_require_registry_origin")] + pub marketplace_require_registry_origin: bool, + /// Scheme policy for marketplace URLs (default: https_only). + #[serde(default)] + pub marketplace_scheme_policy: MarketplaceSchemePolicy, + /// Host policy for marketplace URLs (default: public_only). + #[serde(default)] + pub marketplace_host_policy: MarketplaceHostPolicy, + /// Resolve hostnames for marketplace URLs and check resolved IPs (default: false). + #[serde(default)] + pub marketplace_resolve_hostnames: bool, + /// Allowed marketplace origins (e.g., "https://example.com", "https://example.com:*"). 
+ #[serde(default)] + pub marketplace_url_allowlist: Vec, +} + +impl Default for PluginMarketplaceSecurityConfig { + fn default() -> Self { + Self { + allow_model_urls: false, + marketplace_require_registry_origin: default_require_registry_origin(), + marketplace_scheme_policy: MarketplaceSchemePolicy::default(), + marketplace_host_policy: MarketplaceHostPolicy::default(), + marketplace_resolve_hostnames: false, + marketplace_url_allowlist: Vec::new(), + } + } +} + impl Default for PluginConfig { fn default() -> Self { - Self { directory: ".plugins".to_string(), allow_http_management: false } + Self { + directory: ".plugins".to_string(), + http_management: PluginHttpConfig::default(), + marketplace: PluginMarketplaceConfig::default(), + trusted_pubkeys: Vec::new(), + registries: Vec::new(), + models_dir: None, + huggingface_token: None, + } } } +const fn default_require_registry_origin() -> bool { + false +} + const fn default_keep_models_loaded() -> bool { true } diff --git a/apps/skit/src/lib.rs b/apps/skit/src/lib.rs index 9a711635..8c33abea 100644 --- a/apps/skit/src/lib.rs +++ b/apps/skit/src/lib.rs @@ -8,9 +8,14 @@ pub mod cli; pub mod config; pub mod file_security; pub mod logging; +pub mod marketplace; +pub mod marketplace_installer; +pub mod marketplace_security; #[cfg(feature = "moq")] pub mod moq_gateway; pub mod permissions; +pub mod plugin_paths; +pub mod plugin_records; pub mod plugins; pub mod profiling; pub mod role_extractor; diff --git a/apps/skit/src/main.rs b/apps/skit/src/main.rs index 99f12324..451b76e1 100644 --- a/apps/skit/src/main.rs +++ b/apps/skit/src/main.rs @@ -36,9 +36,14 @@ mod cli; mod config; mod file_security; mod logging; +mod marketplace; +mod marketplace_installer; +mod marketplace_security; #[cfg(feature = "moq")] mod moq_gateway; mod permissions; +mod plugin_paths; +mod plugin_records; mod plugins; mod profiling; mod role_extractor; diff --git a/apps/skit/src/marketplace.rs b/apps/skit/src/marketplace.rs new file mode 100644 
index 00000000..6b49954f --- /dev/null +++ b/apps/skit/src/marketplace.rs @@ -0,0 +1,856 @@ +// SPDX-FileCopyrightText: © 2025 StreamKit Contributors +// +// SPDX-License-Identifier: MPL-2.0 + +use std::{ + collections::HashMap, + fmt::Write, + sync::Arc, + time::{Duration, Instant}, +}; + +use anyhow::{anyhow, Context, Result}; +use aws_lc_rs::signature::{UnparsedPublicKey, ED25519}; +use base64::{engine::general_purpose, Engine as _}; +use blake2::{digest::consts::U64, Blake2b, Digest}; +use bytes::Bytes; +use reqwest::Url; +use serde::{Deserialize, Serialize}; +use tokio::sync::RwLock; + +use crate::marketplace_security::{validated_get_bytes, MarketplaceUrlPolicy, OriginKey}; + +const MINISIGN_ALGO_ED25519: [u8; 2] = *b"Ed"; +const MINISIGN_ALGO_ED25519_HASHED: [u8; 2] = *b"ED"; +const MINISIGN_PUBLIC_KEY_LEN: usize = 42; +const MINISIGN_SIGNATURE_LEN: usize = 74; +const MAX_INDEX_CACHE_ENTRIES: usize = 32; +const MAX_MANIFEST_CACHE_ENTRIES: usize = 128; + +#[derive(Debug, Clone)] +pub struct MinisignPublicKey { + key_id: [u8; 8], + public_key: [u8; 32], +} + +impl MinisignPublicKey { + /// Parses a minisign public key string. + /// + /// # Errors + /// + /// Returns an error if the key is missing, malformed, or not Ed25519. 
+ pub fn parse(input: &str) -> Result { + let line = extract_minisign_payload(input)?; + let decoded = decode_base64_line(&line).context("Failed to decode minisign public key")?; + + if decoded.len() != MINISIGN_PUBLIC_KEY_LEN { + return Err(anyhow!( + "Invalid minisign public key length: expected {MINISIGN_PUBLIC_KEY_LEN} bytes" + )); + } + if decoded[0..2] != MINISIGN_ALGO_ED25519 { + return Err(anyhow!("Unsupported minisign public key algorithm: expected Ed25519")); + } + + let mut key_id = [0u8; 8]; + key_id.copy_from_slice(&decoded[2..10]); + + let mut public_key = [0u8; 32]; + public_key.copy_from_slice(&decoded[10..42]); + + Ok(Self { key_id, public_key }) + } + + pub fn key_id_hex(&self) -> String { + key_id_hex(self.key_id) + } + + /// Verifies a minisign signature against the provided message. + /// + /// # Errors + /// + /// Returns an error if the key ID does not match or the signature fails verification. + pub fn verify(&self, signature: &MinisignSignature, message: &[u8]) -> Result<()> { + if self.key_id != signature.key_id { + return Err(anyhow!("Signature key id does not match trusted key")); + } + + let verifier = UnparsedPublicKey::new(&ED25519, &self.public_key); + if signature.prehashed { + let hash = Blake2b::::digest(message); + verifier + .verify(&hash, &signature.signature) + .map_err(|_| anyhow!("Minisign signature verification failed")) + } else { + verifier + .verify(message, &signature.signature) + .map_err(|_| anyhow!("Minisign signature verification failed")) + } + } +} + +#[derive(Debug, Clone)] +pub struct MinisignSignature { + prehashed: bool, + key_id: [u8; 8], + signature: [u8; 64], +} + +impl MinisignSignature { + /// Parses a minisign signature string. + /// + /// # Errors + /// + /// Returns an error if the signature is missing, malformed, or not Ed25519. 
+ pub fn parse(input: &str) -> Result { + let line = extract_minisign_payload(input)?; + let decoded = decode_base64_line(&line).context("Failed to decode minisign signature")?; + + if decoded.len() != MINISIGN_SIGNATURE_LEN { + return Err(anyhow!( + "Invalid minisign signature length: expected {MINISIGN_SIGNATURE_LEN} bytes" + )); + } + let prehashed = if decoded[0..2] == MINISIGN_ALGO_ED25519 { + false + } else if decoded[0..2] == MINISIGN_ALGO_ED25519_HASHED { + true + } else { + return Err(anyhow!("Unsupported minisign signature algorithm: expected Ed25519")); + }; + + let mut key_id = [0u8; 8]; + key_id.copy_from_slice(&decoded[2..10]); + + let mut signature = [0u8; 64]; + signature.copy_from_slice(&decoded[10..74]); + + Ok(Self { prehashed, key_id, signature }) + } + + pub fn key_id_hex(&self) -> String { + key_id_hex(self.key_id) + } +} + +#[derive(Debug, Clone)] +pub struct MinisignVerifier { + trusted_keys: Vec, +} + +impl MinisignVerifier { + /// Builds a verifier from a list of minisign public key strings. + /// + /// # Errors + /// + /// Returns an error if any key is malformed or unsupported. + pub fn from_trusted_pubkeys(keys: &[String]) -> Result { + let mut trusted_keys = Vec::new(); + for key in keys { + let trimmed = key.trim(); + if trimmed.is_empty() { + continue; + } + trusted_keys.push(MinisignPublicKey::parse(trimmed)?); + } + Ok(Self { trusted_keys }) + } + + /// Verifies a minisign signature for the provided message. + /// + /// # Errors + /// + /// Returns an error if no trusted keys are configured or verification fails. 
+ pub fn verify(&self, message: &[u8], signature_text: &str) -> Result { + if self.trusted_keys.is_empty() { + return Err(anyhow!("No trusted minisign public keys configured")); + } + + let signature = MinisignSignature::parse(signature_text)?; + for key in &self.trusted_keys { + if key.key_id == signature.key_id { + key.verify(&signature, message)?; + return Ok(VerifiedSignature { key_id: key.key_id_hex() }); + } + } + + let key_id = signature.key_id_hex(); + Err(anyhow!("Signature key id {key_id} is not in the trusted key set")) + } +} + +#[derive(Debug, Clone)] +pub struct VerifiedSignature { + pub key_id: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct RegistryIndex { + #[serde(default = "default_registry_schema_version")] + pub schema_version: u32, + #[serde(default)] + pub plugins: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct RegistryPlugin { + pub id: String, + pub name: Option, + pub description: Option, + pub latest: Option, + #[serde(default)] + pub versions: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct RegistryPluginVersion { + pub version: String, + pub manifest_url: String, + pub signature_url: Option, + pub published_at: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PluginManifest { + #[serde(default = "default_manifest_schema_version")] + pub schema_version: u32, + pub id: String, + pub name: Option, + pub version: String, + pub node_kind: String, + pub kind: PluginKind, + pub description: Option, + pub license: Option, + pub license_url: Option, + pub homepage: Option, + pub repository: Option, + pub entrypoint: String, + pub bundle: PluginBundle, + pub compatibility: Option, + #[serde(default)] + pub models: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum PluginKind { + Wasm, + Native, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PluginBundle { + pub url: String, + 
pub sha256: String, + pub size_bytes: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PluginCompatibility { + pub streamkit: Option, + #[serde(default)] + pub os: Vec, + #[serde(default)] + pub arch: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ModelSpec { + pub id: Option, + pub name: Option, + #[serde(default)] + pub default: bool, + #[serde(flatten)] + pub source: ModelSource, + pub expected_size_bytes: Option, + pub sha256: Option, + pub license: Option, + pub license_url: Option, + #[serde(default)] + pub gated: bool, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "source", rename_all = "lowercase")] +pub enum ModelSource { + Huggingface { repo_id: String, revision: Option, files: Vec }, + Url { url: String }, +} + +#[derive(Debug, Clone)] +pub struct RegistryClient { + http: reqwest::Client, + cache: Arc>, + index_ttl: Duration, + manifest_ttl: Duration, +} + +impl RegistryClient { + /// Creates a registry client with timeouts and cache TTLs. + /// + /// # Errors + /// + /// Returns an error if the HTTP client cannot be constructed. + pub fn new(timeout: Duration, index_ttl: Duration, manifest_ttl: Duration) -> Result { + let http = reqwest::Client::builder() + .timeout(timeout) + .redirect(reqwest::redirect::Policy::none()) + .build() + .context("Failed to build registry HTTP client")?; + Ok(Self { + http, + cache: Arc::new(RwLock::new(RegistryCache::default())), + index_ttl, + manifest_ttl, + }) + } + + /// Fetches a registry index while validating redirect hops against policy. + /// + /// # Errors + /// + /// Returns an error if the registry cannot be fetched, parsed, or violates URL policy. 
+ pub async fn fetch_index_with_policy( + &self, + url: &Url, + policy: &MarketplaceUrlPolicy, + registry_origin: &OriginKey, + ) -> Result { + let url_str = url.as_str(); + if let Some(cached) = self.get_cached_index(url_str).await { + return Ok(cached); + } + + let bytes = validated_get_bytes( + &self.http, + policy, + "registry index", + url, + Some(registry_origin), + None, + ) + .await?; + let index: RegistryIndex = serde_json::from_slice(&bytes) + .with_context(|| format!("Failed to parse registry index from {url_str}"))?; + + self.set_cached_index(url_str, index.clone()).await; + Ok(index) + } + + /// Fetches a plugin manifest while validating redirect hops against policy. + /// + /// # Errors + /// + /// Returns an error if the manifest cannot be fetched, parsed, or violates URL policy. + pub async fn fetch_manifest_raw_with_policy( + &self, + url: &Url, + policy: &MarketplaceUrlPolicy, + registry_origin: &OriginKey, + ) -> Result { + let entry = self.fetch_manifest_entry_with_policy(url, policy, registry_origin).await?; + Ok(ManifestRaw { bytes: entry.raw, manifest: entry.manifest }) + } + + /// Fetches a text resource while validating redirect hops against policy. + /// + /// # Errors + /// + /// Returns an error if the URL cannot be fetched, parsed, or violates URL policy. 
+ pub async fn fetch_text_with_policy( + &self, + label: &str, + url: &Url, + policy: &MarketplaceUrlPolicy, + registry_origin: &OriginKey, + ) -> Result { + let bytes = + validated_get_bytes(&self.http, policy, label, url, Some(registry_origin), None) + .await?; + std::str::from_utf8(&bytes) + .with_context(|| format!("Response from {url} is not valid UTF-8")) + .map(std::string::ToString::to_string) + } + + async fn fetch_manifest_entry_with_policy( + &self, + url: &Url, + policy: &MarketplaceUrlPolicy, + registry_origin: &OriginKey, + ) -> Result { + let url_str = url.as_str(); + if let Some(cached) = self.get_cached_manifest(url_str).await { + return Ok(cached); + } + + let bytes = validated_get_bytes( + &self.http, + policy, + "manifest url", + url, + Some(registry_origin), + None, + ) + .await?; + let manifest: PluginManifest = serde_json::from_slice(&bytes) + .with_context(|| format!("Failed to parse plugin manifest from {url_str}"))?; + let entry = ManifestCacheEntry { raw: bytes, manifest }; + + self.set_cached_manifest(url_str, entry.clone()).await; + Ok(entry) + } + + async fn get_cached_index(&self, url: &str) -> Option { + let cache = self.cache.read().await; + cache + .indexes + .get(url) + .filter(|entry| entry.is_fresh(self.index_ttl)) + .map(|entry| entry.value.clone()) + } + + async fn set_cached_index(&self, url: &str, index: RegistryIndex) { + let mut cache = self.cache.write().await; + cache.indexes.insert(url.to_string(), Cached::new(index)); + cache.prune_indexes(self.index_ttl, MAX_INDEX_CACHE_ENTRIES); + } + + async fn get_cached_manifest(&self, url: &str) -> Option { + let cache = self.cache.read().await; + cache + .manifests + .get(url) + .filter(|entry| entry.is_fresh(self.manifest_ttl)) + .map(|entry| entry.value.clone()) + } + + async fn set_cached_manifest(&self, url: &str, manifest: ManifestCacheEntry) { + let mut cache = self.cache.write().await; + cache.manifests.insert(url.to_string(), Cached::new(manifest)); + 
cache.prune_manifests(self.manifest_ttl, MAX_MANIFEST_CACHE_ENTRIES); + } +} + +#[derive(Debug, Clone)] +pub struct ManifestRaw { + pub bytes: Bytes, + pub manifest: PluginManifest, +} + +#[derive(Debug, Default)] +struct RegistryCache { + indexes: HashMap>, + manifests: HashMap>, +} + +impl RegistryCache { + fn prune_indexes(&mut self, ttl: Duration, max_entries: usize) { + Self::prune_map(&mut self.indexes, ttl, max_entries); + } + + fn prune_manifests(&mut self, ttl: Duration, max_entries: usize) { + Self::prune_map(&mut self.manifests, ttl, max_entries); + } + + fn prune_map(map: &mut HashMap>, ttl: Duration, max_entries: usize) { + map.retain(|_, entry| entry.is_fresh(ttl)); + while map.len() > max_entries { + let oldest_key = + map.iter().min_by_key(|(_, entry)| entry.fetched_at).map(|(key, _)| key.clone()); + let Some(oldest_key) = oldest_key else { + break; + }; + map.remove(&oldest_key); + } + } +} + +#[derive(Debug, Clone)] +struct Cached { + value: T, + fetched_at: Instant, +} + +impl Cached { + fn new(value: T) -> Self { + Self { value, fetched_at: Instant::now() } + } + + fn is_fresh(&self, ttl: Duration) -> bool { + self.fetched_at.elapsed() < ttl + } +} + +#[derive(Debug, Clone)] +struct ManifestCacheEntry { + raw: Bytes, + manifest: PluginManifest, +} + +fn extract_minisign_payload(input: &str) -> Result { + for line in input.lines() { + let trimmed = line.trim(); + if trimmed.is_empty() { + continue; + } + if trimmed.starts_with("untrusted comment:") || trimmed.starts_with("trusted comment:") { + continue; + } + return Ok(trimmed.to_string()); + } + + Err(anyhow!("Minisign payload line not found")) +} + +fn decode_base64_line(line: &str) -> Result> { + general_purpose::STANDARD + .decode(line.as_bytes()) + .map_err(|err| anyhow!("Base64 decode failed: {err}")) +} + +fn key_id_hex(key_id: [u8; 8]) -> String { + let mut out = String::with_capacity(16); + for byte in key_id { + let _ = write!(&mut out, "{byte:02x}"); + } + out +} + +const fn 
default_registry_schema_version() -> u32 { + 1 +} + +const fn default_manifest_schema_version() -> u32 { + 1 +} + +#[cfg(test)] +#[allow(clippy::unwrap_used)] +mod tests { + use super::*; + + // Test fixtures generated with minisign CLI + // Key ID: 3E52143322870FFA (displayed as fa0f87223314523e in little-endian hex) + const TEST_PUBLIC_KEY: &str = "\ +untrusted comment: minisign public key 3E52143322870FFA +RWT6D4ciMxRSPupBP+64kBYHS38aPGWasxvKW6sKjalBw93Ao3tQojyB"; + + const TEST_PUBLIC_KEY_BASE64: &str = "RWT6D4ciMxRSPupBP+64kBYHS38aPGWasxvKW6sKjalBw93Ao3tQojyB"; + + // Second key for testing untrusted key scenarios + // Key ID: 9DE0FE1340FC07FF + const TEST_PUBLIC_KEY_2: &str = "\ +untrusted comment: minisign public key 9DE0FE1340FC07FF +RWT/B/xAE/7gnfgr0vDarJzAmJSsI2ChTNLL0RrBhNOUb7TSpNQbWD7/"; + + const TEST_MESSAGE: &[u8] = b"test message"; + + // Signature for TEST_MESSAGE using TEST_PUBLIC_KEY (prehashed/ED) + const TEST_SIGNATURE: &str = "\ +untrusted comment: signature from minisign secret key +RUT6D4ciMxRSPupIJ/JuScXnkKUNfvxSkH9aWoJ/qkpqCnCocjUPC782vYGAjrPsGvwQIV/ZEJGz2RG2pK9NE5qXzsEEbJXBzQE= +trusted comment: timestamp:1769605868 file:test_ms.txt hashed +Se0A1R+LfBuUD27evCFZ0ckKpR6P9j1Meebdk23uLFeqefFoBGjxEOodWnigTwiVxUcfZjksdyLTrPM5Cu/pCQ=="; + + // Signature for TEST_MESSAGE using TEST_PUBLIC_KEY_2 (different key ID) + const TEST_SIGNATURE_WRONG_KEY_ID: &str = "\ +untrusted comment: signature from minisign secret key +RUT/B/xAE/7gnXUiMaqN9jw88kaGVmrdIy6QaYT4NKO6Q+0u7WYnxqo/UB84TsWw6KAoF5BhJLKdifcAkGGZa9KXvzci8/FOFA8= +trusted comment: timestamp:1769606000 file:test_ms.txt hashed +P/pRiW89ReghkdC5ZJQuaVJtNy+NFYFkfNYG4d+X3z5C90iPub8+bD1Smu3euP+OijknBQudPhea/5w3QDp3BA=="; + + // Expected key ID hex (bytes fa 0f 87 22 33 14 52 3e in order) + const TEST_KEY_ID_HEX: &str = "fa0f87223314523e"; + + // ==================== Public Key Parsing Tests ==================== + + #[test] + fn parse_valid_public_key() { + let key = 
MinisignPublicKey::parse(TEST_PUBLIC_KEY).unwrap(); + assert_eq!(key.key_id_hex(), TEST_KEY_ID_HEX); + } + + #[test] + fn parse_public_key_base64_only() { + // Public key without comment line + let key = MinisignPublicKey::parse(TEST_PUBLIC_KEY_BASE64).unwrap(); + assert_eq!(key.key_id_hex(), TEST_KEY_ID_HEX); + } + + #[test] + fn parse_public_key_with_comments() { + // Key with extra blank lines and the standard comment + let key_with_whitespace = format!("\n\n{TEST_PUBLIC_KEY}\n\n"); + let key = MinisignPublicKey::parse(&key_with_whitespace).unwrap(); + assert_eq!(key.key_id_hex(), TEST_KEY_ID_HEX); + } + + #[test] + fn parse_public_key_rejects_wrong_length() { + // Too short - missing bytes + let short_key = "RWTAAA=="; + let err = MinisignPublicKey::parse(short_key).unwrap_err(); + assert!(err.to_string().contains("Invalid minisign public key length")); + } + + #[test] + fn parse_public_key_rejects_wrong_algorithm() { + // Build a 42-byte key with wrong algorithm bytes (XX instead of Ed) + // XX (2 bytes) + key_id (8 bytes) + pubkey (32 bytes) = 42 bytes + let mut wrong_algo_bytes = vec![0u8; 42]; + wrong_algo_bytes[0] = b'X'; + wrong_algo_bytes[1] = b'X'; + let wrong_algo = general_purpose::STANDARD.encode(&wrong_algo_bytes); + let err = MinisignPublicKey::parse(&wrong_algo).unwrap_err(); + assert!( + err.to_string().contains("Unsupported minisign public key algorithm"), + "Expected algorithm error, got: {err}" + ); + } + + #[test] + fn parse_public_key_rejects_invalid_base64() { + let invalid = "not valid base64!!!"; + let err = MinisignPublicKey::parse(invalid).unwrap_err(); + // Error is wrapped with context, check the full chain + assert!( + format!("{err:?}").contains("Base64 decode failed"), + "Expected error to contain 'Base64 decode failed', got: {err:?}" + ); + } + + #[test] + fn parse_public_key_rejects_empty() { + let err = MinisignPublicKey::parse("").unwrap_err(); + assert!(err.to_string().contains("payload line not found")); + + let err = 
MinisignPublicKey::parse(" \n \n ").unwrap_err(); + assert!(err.to_string().contains("payload line not found")); + } + + #[test] + fn parse_public_key_rejects_only_comments() { + let only_comment = "untrusted comment: some key\n"; + let err = MinisignPublicKey::parse(only_comment).unwrap_err(); + assert!(err.to_string().contains("payload line not found")); + } + + // ==================== Signature Parsing Tests ==================== + + #[test] + fn parse_valid_signature_hashed() { + let sig = MinisignSignature::parse(TEST_SIGNATURE).unwrap(); + assert!(sig.prehashed); + assert_eq!(sig.key_id_hex(), TEST_KEY_ID_HEX); + } + + #[test] + fn parse_valid_signature_unhashed() { + // Manually construct an unhashed signature (Ed instead of ED) + // Decode a valid signature, change algorithm bytes, re-encode + let mut modified = general_purpose::STANDARD + .decode("RUT6D4ciMxRSPupIJ/JuScXnkKUNfvxSkH9aWoJ/qkpqCnCocjUPC782vYGAjrPsGvwQIV/ZEJGz2RG2pK9NE5qXzsEEbJXBzQE=") + .unwrap(); + // Change ED (0x45, 0x44) to Ed (0x45, 0x64) + modified[1] = 0x64; // 'd' instead of 'D' + let unhashed_base64 = general_purpose::STANDARD.encode(&modified); + + let sig = MinisignSignature::parse(&unhashed_base64).unwrap(); + assert!(!sig.prehashed); + assert_eq!(sig.key_id_hex(), TEST_KEY_ID_HEX); + } + + #[test] + fn parse_signature_rejects_wrong_length() { + // Too short + let short_sig = "RWTAAA=="; + let err = MinisignSignature::parse(short_sig).unwrap_err(); + assert!(err.to_string().contains("Invalid minisign signature length")); + } + + #[test] + fn parse_signature_rejects_wrong_algorithm() { + // Build a 74-byte signature with wrong algorithm bytes (XX instead of Ed/ED) + // XX (2 bytes) + key_id (8 bytes) + signature (64 bytes) = 74 bytes + let mut wrong_algo_bytes = vec![0u8; 74]; + wrong_algo_bytes[0] = b'X'; + wrong_algo_bytes[1] = b'X'; + let wrong_algo = general_purpose::STANDARD.encode(&wrong_algo_bytes); + let err = MinisignSignature::parse(&wrong_algo).unwrap_err(); + assert!( + 
err.to_string().contains("Unsupported minisign signature algorithm"), + "Expected algorithm error, got: {err}" + ); + } + + #[test] + fn parse_signature_rejects_invalid_base64() { + let invalid = "!!!invalid base64!!!"; + let err = MinisignSignature::parse(invalid).unwrap_err(); + // Error is wrapped with context, check the full chain + assert!( + format!("{err:?}").contains("Base64 decode failed"), + "Expected error to contain 'Base64 decode failed', got: {err:?}" + ); + } + + #[test] + fn parse_signature_rejects_empty() { + let err = MinisignSignature::parse("").unwrap_err(); + assert!(err.to_string().contains("payload line not found")); + } + + // ==================== Verifier Construction Tests ==================== + + #[test] + fn verifier_from_single_key() { + let verifier = + MinisignVerifier::from_trusted_pubkeys(&[TEST_PUBLIC_KEY.to_string()]).unwrap(); + assert_eq!(verifier.trusted_keys.len(), 1); + } + + #[test] + fn verifier_from_multiple_keys() { + let verifier = MinisignVerifier::from_trusted_pubkeys(&[ + TEST_PUBLIC_KEY.to_string(), + TEST_PUBLIC_KEY_2.to_string(), + ]) + .unwrap(); + assert_eq!(verifier.trusted_keys.len(), 2); + } + + #[test] + fn verifier_skips_empty_strings() { + let verifier = MinisignVerifier::from_trusted_pubkeys(&[ + String::new(), + TEST_PUBLIC_KEY.to_string(), + " ".to_string(), + "\n\n".to_string(), + ]) + .unwrap(); + assert_eq!(verifier.trusted_keys.len(), 1); + } + + #[test] + fn verifier_rejects_malformed_key() { + let err = + MinisignVerifier::from_trusted_pubkeys(&["not a valid key".to_string()]).unwrap_err(); + // Error is wrapped with context, check the full chain + assert!( + format!("{err:?}").contains("Base64 decode failed"), + "Expected error to contain 'Base64 decode failed', got: {err:?}" + ); + } + + #[test] + fn verifier_empty_keys_allowed() { + // Empty key list is allowed at construction time (error at verify time) + let verifier = MinisignVerifier::from_trusted_pubkeys(&[]).unwrap(); + 
assert_eq!(verifier.trusted_keys.len(), 0); + } + + // ==================== Verification Tests ==================== + + #[test] + fn verify_valid_signature() { + let verifier = + MinisignVerifier::from_trusted_pubkeys(&[TEST_PUBLIC_KEY.to_string()]).unwrap(); + let result = verifier.verify(TEST_MESSAGE, TEST_SIGNATURE).unwrap(); + assert_eq!(result.key_id, TEST_KEY_ID_HEX); + } + + #[test] + fn verify_with_multiple_trusted_keys() { + // Verifier with multiple keys should find the right one + let verifier = MinisignVerifier::from_trusted_pubkeys(&[ + TEST_PUBLIC_KEY_2.to_string(), + TEST_PUBLIC_KEY.to_string(), + ]) + .unwrap(); + let result = verifier.verify(TEST_MESSAGE, TEST_SIGNATURE).unwrap(); + assert_eq!(result.key_id, TEST_KEY_ID_HEX); + } + + #[test] + fn verify_fails_wrong_message() { + let verifier = + MinisignVerifier::from_trusted_pubkeys(&[TEST_PUBLIC_KEY.to_string()]).unwrap(); + let err = verifier.verify(b"wrong message", TEST_SIGNATURE).unwrap_err(); + assert!(err.to_string().contains("verification failed")); + } + + #[test] + fn verify_fails_untrusted_key_id() { + // Use signature from key 2 but only trust key 1 + let verifier = + MinisignVerifier::from_trusted_pubkeys(&[TEST_PUBLIC_KEY.to_string()]).unwrap(); + let err = verifier.verify(TEST_MESSAGE, TEST_SIGNATURE_WRONG_KEY_ID).unwrap_err(); + assert!(err.to_string().contains("not in the trusted key set")); + } + + #[test] + fn verify_fails_no_trusted_keys() { + let verifier = MinisignVerifier::from_trusted_pubkeys(&[]).unwrap(); + let err = verifier.verify(TEST_MESSAGE, TEST_SIGNATURE).unwrap_err(); + assert!(err.to_string().contains("No trusted minisign public keys configured")); + } + + #[test] + fn verify_fails_corrupted_signature() { + let verifier = + MinisignVerifier::from_trusted_pubkeys(&[TEST_PUBLIC_KEY.to_string()]).unwrap(); + + // Corrupt the signature by modifying a byte in the base64 payload + let corrupted = TEST_SIGNATURE.replace( + 
"RUT6D4ciMxRSPupIJ/JuScXnkKUNfvxSkH9aWoJ/qkpqCnCocjUPC782vYGAjrPsGvwQIV/ZEJGz2RG2pK9NE5qXzsEEbJXBzQE=", + "RUT6D4ciMxRSPupIJ/JuScXnkKUNfvxSkH9aWoJ/qkpqCnCocjUPC782vYGAjrPsGvwQIV/ZEJGz2RG2pK9NE5qXzsEEbJXBzQA=", + ); + + let err = verifier.verify(TEST_MESSAGE, &corrupted).unwrap_err(); + assert!(err.to_string().contains("verification failed")); + } + + // ==================== Helper Function Tests ==================== + + #[test] + fn key_id_hex_formats_correctly() { + let key_id: [u8; 8] = [0xfa, 0x0f, 0x87, 0x22, 0x33, 0x14, 0x52, 0x3e]; + assert_eq!(key_id_hex(key_id), "fa0f87223314523e"); + } + + #[test] + fn key_id_hex_handles_leading_zeros() { + let key_id: [u8; 8] = [0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07]; + assert_eq!(key_id_hex(key_id), "0001020304050607"); + } + + #[test] + fn extract_minisign_payload_skips_comments() { + let input = "\ +untrusted comment: some comment +RWT6D4ciMxRSPupBP+64kBYHS38aPGWasxvKW6sKjalBw93Ao3tQojyB +trusted comment: another comment +abcd"; + let payload = extract_minisign_payload(input).unwrap(); + assert_eq!(payload, "RWT6D4ciMxRSPupBP+64kBYHS38aPGWasxvKW6sKjalBw93Ao3tQojyB"); + } + + #[test] + fn extract_minisign_payload_handles_whitespace() { + let input = " \n \n RWT6D4ciMxRSPupBP+64kBYHS38aPGWasxvKW6sKjalBw93Ao3tQojyB \n"; + let payload = extract_minisign_payload(input).unwrap(); + assert_eq!(payload, "RWT6D4ciMxRSPupBP+64kBYHS38aPGWasxvKW6sKjalBw93Ao3tQojyB"); + } + + #[test] + fn decode_base64_line_valid() { + let decoded = decode_base64_line("SGVsbG8=").unwrap(); + assert_eq!(decoded, b"Hello"); + } + + #[test] + fn decode_base64_line_invalid() { + let err = decode_base64_line("!!!").unwrap_err(); + assert!(err.to_string().contains("Base64 decode failed")); + } +} diff --git a/apps/skit/src/marketplace_installer.rs b/apps/skit/src/marketplace_installer.rs new file mode 100644 index 00000000..47d52907 --- /dev/null +++ b/apps/skit/src/marketplace_installer.rs @@ -0,0 +1,2244 @@ +// SPDX-FileCopyrightText: © 2025 
StreamKit Contributors +// +// SPDX-License-Identifier: MPL-2.0 + +use std::{ + collections::{HashMap, HashSet, VecDeque}, + fmt::Write, + path::{Component, Path, PathBuf}, + sync::Arc, + time::{Duration, SystemTime, UNIX_EPOCH}, +}; + +use anyhow::{anyhow, Context, Result}; +use futures::StreamExt; +use reqwest::Client; +use semver::{Version, VersionReq}; +use serde::{Deserialize, Serialize}; +use sha2::{Digest, Sha256}; +use tokio::io::AsyncWriteExt; +use tokio::sync::Mutex; +use tokio_util::sync::CancellationToken; +use tracing::info; +use uuid::Uuid; + +use crate::{ + config::{MarketplaceHostPolicy, MarketplaceSchemePolicy, PluginConfig}, + marketplace::{MinisignVerifier, PluginKind, RegistryClient, RegistryIndex}, + marketplace_security::{origin_key, validated_get_response, MarketplaceUrlPolicy, OriginKey}, + permissions::Permissions, + plugin_paths, + plugin_records::{ + active_dir as plugin_active_dir, record_path as plugin_record_path, ActivePluginRecord, + }, + plugins::{PluginSummary, PluginType, SharedUnifiedPluginManager}, +}; + +const STEP_DOWNLOAD_MANIFEST: &str = "download_manifest"; +const STEP_VERIFY_SIGNATURE: &str = "verify_signature"; +const STEP_DOWNLOAD_BUNDLE: &str = "download_bundle"; +const STEP_EXTRACT_BUNDLE: &str = "extract_bundle"; +const STEP_ACTIVATE: &str = "activate"; +const STEP_LOAD_PLUGIN: &str = "load_plugin"; +const STEP_DOWNLOAD_MODELS: &str = "download_models"; + +const REGISTRY_TIMEOUT_SECS: u64 = 20; +const REGISTRY_INDEX_TTL_SECS: u64 = 60; +const REGISTRY_MANIFEST_TTL_SECS: u64 = 60; +const MAX_JOB_HISTORY: usize = 200; + +#[derive(Debug, Clone, Deserialize)] +pub struct InstallPluginRequest { + pub registry: String, + pub plugin_id: String, + pub version: Option, + #[serde(default)] + pub install_models: bool, + #[serde(default)] + pub model_ids: Option>, +} + +#[derive(Debug, Clone, Serialize)] +#[serde(rename_all = "lowercase")] +pub enum JobStatus { + Queued, + Running, + Succeeded, + Failed, + Cancelled, +} + 
+#[derive(Debug, Clone, Serialize)] +#[serde(rename_all = "lowercase")] +pub enum StepStatus { + Pending, + Running, + Succeeded, + Failed, +} + +#[derive(Debug, Clone, Default, Serialize)] +pub struct JobProgress { + #[serde(skip_serializing_if = "Option::is_none")] + pub bytes_done: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub bytes_total: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub items_done: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub items_total: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub current_item: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub rate_bytes_per_sec: Option, +} + +#[derive(Debug, Clone, Serialize)] +pub struct JobStep { + pub name: String, + pub status: StepStatus, + #[serde(skip_serializing_if = "Option::is_none")] + pub progress: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub error: Option, +} + +#[derive(Debug, Clone, Serialize)] +pub struct JobInfo { + pub status: JobStatus, + pub started_at_ms: Option, + pub updated_at_ms: u128, + pub summary: String, + pub steps: Vec, +} + +#[derive(Clone)] +pub struct InstallJobQueue { + state: Arc>, + installer: Arc, +} + +impl InstallJobQueue { + /// Creates a new install queue with registry and verification settings. + /// + /// # Errors + /// + /// Returns an error if the registry client or verifier cannot be initialized. 
+ pub fn new(config: &PluginConfig, plugin_manager: SharedUnifiedPluginManager) -> Result { + let registry_client = RegistryClient::new( + Duration::from_secs(REGISTRY_TIMEOUT_SECS), + Duration::from_secs(REGISTRY_INDEX_TTL_SECS), + Duration::from_secs(REGISTRY_MANIFEST_TTL_SECS), + )?; + let verifier = MinisignVerifier::from_trusted_pubkeys(&config.trusted_pubkeys)?; + let models_dir = config + .models_dir + .as_ref() + .map(|dir| dir.trim()) + .filter(|dir| !dir.is_empty()) + .map_or_else(|| PathBuf::from("models"), PathBuf::from); + let huggingface_token = config + .huggingface_token + .as_ref() + .map(|token| token.trim().to_string()) + .filter(|token| !token.is_empty()); + if matches!( + config.marketplace.security.marketplace_scheme_policy, + MarketplaceSchemePolicy::AllowHttp + ) || matches!( + config.marketplace.security.marketplace_host_policy, + MarketplaceHostPolicy::AllowPrivate + ) { + tracing::warn!( + allow_http = matches!( + config.marketplace.security.marketplace_scheme_policy, + MarketplaceSchemePolicy::AllowHttp + ), + allow_private_hosts = matches!( + config.marketplace.security.marketplace_host_policy, + MarketplaceHostPolicy::AllowPrivate + ), + "Marketplace URL policy allows non-default schemes or private hosts; intended for development" + ); + } + let marketplace_policy = MarketplaceUrlPolicy::from_config(config); + let installer = PluginInstaller::new( + registry_client, + verifier, + plugin_manager, + PluginInstallerSettings { + plugin_dir: PathBuf::from(&config.directory), + models_dir, + huggingface_token, + allow_native_marketplace: config.marketplace.allow_native_marketplace, + allow_model_urls: config.marketplace.security.allow_model_urls, + marketplace_policy, + registries: config.registries.clone(), + }, + )?; + Ok(Self { + state: Arc::new(Mutex::new(InstallQueueState::default())), + installer: Arc::new(installer), + }) + } + + pub fn registries(&self) -> Vec { + self.installer.registries.clone() + } + + pub fn 
registry_client(&self) -> RegistryClient { + self.installer.registry_client.clone() + } + + pub fn verifier(&self) -> MinisignVerifier { + self.installer.verifier.clone() + } + + pub async fn enqueue(&self, request: InstallPluginRequest, permissions: Permissions) -> String { + let job_id = Uuid::new_v4().to_string(); + let steps = install_steps(); + let info = JobInfo { + status: JobStatus::Queued, + started_at_ms: None, + updated_at_ms: now_ms(), + summary: "Queued".to_string(), + steps, + }; + let job = InstallJob { info, cancel: CancellationToken::new(), request, permissions }; + + let mut start_worker = false; + let mut state = self.state.lock().await; + state.jobs.insert(job_id.clone(), job); + state.queue.push_back(job_id.clone()); + state.job_order.push_back(job_id.clone()); + state.prune_jobs(); + + if !state.worker_running { + state.worker_running = true; + start_worker = true; + } + drop(state); + + if start_worker { + let queue = self.clone(); + tokio::spawn(async move { + queue.run_worker().await; + }); + } + + job_id + } + + pub async fn get_job(&self, job_id: &str) -> Option { + let state = self.state.lock().await; + state.jobs.get(job_id).map(|job| job.info.clone()) + } + + pub async fn cancel_job(&self, job_id: &str) -> Option { + let mut state = self.state.lock().await; + let mut remove_from_queue = false; + { + let job = state.jobs.get_mut(job_id)?; + match job.info.status { + JobStatus::Queued => { + job.cancel.cancel(); + job.info.status = JobStatus::Cancelled; + job.info.summary = "Cancelled".to_string(); + job.info.updated_at_ms = now_ms(); + remove_from_queue = true; + }, + JobStatus::Running => { + job.cancel.cancel(); + job.info.summary = "Cancelling".to_string(); + job.info.updated_at_ms = now_ms(); + }, + JobStatus::Succeeded | JobStatus::Failed | JobStatus::Cancelled => {}, + } + } + if remove_from_queue { + state.queue.retain(|id| id != job_id); + } + state.jobs.get(job_id).map(|job| job.info.clone()) + } + + pub fn 
is_registry_configured(&self, registry: &str) -> bool { + self.installer.is_registry_configured(registry) + } + + async fn run_worker(self) { + loop { + let Some(context) = self.next_job().await else { + let mut state = self.state.lock().await; + state.worker_running = false; + drop(state); + break; + }; + + if context.cancel.is_cancelled() { + self.mark_cancelled(&context.job_id, "Cancelled").await; + continue; + } + + let tracker = JobTracker { job_id: context.job_id.clone(), queue: self.clone() }; + + tracker.set_status(JobStatus::Running, "Starting install").await; + let result = self + .installer + .install( + context.request, + context.permissions, + tracker.clone(), + context.cancel.clone(), + ) + .await; + + match result { + Ok(()) => { + tracker.set_status(JobStatus::Succeeded, "Install completed").await; + }, + Err(InstallError::Cancelled) => { + tracker.mark_cancelled("Cancelled").await; + }, + Err(InstallError::Other(err)) => { + tracker.set_status(JobStatus::Failed, format!("Install failed: {err}")).await; + }, + } + } + } + + async fn next_job(&self) -> Option { + loop { + let mut state = self.state.lock().await; + let job_id = state.queue.pop_front()?; + let job = state.jobs.get_mut(&job_id)?; + if matches!(job.info.status, JobStatus::Cancelled) { + continue; + } + job.info.status = JobStatus::Running; + job.info.started_at_ms = Some(now_ms()); + job.info.updated_at_ms = now_ms(); + job.info.summary = "Running".to_string(); + let request = job.request.clone(); + let permissions = job.permissions.clone(); + let cancel = job.cancel.clone(); + drop(state); + return Some(JobContext { job_id, request, permissions, cancel }); + } + } + + async fn update_job(&self, job_id: &str, mut update: F) + where + F: FnMut(&mut JobInfo), + { + let mut state = self.state.lock().await; + if let Some(job) = state.jobs.get_mut(job_id) { + update(&mut job.info); + job.info.updated_at_ms = now_ms(); + } + } + + async fn mark_cancelled(&self, job_id: &str, summary: &str) { + 
self.update_job(job_id, |info| { + info.status = JobStatus::Cancelled; + info.summary = summary.to_string(); + for step in &mut info.steps { + if matches!(step.status, StepStatus::Running) { + step.status = StepStatus::Failed; + step.error = Some("Cancelled".to_string()); + } + } + }) + .await; + } +} + +#[derive(Default)] +struct InstallQueueState { + jobs: HashMap, + queue: VecDeque, + job_order: VecDeque, + worker_running: bool, +} + +impl InstallQueueState { + fn prune_jobs(&mut self) { + if self.jobs.len() <= MAX_JOB_HISTORY { + return; + } + + // Enforce the cap by pruning terminal jobs first, then the oldest queued jobs. + let mut pruned: HashSet = HashSet::new(); + + for job_id in &self.job_order { + if self.jobs.len().saturating_sub(pruned.len()) <= MAX_JOB_HISTORY { + break; + } + let should_prune = self.jobs.get(job_id).is_none_or(|job| { + matches!( + job.info.status, + JobStatus::Succeeded | JobStatus::Failed | JobStatus::Cancelled + ) + }); + if should_prune { + pruned.insert(job_id.clone()); + } + } + + for job_id in &self.job_order { + if self.jobs.len().saturating_sub(pruned.len()) <= MAX_JOB_HISTORY { + break; + } + let should_prune = self + .jobs + .get(job_id) + .is_none_or(|job| matches!(job.info.status, JobStatus::Queued)); + if should_prune { + pruned.insert(job_id.clone()); + } + } + + if pruned.is_empty() { + return; + } + + for job_id in &pruned { + self.jobs.remove(job_id); + } + self.queue.retain(|job_id| !pruned.contains(job_id)); + self.job_order.retain(|job_id| !pruned.contains(job_id)); + } +} + +struct InstallJob { + info: JobInfo, + cancel: CancellationToken, + request: InstallPluginRequest, + permissions: Permissions, +} + +#[derive(Clone)] +struct JobTracker { + job_id: String, + queue: InstallJobQueue, +} + +impl JobTracker { + async fn set_status>(&self, status: JobStatus, summary: S) { + let summary = summary.into(); + self.queue + .update_job(&self.job_id, |info| { + info.status = status.clone(); + 
info.summary.clone_from(&summary); + }) + .await; + } + + async fn start_step(&self, step_name: &str) { + self.queue + .update_job(&self.job_id, |info| { + if let Some(step) = info.steps.iter_mut().find(|step| step.name == step_name) { + step.status = StepStatus::Running; + step.error = None; + step.progress = None; + } + }) + .await; + } + + async fn succeed_step(&self, step_name: &str) { + self.queue + .update_job(&self.job_id, |info| { + if let Some(step) = info.steps.iter_mut().find(|step| step.name == step_name) { + step.status = StepStatus::Succeeded; + } + }) + .await; + } + + async fn fail_step(&self, step_name: &str, error: String) { + self.queue + .update_job(&self.job_id, |info| { + if let Some(step) = info.steps.iter_mut().find(|step| step.name == step_name) { + step.status = StepStatus::Failed; + step.error = Some(error.clone()); + } + }) + .await; + } + + async fn update_progress(&self, step_name: &str, progress: JobProgress) { + self.queue + .update_job(&self.job_id, |info| { + if let Some(step) = info.steps.iter_mut().find(|step| step.name == step_name) { + step.progress = Some(progress.clone()); + } + }) + .await; + } + + async fn mark_cancelled(&self, summary: &str) { + self.queue.mark_cancelled(&self.job_id, summary).await; + } +} + +struct JobContext { + job_id: String, + request: InstallPluginRequest, + permissions: Permissions, + cancel: CancellationToken, +} + +#[derive(Clone)] +struct PluginInstaller { + registry_client: RegistryClient, + download_client: Client, + verifier: MinisignVerifier, + plugin_manager: SharedUnifiedPluginManager, + plugin_dir: PathBuf, + models_dir: PathBuf, + huggingface_token: Option, + allow_native_marketplace: bool, + allow_model_urls: bool, + marketplace_policy: MarketplaceUrlPolicy, + registries: Vec, +} + +struct PluginInstallerSettings { + plugin_dir: PathBuf, + models_dir: PathBuf, + huggingface_token: Option, + allow_native_marketplace: bool, + allow_model_urls: bool, + marketplace_policy: 
MarketplaceUrlPolicy, + registries: Vec, +} + +struct DownloadModelRequest<'a> { + url: &'a str, + target_path: &'a Path, + display_name: &'a str, + items_done: u64, + items_total: u64, + expected_size: Option, + expected_sha256: Option<&'a str>, + bearer_token: Option<&'a str>, + registry_origin: Option, +} + +impl PluginInstaller { + fn new( + registry_client: RegistryClient, + verifier: MinisignVerifier, + plugin_manager: SharedUnifiedPluginManager, + settings: PluginInstallerSettings, + ) -> Result { + let download_client = Client::builder() + .timeout(Duration::from_secs(REGISTRY_TIMEOUT_SECS)) + .redirect(reqwest::redirect::Policy::none()) + .build() + .context("Failed to build bundle HTTP client")?; + Ok(Self { + registry_client, + download_client, + verifier, + plugin_manager, + plugin_dir: settings.plugin_dir, + models_dir: settings.models_dir, + huggingface_token: settings.huggingface_token, + allow_native_marketplace: settings.allow_native_marketplace, + allow_model_urls: settings.allow_model_urls, + marketplace_policy: settings.marketplace_policy, + registries: settings.registries, + }) + } + + fn is_registry_configured(&self, registry: &str) -> bool { + self.registries.iter().any(|entry| entry == registry) + } + + async fn install( + &self, + request: InstallPluginRequest, + permissions: Permissions, + tracker: JobTracker, + cancel: CancellationToken, + ) -> Result<(), InstallError> { + let registry_url = self.resolve_registry(&request.registry)?; + let plugin_id = request.plugin_id.trim().to_string(); + if plugin_id.is_empty() { + return Err(anyhow!("Plugin id must not be empty").into()); + } + + tracker.start_step(STEP_DOWNLOAD_MANIFEST).await; + if let Err(err) = plugin_paths::validate_path_component("plugin id", &plugin_id) { + tracker.fail_step(STEP_DOWNLOAD_MANIFEST, err.to_string()).await; + return Err(err.into()); + } + + let registry_url = + match self.marketplace_policy.validate_url("registry index", ®istry_url, None).await + { + Ok(url) => 
url, + Err(err) => { + tracker.fail_step(STEP_DOWNLOAD_MANIFEST, err.to_string()).await; + return Err(err.into()); + }, + }; + let registry_origin = match origin_key(®istry_url) { + Ok(origin) => origin, + Err(err) => { + tracker.fail_step(STEP_DOWNLOAD_MANIFEST, err.to_string()).await; + return Err(err.into()); + }, + }; + + let index = match self + .registry_client + .fetch_index_with_policy(®istry_url, &self.marketplace_policy, ®istry_origin) + .await + { + Ok(index) => index, + Err(err) => { + tracker.fail_step(STEP_DOWNLOAD_MANIFEST, err.to_string()).await; + return Err(err.into()); + }, + }; + let version_entry = + match select_registry_version(&index, &plugin_id, request.version.as_deref()) { + Ok(entry) => entry, + Err(err) => { + tracker.fail_step(STEP_DOWNLOAD_MANIFEST, err.to_string()).await; + return Err(err.into()); + }, + }; + tracker.succeed_step(STEP_DOWNLOAD_MANIFEST).await; + + tracker.start_step(STEP_VERIFY_SIGNATURE).await; + let manifest_url = match self + .marketplace_policy + .validate_url("manifest url", &version_entry.manifest_url, Some(®istry_origin)) + .await + { + Ok(url) => url, + Err(err) => { + tracker.fail_step(STEP_VERIFY_SIGNATURE, err.to_string()).await; + return Err(err.into()); + }, + }; + let signature_url_raw = version_entry + .signature_url + .clone() + .unwrap_or_else(|| format!("{}.minisig", manifest_url.as_str())); + let signature_url = match self + .marketplace_policy + .validate_url("signature url", &signature_url_raw, Some(®istry_origin)) + .await + { + Ok(url) => url, + Err(err) => { + tracker.fail_step(STEP_VERIFY_SIGNATURE, err.to_string()).await; + return Err(err.into()); + }, + }; + let manifest_raw = match self + .registry_client + .fetch_manifest_raw_with_policy( + &manifest_url, + &self.marketplace_policy, + ®istry_origin, + ) + .await + { + Ok(raw) => raw, + Err(err) => { + tracker.fail_step(STEP_VERIFY_SIGNATURE, err.to_string()).await; + return Err(err.into()); + }, + }; + let signature_text = match self + 
.registry_client + .fetch_text_with_policy( + "signature url", + &signature_url, + &self.marketplace_policy, + ®istry_origin, + ) + .await + { + Ok(text) => text, + Err(err) => { + tracker.fail_step(STEP_VERIFY_SIGNATURE, err.to_string()).await; + return Err(err.into()); + }, + }; + if let Err(err) = self.verifier.verify(manifest_raw.bytes.as_ref(), &signature_text) { + tracker.fail_step(STEP_VERIFY_SIGNATURE, err.to_string()).await; + return Err(err.into()); + } + + let manifest = manifest_raw.manifest; + if manifest.id != plugin_id { + let manifest_id = manifest.id.as_str(); + let requested_id = plugin_id.as_str(); + let error = anyhow!( + "Manifest plugin id '{manifest_id}' does not match requested id '{requested_id}'" + ) + .to_string(); + tracker.fail_step(STEP_VERIFY_SIGNATURE, error.clone()).await; + return Err(anyhow!(error).into()); + } + + if manifest.version != version_entry.version { + let manifest_version = manifest.version.as_str(); + let requested_version = version_entry.version.as_str(); + let error = anyhow!( + "Manifest version '{manifest_version}' does not match requested version '{requested_version}'" + ) + .to_string(); + tracker.fail_step(STEP_VERIFY_SIGNATURE, error.clone()).await; + return Err(anyhow!(error).into()); + } + + if let Err(err) = plugin_paths::validate_path_component("plugin id", &manifest.id) { + tracker.fail_step(STEP_VERIFY_SIGNATURE, err.to_string()).await; + return Err(err.into()); + } + + if let Err(err) = plugin_paths::validate_path_component("plugin version", &manifest.version) + { + tracker.fail_step(STEP_VERIFY_SIGNATURE, err.to_string()).await; + return Err(err.into()); + } + + if let Err(err) = validate_manifest_compatibility(&manifest) { + tracker.fail_step(STEP_VERIFY_SIGNATURE, err.to_string()).await; + return Err(err.into()); + } + + let namespaced_kind = namespaced_kind(&manifest); + if !permissions.is_plugin_allowed(&namespaced_kind) { + let error = format!("Plugin '{namespaced_kind}' is not allowed by 
policy"); + tracker.fail_step(STEP_VERIFY_SIGNATURE, error.clone()).await; + return Err(anyhow!(error).into()); + } + + if matches!(manifest.kind, PluginKind::Native) && !self.allow_native_marketplace { + let error = "Native marketplace installs are disabled".to_string(); + tracker.fail_step(STEP_VERIFY_SIGNATURE, error.clone()).await; + return Err(anyhow!(error).into()); + } + + tracker.succeed_step(STEP_VERIFY_SIGNATURE).await; + + Self::ensure_not_cancelled(&cancel)?; + + self.handle_bundle_install( + &request, + &manifest, + &tracker, + &cancel, + ®istry_origin, + &namespaced_kind, + ) + .await?; + + Self::ensure_not_cancelled(&cancel)?; + + self.handle_model_downloads(&request, &manifest, &tracker, &cancel, ®istry_origin) + .await?; + + Ok(()) + } + + async fn handle_bundle_install( + &self, + request: &InstallPluginRequest, + manifest: &crate::marketplace::PluginManifest, + tracker: &JobTracker, + cancel: &CancellationToken, + registry_origin: &OriginKey, + namespaced_kind: &str, + ) -> Result<(), InstallError> { + Self::ensure_not_cancelled(cancel)?; + tracker.start_step(STEP_DOWNLOAD_BUNDLE).await; + let base_real = match plugin_paths::ensure_base_dir(&self.plugin_dir).await { + Ok(base_real) => base_real, + Err(err) => { + tracker.fail_step(STEP_DOWNLOAD_BUNDLE, err.to_string()).await; + return Err(err.into()); + }, + }; + + let bundle_dir = self.plugin_dir.join("bundles").join(&manifest.id).join(&manifest.version); + if bundle_dir.exists() { + if !request.install_models { + let err = anyhow!("Bundle version '{}' is already installed", manifest.version); + tracker.fail_step(STEP_DOWNLOAD_BUNDLE, err.to_string()).await; + return Err(err.into()); + } + if let Err(err) = + plugin_paths::ensure_existing_dir_under(&base_real, &bundle_dir, "bundle").await + { + tracker.fail_step(STEP_DOWNLOAD_BUNDLE, err.to_string()).await; + return Err(err.into()); + } + tracker.succeed_step(STEP_DOWNLOAD_BUNDLE).await; + for step in [STEP_EXTRACT_BUNDLE, STEP_ACTIVATE, 
STEP_LOAD_PLUGIN] { + Self::mark_step_succeeded(tracker, step).await; + } + return Ok(()); + } + + let bundle_path = match self + .download_bundle(manifest, tracker, cancel, registry_origin, &base_real) + .await + { + Ok(path) => path, + Err(InstallError::Cancelled) => return Err(InstallError::Cancelled), + Err(InstallError::Other(err)) => { + tracker.fail_step(STEP_DOWNLOAD_BUNDLE, err.to_string()).await; + return Err(InstallError::Other(err)); + }, + }; + tracker.succeed_step(STEP_DOWNLOAD_BUNDLE).await; + + Self::ensure_not_cancelled(cancel)?; + + tracker.start_step(STEP_EXTRACT_BUNDLE).await; + let bundle_dir = match self.extract_bundle(manifest, &bundle_path, &base_real, cancel).await + { + Ok(dir) => dir, + Err(InstallError::Cancelled) => return Err(InstallError::Cancelled), + Err(InstallError::Other(err)) => { + tracker.fail_step(STEP_EXTRACT_BUNDLE, err.to_string()).await; + return Err(InstallError::Other(err)); + }, + }; + tracker.succeed_step(STEP_EXTRACT_BUNDLE).await; + + Self::ensure_not_cancelled(cancel)?; + + tracker.start_step(STEP_ACTIVATE).await; + let entrypoint_path = match self.activate_bundle(manifest, &bundle_dir, &base_real).await { + Ok(path) => path, + Err(InstallError::Cancelled) => return Err(InstallError::Cancelled), + Err(InstallError::Other(err)) => { + tracker.fail_step(STEP_ACTIVATE, err.to_string()).await; + return Err(InstallError::Other(err)); + }, + }; + tracker.succeed_step(STEP_ACTIVATE).await; + + Self::ensure_not_cancelled(cancel)?; + + tracker.start_step(STEP_LOAD_PLUGIN).await; + match self.load_plugin(manifest, &entrypoint_path, namespaced_kind).await { + Ok(_) => { + tracker.succeed_step(STEP_LOAD_PLUGIN).await; + }, + Err(InstallError::Cancelled) => return Err(InstallError::Cancelled), + Err(InstallError::Other(err)) => { + tracker.fail_step(STEP_LOAD_PLUGIN, err.to_string()).await; + return Err(InstallError::Other(err)); + }, + } + + Ok(()) + } + + async fn handle_model_downloads( + &self, + request: 
&InstallPluginRequest, + manifest: &crate::marketplace::PluginManifest, + tracker: &JobTracker, + cancel: &CancellationToken, + registry_origin: &OriginKey, + ) -> Result<(), InstallError> { + Self::ensure_not_cancelled(cancel)?; + tracker.start_step(STEP_DOWNLOAD_MODELS).await; + + if request.model_ids.is_some() && !request.install_models { + let err = anyhow!("Model selection requires install_models=true"); + tracker.fail_step(STEP_DOWNLOAD_MODELS, err.to_string()).await; + return Err(InstallError::Other(err)); + } + if request.install_models && !manifest.models.is_empty() { + match self + .download_models( + manifest, + request.model_ids.as_deref(), + tracker, + cancel, + Some(registry_origin), + ) + .await + { + Ok(()) => { + tracker.succeed_step(STEP_DOWNLOAD_MODELS).await; + }, + Err(InstallError::Cancelled) => return Err(InstallError::Cancelled), + Err(InstallError::Other(err)) => { + tracker.fail_step(STEP_DOWNLOAD_MODELS, err.to_string()).await; + return Err(InstallError::Other(err)); + }, + } + } else { + tracker.succeed_step(STEP_DOWNLOAD_MODELS).await; + } + + Ok(()) + } + + fn ensure_not_cancelled(cancel: &CancellationToken) -> Result<(), InstallError> { + if cancel.is_cancelled() { + Err(InstallError::Cancelled) + } else { + Ok(()) + } + } + + async fn mark_step_succeeded(tracker: &JobTracker, step_name: &str) { + tracker.start_step(step_name).await; + tracker.succeed_step(step_name).await; + } + + fn resolve_registry(&self, registry: &str) -> Result { + if self.registries.is_empty() { + return Err(anyhow!("No registries are configured")); + } + if self.registries.iter().any(|entry| entry == registry) { + return Ok(registry.to_string()); + } + Err(anyhow!("Registry '{registry}' is not configured")) + } + + async fn download_bundle( + &self, + manifest: &crate::marketplace::PluginManifest, + tracker: &JobTracker, + cancel: &CancellationToken, + registry_origin: &OriginKey, + base_real: &Path, + ) -> Result { + let bundle_url = self + 
.marketplace_policy + .validate_url("bundle url", &manifest.bundle.url, Some(registry_origin)) + .await?; + let cache_dir = self.plugin_dir.join("cache").join(&manifest.id).join(&manifest.version); + plugin_paths::ensure_dir_under(base_real, &cache_dir, "cache").await?; + + let file_name = bundle_url + .path_segments() + .and_then(|mut segments| segments.next_back()) + .filter(|name| !name.is_empty()) + .unwrap_or("bundle.tar.zst"); + plugin_paths::validate_path_component("bundle file name", file_name)?; + + let bundle_path = cache_dir.join(file_name); + let temp_path = cache_dir.join(format!(".download-{}", Uuid::new_v4())); + + let download_result: Result<(), InstallError> = async { + let (response, final_url) = validated_get_response( + &self.download_client, + &self.marketplace_policy, + "bundle url", + &bundle_url, + Some(registry_origin), + None, + ) + .await?; + let response = response + .error_for_status() + .with_context(|| format!("Bundle download failed for {final_url}"))?; + let total_bytes = response.content_length(); + let mut stream = response.bytes_stream(); + + let mut file = tokio::fs::File::create(&temp_path).await.with_context(|| { + format!("Failed to create bundle file {temp_path}", temp_path = temp_path.display()) + })?; + let mut hasher = Sha256::new(); + let mut bytes_done = 0u64; + + while let Some(chunk) = stream.next().await { + let chunk = chunk.with_context(|| "Failed to read bundle download stream")?; + if cancel.is_cancelled() { + return Err(InstallError::Cancelled); + } + file.write_all(&chunk).await.with_context(|| { + format!("Failed to write bundle {temp_path}", temp_path = temp_path.display()) + })?; + hasher.update(&chunk); + bytes_done = bytes_done.saturating_add(chunk.len() as u64); + + let progress = JobProgress { + bytes_done: Some(bytes_done), + bytes_total: total_bytes, + ..JobProgress::default() + }; + tracker.update_progress(STEP_DOWNLOAD_BUNDLE, progress).await; + } + + file.flush().await.with_context(|| { + 
format!("Failed to flush bundle {temp_path}", temp_path = temp_path.display()) + })?; + file.sync_all().await.with_context(|| { + format!("Failed to sync bundle {temp_path}", temp_path = temp_path.display()) + })?; + + let actual_hash = to_hex(&hasher.finalize()); + if !actual_hash.eq_ignore_ascii_case(&manifest.bundle.sha256) { + let expected = manifest.bundle.sha256.as_str(); + let actual = actual_hash.as_str(); + return Err( + anyhow!("Bundle hash mismatch: expected {expected}, got {actual}").into() + ); + } + + Ok(()) + } + .await; + + if let Err(err) = download_result { + let _ = tokio::fs::remove_file(&temp_path).await; + return Err(err); + } + + if let Err(err) = tokio::fs::rename(&temp_path, &bundle_path).await { + let _ = tokio::fs::remove_file(&temp_path).await; + return Err(anyhow!( + "Failed to activate bundle download {bundle_path}: {err}", + bundle_path = bundle_path.display() + ) + .into()); + } + + Ok(bundle_path) + } + + async fn extract_bundle( + &self, + manifest: &crate::marketplace::PluginManifest, + bundle_path: &Path, + base_real: &Path, + cancel: &CancellationToken, + ) -> Result { + if cancel.is_cancelled() { + return Err(InstallError::Cancelled); + } + + let bundles_root = self.plugin_dir.join("bundles").join(&manifest.id); + plugin_paths::ensure_dir_under(base_real, &bundles_root, "bundles").await?; + + let bundle_dir = bundles_root.join(&manifest.version); + if bundle_dir.exists() { + let version = manifest.version.as_str(); + return Err(anyhow!("Bundle version '{version}' is already installed").into()); + } + + let temp_id = Uuid::new_v4(); + let temp_dir = bundles_root.join(format!(".tmp-{temp_id}")); + tokio::fs::create_dir_all(&temp_dir).await.with_context(|| { + format!("Failed to create temp dir {temp_dir}", temp_dir = temp_dir.display()) + })?; + + let bundle_path = bundle_path.to_path_buf(); + let temp_dir_clone = temp_dir.clone(); + let entrypoint = manifest.entrypoint.clone(); + let plugin_kind = manifest.kind.clone(); + let 
cancel_clone = cancel.clone(); + + let extraction = tokio::task::spawn_blocking(move || -> Result<(), anyhow::Error> { + validate_entrypoint(&entrypoint)?; + let file = std::fs::File::open(&bundle_path).with_context(|| { + format!("Failed to open bundle {bundle_path}", bundle_path = bundle_path.display()) + })?; + let reader: Box = + match bundle_path.extension().and_then(|ext| ext.to_str()) { + Some("zst") => Box::new(zstd::stream::read::Decoder::new(file)?), + Some("gz") => Box::new(flate2::read::GzDecoder::new(file)), + Some("tar") | None => Box::new(file), + Some(other) => { + return Err(anyhow!("Unsupported bundle extension '{other}'")); + }, + }; + safe_extract_archive(reader, &temp_dir_clone, Some(&cancel_clone))?; + + let entrypoint_path = temp_dir_clone.join(&entrypoint); + validate_entrypoint_for_kind(&plugin_kind, &entrypoint_path)?; + + Ok(()) + }) + .await + .context("Bundle extraction task failed"); + + if cancel.is_cancelled() { + let _ = tokio::fs::remove_dir_all(&temp_dir).await; + return Err(InstallError::Cancelled); + } + + match extraction { + Ok(Ok(())) => {}, + Ok(Err(err)) => { + let _ = tokio::fs::remove_dir_all(&temp_dir).await; + if cancel.is_cancelled() { + return Err(InstallError::Cancelled); + } + return Err(err.into()); + }, + Err(err) => { + let _ = tokio::fs::remove_dir_all(&temp_dir).await; + return Err(err.into()); + }, + } + + if cancel.is_cancelled() { + let _ = tokio::fs::remove_dir_all(&temp_dir).await; + return Err(InstallError::Cancelled); + } + + tokio::fs::rename(&temp_dir, &bundle_dir).await.with_context(|| { + format!("Failed to activate bundle {bundle_dir}", bundle_dir = bundle_dir.display()) + })?; + + Ok(bundle_dir) + } + + async fn activate_bundle( + &self, + manifest: &crate::marketplace::PluginManifest, + bundle_dir: &Path, + base_real: &Path, + ) -> Result { + let active_dir = plugin_active_dir(&self.plugin_dir); + plugin_paths::ensure_dir_under(base_real, &active_dir, "active").await?; + + let entrypoint_path = 
bundle_dir.join(&manifest.entrypoint); + let record = ActivePluginRecord { + plugin_id: manifest.id.clone(), + version: manifest.version.clone(), + node_kind: manifest.node_kind.clone(), + kind: manifest.kind.clone(), + entrypoint: entrypoint_path.to_string_lossy().into_owned(), + installed_at_ms: now_ms(), + }; + let record_path = plugin_record_path(&self.plugin_dir, &manifest.id)?; + let payload = serde_json::to_vec_pretty(&record) + .context("Failed to serialize active plugin record")?; + tokio::fs::write(&record_path, payload).await.with_context(|| { + format!( + "Failed to write active plugin record {record_path}", + record_path = record_path.display() + ) + })?; + + Ok(entrypoint_path) + } + + async fn load_plugin( + &self, + manifest: &crate::marketplace::PluginManifest, + entrypoint_path: &Path, + expected_kind: &str, + ) -> Result { + let plugin_type = match manifest.kind { + PluginKind::Wasm => PluginType::Wasm, + PluginKind::Native => PluginType::Native, + }; + let entrypoint_path = entrypoint_path.to_path_buf(); + let manager = Arc::clone(&self.plugin_manager); + let expected_kind_owned = expected_kind.to_string(); + + let unloaded = tokio::task::spawn_blocking({ + let manager = Arc::clone(&manager); + let expected_kind = expected_kind_owned.clone(); + move || -> anyhow::Result { + let mut mgr = manager.blocking_lock(); + let unloaded = if mgr.is_plugin_loaded(&expected_kind) { + mgr.unload_plugin(&expected_kind, false)?; + true + } else { + false + }; + drop(mgr); + Ok(unloaded) + } + }) + .await + .context("Plugin unload task failed")??; + if unloaded { + info!(plugin = %expected_kind_owned, "Unloaded existing plugin before install"); + } + + let summary = tokio::task::spawn_blocking(move || { + let mut mgr = manager.blocking_lock(); + mgr.load_from_path(plugin_type, entrypoint_path) + }) + .await + .context("Plugin load task failed")??; + + if summary.kind != expected_kind { + let manager = Arc::clone(&self.plugin_manager); + let summary_kind = 
summary.kind.clone(); + let _ = tokio::task::spawn_blocking(move || { + let mut mgr = manager.blocking_lock(); + let _ = mgr.unload_plugin(&summary_kind, true); + }) + .await; + + let actual_kind = summary.kind.as_str(); + return Err(anyhow!( + "Loaded plugin kind '{actual_kind}' does not match manifest kind '{expected_kind}'" + ) + .into()); + } + + Ok(summary) + } + + async fn download_models( + &self, + manifest: &crate::marketplace::PluginManifest, + model_ids: Option<&[String]>, + tracker: &JobTracker, + cancel: &CancellationToken, + registry_origin: Option<&OriginKey>, + ) -> Result<(), InstallError> { + tokio::fs::create_dir_all(&self.models_dir).await.with_context(|| { + format!( + "Failed to create models dir {models_dir}", + models_dir = self.models_dir.display() + ) + })?; + + let selected_models = select_models(&manifest.models, model_ids)?; + let items_total = selected_models + .iter() + .map(|model| match &model.source { + crate::marketplace::ModelSource::Huggingface { files, .. } => files.len() as u64, + crate::marketplace::ModelSource::Url { .. 
} => 1, + }) + .sum::(); + + let mut items_done = 0u64; + + for model in selected_models { + if cancel.is_cancelled() { + return Err(InstallError::Cancelled); + } + + let expected_bytes = if model.expected_size_bytes.is_some() && items_total == 1 { + model.expected_size_bytes + } else { + None + }; + + match &model.source { + crate::marketplace::ModelSource::Huggingface { repo_id, revision, files } => { + if model.gated && self.huggingface_token.is_none() { + return Err(anyhow!( + "Hugging Face token is required for gated model downloads" + ) + .into()); + } + let revision = revision.as_deref().unwrap_or("main"); + let expected_sha256 = + if files.len() == 1 { model.sha256.as_deref() } else { None }; + for file in files { + if cancel.is_cancelled() { + return Err(InstallError::Cancelled); + } + let file_path = Path::new(file); + if !is_safe_relative_path(file_path) { + return Err(anyhow!("Invalid model file path '{file}'").into()); + } + let target_path = self.models_dir.join(file_path); + let display_name = file.as_str(); + let url = huggingface_model_url(repo_id, revision, file)?; + self.download_model_file( + DownloadModelRequest { + url: &url, + target_path: &target_path, + display_name, + items_done, + items_total, + expected_size: expected_bytes, + expected_sha256, + bearer_token: self.huggingface_token.as_deref(), + registry_origin: None, + }, + tracker, + cancel, + ) + .await?; + items_done = items_done.saturating_add(1); + } + }, + crate::marketplace::ModelSource::Url { url } => { + if cancel.is_cancelled() { + return Err(InstallError::Cancelled); + } + if !self.allow_model_urls { + return Err( + anyhow!("Model URL downloads are disabled by configuration").into() + ); + } + let parsed = self + .marketplace_policy + .validate_url("model url", url, registry_origin) + .await?; + let file_name = parsed + .path_segments() + .and_then(|mut segments| segments.next_back()) + .filter(|name| !name.is_empty()) + .ok_or_else(|| anyhow!("Could not determine file name 
from model URL"))?; + let file_path = Path::new(file_name); + if !is_safe_relative_path(file_path) { + return Err(anyhow!("Invalid model file name '{file_name}'").into()); + } + let target_path = self.models_dir.join(file_path); + let display_name = file_name; + self.download_model_file( + DownloadModelRequest { + url: parsed.as_str(), + target_path: &target_path, + display_name, + items_done, + items_total, + expected_size: expected_bytes, + expected_sha256: model.sha256.as_deref(), + bearer_token: None, + registry_origin: registry_origin.cloned(), + }, + tracker, + cancel, + ) + .await?; + items_done = items_done.saturating_add(1); + }, + } + + tracker + .update_progress( + STEP_DOWNLOAD_MODELS, + JobProgress { + items_done: Some(items_done), + items_total: Some(items_total), + ..JobProgress::default() + }, + ) + .await; + } + + Ok(()) + } + + async fn download_model_file( + &self, + request: DownloadModelRequest<'_>, + tracker: &JobTracker, + cancel: &CancellationToken, + ) -> Result<(), InstallError> { + let DownloadModelRequest { + url, + target_path, + display_name, + items_done, + items_total, + expected_size, + expected_sha256, + bearer_token, + registry_origin, + } = request; + if target_path.exists() { + self.maybe_extract_model_archive(target_path, cancel).await?; + tracker + .update_progress( + STEP_DOWNLOAD_MODELS, + JobProgress { + items_done: Some(items_done.saturating_add(1)), + items_total: Some(items_total), + current_item: Some(display_name.to_owned()), + ..JobProgress::default() + }, + ) + .await; + return Ok(()); + } + + if let Some(parent) = target_path.parent() { + tokio::fs::create_dir_all(parent).await.with_context(|| { + format!("Failed to create model dir {parent}", parent = parent.display()) + })?; + } + + let parsed = self + .marketplace_policy + .validate_url("model url", url, registry_origin.as_ref()) + .await?; + let (response, final_url) = validated_get_response( + &self.download_client, + &self.marketplace_policy, + "model url", + 
&parsed, + registry_origin.as_ref(), + bearer_token, + ) + .await?; + let response = response + .error_for_status() + .with_context(|| format!("Model download failed for {final_url}"))?; + let total_bytes = response.content_length().or(expected_size); + let mut stream = response.bytes_stream(); + + let temp_path = target_path.with_extension(format!("download-{}", Uuid::new_v4())); + let download_result: Result<(), InstallError> = async { + let mut file = tokio::fs::File::create(&temp_path).await.with_context(|| { + format!("Failed to create model file {temp_path}", temp_path = temp_path.display()) + })?; + + let mut hasher = expected_sha256.map(|_| Sha256::new()); + let mut bytes_done = 0u64; + + while let Some(chunk) = stream.next().await { + let chunk = chunk.with_context(|| "Failed to read model download stream")?; + if cancel.is_cancelled() { + return Err(InstallError::Cancelled); + } + file.write_all(&chunk).await.with_context(|| { + format!( + "Failed to write model file {temp_path}", + temp_path = temp_path.display() + ) + })?; + if let Some(ref mut hasher) = hasher { + hasher.update(&chunk); + } + bytes_done = bytes_done.saturating_add(chunk.len() as u64); + + tracker + .update_progress( + STEP_DOWNLOAD_MODELS, + JobProgress { + bytes_done: Some(bytes_done), + bytes_total: total_bytes, + items_done: Some(items_done), + items_total: Some(items_total), + current_item: Some(display_name.to_owned()), + ..JobProgress::default() + }, + ) + .await; + } + + file.flush().await.with_context(|| { + format!("Failed to flush model file {temp_path}", temp_path = temp_path.display()) + })?; + file.sync_all().await.with_context(|| { + format!("Failed to sync model file {temp_path}", temp_path = temp_path.display()) + })?; + + if let (Some(expected_hash), Some(hasher)) = (expected_sha256, hasher) { + let actual_hash = to_hex(&hasher.finalize()); + if !actual_hash.eq_ignore_ascii_case(expected_hash) { + return Err(anyhow!( + "Model hash mismatch: expected {expected_hash}, 
got {actual_hash}" + ) + .into()); + } + } + + Ok(()) + } + .await; + + if let Err(err) = download_result { + let _ = tokio::fs::remove_file(&temp_path).await; + return Err(err); + } + + if let Err(err) = tokio::fs::rename(&temp_path, target_path).await { + let _ = tokio::fs::remove_file(&temp_path).await; + return Err(anyhow!( + "Failed to move model file to {target_path}: {err}", + target_path = target_path.display() + ) + .into()); + } + + self.maybe_extract_model_archive(target_path, cancel).await?; + + Ok(()) + } + + async fn maybe_extract_model_archive( + &self, + archive_path: &Path, + cancel: &CancellationToken, + ) -> Result<(), InstallError> { + let Some(kind) = model_archive_kind(archive_path) else { + return Ok(()); + }; + if let Some(dir) = model_archive_dir(archive_path, &self.models_dir) { + if dir.exists() { + return Ok(()); + } + } + if cancel.is_cancelled() { + return Err(InstallError::Cancelled); + } + tokio::fs::create_dir_all(&self.models_dir).await.with_context(|| { + format!( + "Failed to create models dir {models_dir}", + models_dir = self.models_dir.display() + ) + })?; + + let archive_path = archive_path.to_path_buf(); + let models_dir = self.models_dir.clone(); + let cancel_clone = cancel.clone(); + let extraction = tokio::task::spawn_blocking(move || -> Result<(), anyhow::Error> { + let file = std::fs::File::open(&archive_path).with_context(|| { + format!( + "Failed to open model archive {archive_path}", + archive_path = archive_path.display() + ) + })?; + let reader: Box = match kind { + ModelArchiveKind::TarZst => Box::new(zstd::stream::read::Decoder::new(file)?), + ModelArchiveKind::TarGz => Box::new(flate2::read::GzDecoder::new(file)), + ModelArchiveKind::TarBz2 => Box::new(bzip2::read::BzDecoder::new(file)), + ModelArchiveKind::Tar => Box::new(file), + }; + safe_extract_archive(reader, &models_dir, Some(&cancel_clone))?; + Ok(()) + }) + .await + .context("Model archive extraction task failed"); + + if cancel.is_cancelled() { + 
return Err(InstallError::Cancelled); + } + + match extraction { + Ok(Ok(())) => Ok(()), + Ok(Err(err)) | Err(err) => Err(err.into()), + } + } +} + +#[derive(Debug)] +enum InstallError { + Cancelled, + Other(anyhow::Error), +} + +impl From for InstallError { + fn from(err: anyhow::Error) -> Self { + Self::Other(err) + } +} + +fn install_steps() -> Vec { + vec![ + JobStep { + name: STEP_DOWNLOAD_MANIFEST.to_string(), + status: StepStatus::Pending, + progress: None, + error: None, + }, + JobStep { + name: STEP_VERIFY_SIGNATURE.to_string(), + status: StepStatus::Pending, + progress: None, + error: None, + }, + JobStep { + name: STEP_DOWNLOAD_BUNDLE.to_string(), + status: StepStatus::Pending, + progress: None, + error: None, + }, + JobStep { + name: STEP_EXTRACT_BUNDLE.to_string(), + status: StepStatus::Pending, + progress: None, + error: None, + }, + JobStep { + name: STEP_ACTIVATE.to_string(), + status: StepStatus::Pending, + progress: None, + error: None, + }, + JobStep { + name: STEP_LOAD_PLUGIN.to_string(), + status: StepStatus::Pending, + progress: None, + error: None, + }, + JobStep { + name: STEP_DOWNLOAD_MODELS.to_string(), + status: StepStatus::Pending, + progress: None, + error: None, + }, + ] +} + +fn select_registry_version<'a>( + index: &'a RegistryIndex, + plugin_id: &str, + version: Option<&str>, +) -> Result<&'a crate::marketplace::RegistryPluginVersion> { + let plugin = index + .plugins + .iter() + .find(|entry| entry.id == plugin_id) + .ok_or_else(|| anyhow!("Plugin '{plugin_id}' not found in registry"))?; + let version = match version { + Some(v) => v, + None => plugin.latest.as_deref().ok_or_else(|| { + anyhow!("Registry does not specify a latest version for '{plugin_id}'") + })?, + }; + plugin + .versions + .iter() + .find(|entry| entry.version == version) + .ok_or_else(|| anyhow!("Version '{version}' not found for plugin '{plugin_id}'")) +} + +fn select_models<'a>( + models: &'a [crate::marketplace::ModelSpec], + model_ids: Option<&[String]>, +) 
-> Result> { + let Some(model_ids) = model_ids else { + return Ok(models.iter().collect()); + }; + if model_ids.is_empty() { + return Err(anyhow!("Model selection cannot be empty")); + } + + let mut by_id = HashMap::new(); + for model in models { + if let Some(id) = model.id.as_deref() { + if by_id.insert(id, model).is_some() { + return Err(anyhow!("Duplicate model id '{id}' in manifest")); + } + } + } + if by_id.is_empty() { + return Err(anyhow!("Model selection requires manifest models to include ids")); + } + + let mut selected = Vec::new(); + let mut seen = HashSet::new(); + for id in model_ids { + if !seen.insert(id.as_str()) { + continue; + } + let Some(model) = by_id.get(id.as_str()) else { + return Err(anyhow!("Unknown model id '{id}'")); + }; + selected.push(*model); + } + + Ok(selected) +} + +fn namespaced_kind(manifest: &crate::marketplace::PluginManifest) -> String { + match manifest.kind { + PluginKind::Wasm => format!("plugin::wasm::{node_kind}", node_kind = manifest.node_kind), + PluginKind::Native => { + format!("plugin::native::{node_kind}", node_kind = manifest.node_kind) + }, + } +} + +fn validate_manifest_compatibility(manifest: &crate::marketplace::PluginManifest) -> Result<()> { + let Some(compatibility) = &manifest.compatibility else { + return Ok(()); + }; + + if !compatibility.os.is_empty() { + let current_os = std::env::consts::OS; + if !compatibility.os.iter().any(|entry| entry.eq_ignore_ascii_case(current_os)) { + return Err(anyhow!("Plugin is not compatible with OS '{current_os}'")); + } + } + + if !compatibility.arch.is_empty() { + let current_arch = std::env::consts::ARCH; + if !compatibility.arch.iter().any(|entry| entry.eq_ignore_ascii_case(current_arch)) { + return Err(anyhow!("Plugin is not compatible with architecture '{current_arch}'")); + } + } + + if let Some(requirement) = compatibility.streamkit.as_deref() { + let requirement = VersionReq::parse(requirement).with_context(|| { + format!("Invalid streamkit compatibility 
requirement '{requirement}'") + })?; + let current = + Version::parse(env!("CARGO_PKG_VERSION")).context("Invalid StreamKit version")?; + if !requirement.matches(¤t) { + return Err(anyhow!( + "Plugin requires StreamKit {requirement}, current version is {current}" + )); + } + } + + Ok(()) +} + +fn validate_entrypoint(entrypoint: &str) -> Result<()> { + let path = Path::new(entrypoint); + if path.as_os_str().is_empty() { + return Err(anyhow!("Entrypoint must not be empty")); + } + if path.is_absolute() { + return Err(anyhow!("Entrypoint must be a relative path")); + } + if !is_safe_relative_path(path) { + return Err(anyhow!("Entrypoint contains invalid path segments")); + } + Ok(()) +} + +fn validate_entrypoint_for_kind(kind: &PluginKind, entrypoint_path: &Path) -> Result<()> { + let extension = entrypoint_path.extension().and_then(|ext| ext.to_str()); + match kind { + PluginKind::Wasm => { + if extension != Some("wasm") { + return Err(anyhow!("WASM entrypoint must have .wasm extension")); + } + }, + PluginKind::Native => { + if extension != Some("so") && extension != Some("dylib") && extension != Some("dll") { + return Err(anyhow!("Native entrypoint must be a shared library")); + } + }, + } + Ok(()) +} + +fn safe_extract_archive( + reader: R, + dest: &Path, + cancel: Option<&CancellationToken>, +) -> Result<()> { + let mut archive = tar::Archive::new(reader); + for entry in archive.entries().context("Failed to read bundle archive")? 
{ + if let Some(cancel) = cancel { + if cancel.is_cancelled() { + return Err(anyhow!("Bundle extraction cancelled")); + } + } + let mut entry = entry.context("Failed to read archive entry")?; + let path = entry.path().context("Failed to read entry path")?.to_path_buf(); + if !is_safe_relative_path(&path) { + let path_display = path.display(); + return Err(anyhow!("Unsafe path in bundle: {path_display}")); + } + + let entry_type = entry.header().entry_type(); + if entry_type == tar::EntryType::Symlink || entry_type == tar::EntryType::Link { + return Err(anyhow!("Symlinks and hardlinks are not allowed in bundles")); + } + + let target = dest.join(&path); + if entry_type != tar::EntryType::Directory { + if let Some(parent) = target.parent() { + std::fs::create_dir_all(parent).with_context(|| { + format!("Failed to create parent directory {parent}", parent = parent.display()) + })?; + } + } + entry + .unpack(&target) + .with_context(|| format!("Failed to extract {target}", target = target.display()))?; + } + + Ok(()) +} + +#[derive(Clone, Copy, Debug)] +enum ModelArchiveKind { + Tar, + TarGz, + TarBz2, + TarZst, +} + +fn model_archive_kind(path: &Path) -> Option { + let ext = path.extension()?.to_str()?; + if ext.eq_ignore_ascii_case("tar") { + return Some(ModelArchiveKind::Tar); + } + if ext.eq_ignore_ascii_case("tgz") { + return Some(ModelArchiveKind::TarGz); + } + if ext.eq_ignore_ascii_case("tbz2") { + return Some(ModelArchiveKind::TarBz2); + } + if ext.eq_ignore_ascii_case("tzst") { + return Some(ModelArchiveKind::TarZst); + } + if ext.eq_ignore_ascii_case("gz") + && path + .file_stem() + .and_then(|stem| Path::new(stem).extension()) + .is_some_and(|ext| ext.eq_ignore_ascii_case("tar")) + { + return Some(ModelArchiveKind::TarGz); + } + if ext.eq_ignore_ascii_case("bz2") + && path + .file_stem() + .and_then(|stem| Path::new(stem).extension()) + .is_some_and(|ext| ext.eq_ignore_ascii_case("tar")) + { + return Some(ModelArchiveKind::TarBz2); + } + if 
ext.eq_ignore_ascii_case("zst") + && path + .file_stem() + .and_then(|stem| Path::new(stem).extension()) + .is_some_and(|ext| ext.eq_ignore_ascii_case("tar")) + { + return Some(ModelArchiveKind::TarZst); + } + None +} + +fn model_archive_dir(path: &Path, base_dir: &Path) -> Option { + let kind = model_archive_kind(path)?; + let file_stem = path.file_stem()?.to_str()?; + let base = match kind { + ModelArchiveKind::Tar => file_stem.to_string(), + ModelArchiveKind::TarGz | ModelArchiveKind::TarBz2 | ModelArchiveKind::TarZst => { + let stem_path = Path::new(file_stem); + if stem_path.extension().is_some_and(|ext| ext.eq_ignore_ascii_case("tar")) { + stem_path.file_stem()?.to_str()?.to_string() + } else { + file_stem.to_string() + } + }, + }; + Some(base_dir.join(base)) +} + +fn is_safe_relative_path(path: &Path) -> bool { + if path.is_absolute() { + return false; + } + path.components().all(|component| match component { + Component::Normal(_) | Component::CurDir => true, + Component::ParentDir | Component::RootDir | Component::Prefix(_) => false, + }) +} + +fn huggingface_model_url(repo_id: &str, revision: &str, file: &str) -> Result { + let mut url = reqwest::Url::parse("https://huggingface.co")?; + { + let mut segments = url + .path_segments_mut() + .map_err(|()| anyhow!("Failed to build Hugging Face model URL"))?; + for segment in repo_id.split('/').filter(|segment| !segment.is_empty()) { + segments.push(segment); + } + segments.push("resolve"); + for segment in revision.split('/').filter(|segment| !segment.is_empty()) { + segments.push(segment); + } + for segment in file.split('/').filter(|segment| !segment.is_empty()) { + segments.push(segment); + } + } + Ok(url.to_string()) +} + +fn to_hex(bytes: &[u8]) -> String { + let mut out = String::with_capacity(bytes.len() * 2); + for byte in bytes { + let _ = write!(&mut out, "{byte:02x}"); + } + out +} + +fn now_ms() -> u128 { + SystemTime::now().duration_since(UNIX_EPOCH).map(|duration| 
duration.as_millis()).unwrap_or(0) +} + +#[cfg(test)] +mod tests { + use super::*; + use anyhow::{anyhow, bail, Context, Result}; + use std::sync::Arc; + + use crate::plugins::UnifiedPluginManager; + use axum::{routing::get, Router}; + use bytes::Bytes; + use sha2::Sha256; + use tokio::net::TcpListener; + use tokio::sync::oneshot; + use tokio::task::JoinHandle; + + fn make_job(status: JobStatus) -> InstallJob { + InstallJob { + info: JobInfo { + status, + started_at_ms: None, + updated_at_ms: now_ms(), + summary: "test".to_string(), + steps: install_steps(), + }, + cancel: CancellationToken::new(), + request: InstallPluginRequest { + registry: "registry".to_string(), + plugin_id: "plugin".to_string(), + version: None, + install_models: false, + model_ids: None, + }, + permissions: Permissions::default(), + } + } + + #[test] + fn prune_jobs_drops_oldest_queued_when_over_cap() { + let mut state = InstallQueueState::default(); + let total = MAX_JOB_HISTORY + 2; + + for idx in 0..total { + let is_queued = idx != 0; + let status = if is_queued { JobStatus::Queued } else { JobStatus::Running }; + let job_id = format!("job-{idx}"); + state.jobs.insert(job_id.clone(), make_job(status)); + state.job_order.push_back(job_id.clone()); + if is_queued { + state.queue.push_back(job_id); + } + } + + state.prune_jobs(); + + assert_eq!(state.jobs.len(), MAX_JOB_HISTORY); + assert!(state.jobs.contains_key("job-0")); + assert!(!state.jobs.contains_key("job-1")); + assert!(!state.jobs.contains_key("job-2")); + assert_eq!(state.queue.len(), MAX_JOB_HISTORY - 1); + } + + fn test_plugin_manager(plugin_dir: &Path) -> Result { + let resource_manager = + Arc::new(streamkit_core::ResourceManager::new(streamkit_core::ResourcePolicy { + keep_loaded: true, + max_memory_mb: None, + })); + let engine = + Arc::new(streamkit_engine::Engine::with_resource_manager(resource_manager.clone())); + let wasm_dir = plugin_dir.join("wasm"); + let native_dir = plugin_dir.join("native"); + let manager = 
UnifiedPluginManager::new( + engine, + resource_manager, + plugin_dir.to_path_buf(), + wasm_dir, + native_dir, + )?; + Ok(Arc::new(tokio::sync::Mutex::new(manager))) + } + + async fn start_file_server( + payload: Bytes, + ) -> Result<(std::net::SocketAddr, oneshot::Sender<()>, JoinHandle>)> { + start_file_server_with_path("/model.bin", payload).await + } + + async fn start_file_server_with_path( + path: &str, + payload: Bytes, + ) -> Result<(std::net::SocketAddr, oneshot::Sender<()>, JoinHandle>)> { + let path = path.to_string(); + let app = Router::new().route( + path.as_str(), + get(move || { + let payload = payload.clone(); + async move { + ([(axum::http::header::CONTENT_TYPE, "application/octet-stream")], payload) + } + }), + ); + let listener = TcpListener::bind("127.0.0.1:0").await?; + let addr = listener.local_addr()?; + let (shutdown_tx, shutdown_rx) = oneshot::channel(); + let handle = tokio::spawn(async move { + axum::serve(listener, app.into_make_service()) + .with_graceful_shutdown(async move { + let _ = shutdown_rx.await; + }) + .await + .context("serve test server")?; + Ok(()) + }); + Ok((addr, shutdown_tx, handle)) + } + + fn test_manifest( + models: Vec, + ) -> crate::marketplace::PluginManifest { + crate::marketplace::PluginManifest { + schema_version: 1, + id: "test".to_string(), + name: None, + version: "1.0.0".to_string(), + node_kind: "test".to_string(), + kind: PluginKind::Native, + description: None, + license: None, + license_url: None, + homepage: None, + repository: None, + entrypoint: "libtest.so".to_string(), + bundle: crate::marketplace::PluginBundle { + url: "http://example.com/bundle.tar.zst".to_string(), + sha256: "deadbeef".to_string(), + size_bytes: None, + }, + compatibility: None, + models, + } + } + + #[tokio::test] + async fn download_model_from_url() -> Result<()> { + let payload = Bytes::from_static(b"model-bytes"); + let (addr, shutdown_tx, server_handle) = match start_file_server(payload.clone()).await { + Ok(values) => 
values, + Err(err) => { + if let Some(io_err) = err.downcast_ref::() { + if io_err.kind() == std::io::ErrorKind::PermissionDenied { + tracing::warn!(error = %err, "Skipping model download test"); + return Ok(()); + } + } + return Err(err); + }, + }; + let url = format!("http://{addr}/model.bin"); + + let temp_dir = tempfile::tempdir()?; + let plugin_dir = temp_dir.path().join("plugins"); + tokio::fs::create_dir_all(&plugin_dir).await?; + + let mut hasher = Sha256::new(); + hasher.update(&payload); + let hash = to_hex(&hasher.finalize()); + + let config = PluginConfig { + directory: plugin_dir.to_string_lossy().to_string(), + http_management: crate::config::PluginHttpConfig { allow_http_management: false }, + marketplace: crate::config::PluginMarketplaceConfig { + marketplace_enabled: true, + allow_native_marketplace: true, + security: crate::config::PluginMarketplaceSecurityConfig { + allow_model_urls: true, + marketplace_scheme_policy: crate::config::MarketplaceSchemePolicy::AllowHttp, + marketplace_host_policy: crate::config::MarketplaceHostPolicy::AllowPrivate, + marketplace_url_allowlist: vec!["http://127.0.0.1:*".to_string()], + ..crate::config::PluginMarketplaceSecurityConfig::default() + }, + }, + trusted_pubkeys: Vec::new(), + registries: Vec::new(), + models_dir: Some(temp_dir.path().join("models").to_string_lossy().to_string()), + huggingface_token: None, + }; + + let queue = InstallJobQueue::new(&config, test_plugin_manager(&plugin_dir)?)?; + let manifest = test_manifest(vec![crate::marketplace::ModelSpec { + id: None, + name: None, + default: false, + source: crate::marketplace::ModelSource::Url { url: url.clone() }, + expected_size_bytes: Some(payload.len() as u64), + sha256: Some(hash), + license: None, + license_url: None, + gated: false, + }]); + let tracker = JobTracker { job_id: "test".to_string(), queue: queue.clone() }; + let cancel = CancellationToken::new(); + + let registry_origin = + 
origin_key(&reqwest::Url::parse("https://registry.example.com/index.json")?)?; + queue + .installer + .download_models(&manifest, None, &tracker, &cancel, Some(®istry_origin)) + .await + .map_err(|err| match err { + InstallError::Cancelled => anyhow!("download cancelled"), + InstallError::Other(err) => err, + })?; + + let target_path = temp_dir.path().join("models").join("model.bin"); + let downloaded = tokio::fs::read(&target_path).await?; + assert_eq!(downloaded, payload); + + let _ = shutdown_tx.send(()); + server_handle.await.context("file server task panicked")??; + + Ok(()) + } + + #[tokio::test] + async fn download_model_archive_extracts() -> Result<()> { + let mut tar_bytes = Vec::new(); + { + let encoder = bzip2::write::BzEncoder::new(&mut tar_bytes, bzip2::Compression::best()); + let mut builder = tar::Builder::new(encoder); + let mut header = tar::Header::new_gnu(); + let contents = b"model-data"; + header.set_size(contents.len() as u64); + header.set_mode(0o644); + header.set_cksum(); + builder.append_data(&mut header, "model-dir/model.txt", &contents[..])?; + let encoder = builder.into_inner()?; + encoder.finish()?; + } + + let payload = Bytes::from(tar_bytes); + let (addr, shutdown_tx, server_handle) = + match start_file_server_with_path("/model.tar.bz2", payload.clone()).await { + Ok(values) => values, + Err(err) => { + if let Some(io_err) = err.downcast_ref::() { + if io_err.kind() == std::io::ErrorKind::PermissionDenied { + tracing::warn!(error = %err, "Skipping model archive test"); + return Ok(()); + } + } + return Err(err); + }, + }; + let url = format!("http://{addr}/model.tar.bz2"); + + let temp_dir = tempfile::tempdir()?; + let plugin_dir = temp_dir.path().join("plugins"); + tokio::fs::create_dir_all(&plugin_dir).await?; + + let mut hasher = Sha256::new(); + hasher.update(&payload); + let hash = to_hex(&hasher.finalize()); + + let config = PluginConfig { + directory: plugin_dir.to_string_lossy().to_string(), + http_management: 
crate::config::PluginHttpConfig { allow_http_management: false }, + marketplace: crate::config::PluginMarketplaceConfig { + marketplace_enabled: true, + allow_native_marketplace: true, + security: crate::config::PluginMarketplaceSecurityConfig { + allow_model_urls: true, + marketplace_scheme_policy: crate::config::MarketplaceSchemePolicy::AllowHttp, + marketplace_host_policy: crate::config::MarketplaceHostPolicy::AllowPrivate, + marketplace_url_allowlist: vec!["http://127.0.0.1:*".to_string()], + ..crate::config::PluginMarketplaceSecurityConfig::default() + }, + }, + trusted_pubkeys: Vec::new(), + registries: Vec::new(), + models_dir: Some(temp_dir.path().join("models").to_string_lossy().to_string()), + huggingface_token: None, + }; + + let queue = InstallJobQueue::new(&config, test_plugin_manager(&plugin_dir)?)?; + let manifest = test_manifest(vec![crate::marketplace::ModelSpec { + id: None, + name: None, + default: false, + source: crate::marketplace::ModelSource::Url { url: url.clone() }, + expected_size_bytes: Some(payload.len() as u64), + sha256: Some(hash), + license: None, + license_url: None, + gated: false, + }]); + let tracker = JobTracker { job_id: "test".to_string(), queue: queue.clone() }; + let cancel = CancellationToken::new(); + + let registry_origin = + origin_key(&reqwest::Url::parse("https://registry.example.com/index.json")?)?; + queue + .installer + .download_models(&manifest, None, &tracker, &cancel, Some(®istry_origin)) + .await + .map_err(|err| match err { + InstallError::Cancelled => anyhow!("download cancelled"), + InstallError::Other(err) => err, + })?; + + let extracted_path = temp_dir.path().join("models/model-dir/model.txt"); + let extracted = tokio::fs::read(&extracted_path).await?; + assert_eq!(extracted, b"model-data"); + + let _ = shutdown_tx.send(()); + server_handle.await.context("file server task panicked")??; + + Ok(()) + } + + #[tokio::test] + async fn gated_models_require_token() -> Result<()> { + let temp_dir = 
tempfile::tempdir()?; + let plugin_dir = temp_dir.path().join("plugins"); + tokio::fs::create_dir_all(&plugin_dir).await?; + + let config = PluginConfig { + directory: plugin_dir.to_string_lossy().to_string(), + http_management: crate::config::PluginHttpConfig { allow_http_management: false }, + marketplace: crate::config::PluginMarketplaceConfig { + marketplace_enabled: true, + allow_native_marketplace: true, + security: crate::config::PluginMarketplaceSecurityConfig { + allow_model_urls: false, + ..crate::config::PluginMarketplaceSecurityConfig::default() + }, + }, + trusted_pubkeys: Vec::new(), + registries: Vec::new(), + models_dir: Some(temp_dir.path().join("models").to_string_lossy().to_string()), + huggingface_token: None, + }; + + let queue = InstallJobQueue::new(&config, test_plugin_manager(&plugin_dir)?)?; + let manifest = test_manifest(vec![crate::marketplace::ModelSpec { + id: None, + name: None, + default: false, + source: crate::marketplace::ModelSource::Huggingface { + repo_id: "test/repo".to_string(), + revision: None, + files: vec!["model.bin".to_string()], + }, + expected_size_bytes: None, + sha256: None, + license: None, + license_url: None, + gated: true, + }]); + let tracker = JobTracker { job_id: "test".to_string(), queue: queue.clone() }; + let cancel = CancellationToken::new(); + + let Err(err) = + queue.installer.download_models(&manifest, None, &tracker, &cancel, None).await + else { + bail!("expected gated model error"); + }; + let InstallError::Other(err) = err else { + bail!("expected InstallError::Other"); + }; + assert!(err.to_string().contains("token")); + + Ok(()) + } +} diff --git a/apps/skit/src/marketplace_security.rs b/apps/skit/src/marketplace_security.rs new file mode 100644 index 00000000..463be7d8 --- /dev/null +++ b/apps/skit/src/marketplace_security.rs @@ -0,0 +1,354 @@ +// SPDX-FileCopyrightText: © 2025 StreamKit Contributors +// +// SPDX-License-Identifier: MPL-2.0 + +use std::fmt::Write; +use std::net::IpAddr; +use 
std::path::Path; + +use anyhow::{anyhow, Context, Result}; +use bytes::Bytes; +use reqwest::{header::LOCATION, Client, Response, Url}; +use tracing::warn; + +use crate::config::{MarketplaceHostPolicy, MarketplaceSchemePolicy, PluginConfig}; + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct OriginKey { + scheme: String, + host: String, + port: u16, +} + +#[derive(Clone, Debug)] +pub struct MarketplaceUrlPolicy { + allowed_origins: Vec, + require_registry_origin: bool, + scheme_policy: MarketplaceSchemePolicy, + host_policy: MarketplaceHostPolicy, + resolve_hostnames: bool, +} + +pub const MAX_MARKETPLACE_REDIRECTS: usize = 5; + +impl MarketplaceUrlPolicy { + pub fn from_config(config: &PluginConfig) -> Self { + Self { + allowed_origins: config.marketplace.security.marketplace_url_allowlist.clone(), + require_registry_origin: config + .marketplace + .security + .marketplace_require_registry_origin, + scheme_policy: config.marketplace.security.marketplace_scheme_policy, + host_policy: config.marketplace.security.marketplace_host_policy, + resolve_hostnames: config.marketplace.security.marketplace_resolve_hostnames, + } + } + + /// Validates a marketplace URL against the configured policy. + /// + /// # Errors + /// + /// Returns an error if the URL is invalid, violates scheme/host restrictions, or fails + /// same-origin enforcement when required. 
+ pub async fn validate_url( + &self, + label: &str, + url: &str, + registry_origin: Option<&OriginKey>, + ) -> Result { + let parsed = Url::parse(url).with_context(|| format!("Invalid {label} '{url}'"))?; + validate_scheme(label, &parsed, self.scheme_policy)?; + validate_host(label, &parsed, self.host_policy)?; + if self.resolve_hostnames { + validate_resolved_ips(label, &parsed, self.host_policy).await?; + } + + let origin = origin_key(&parsed)?; + let origin_display_str = origin_display(&origin); + let allowlist_key = origin_allowlist_key(&parsed)?; + let allowlisted = self + .allowed_origins + .iter() + .any(|pattern| origin_matches_pattern(&allowlist_key, pattern)); + + if self.require_registry_origin && !allowlisted { + if let Some(registry_origin) = registry_origin { + if &origin != registry_origin { + return Err(anyhow!( + "{label} origin {origin_display_str} does not match registry origin {registry_origin}", + registry_origin = origin_display(registry_origin) + )); + } + } + } + + Ok(parsed) + } +} + +/// Fetches a URL while validating every redirect hop against the marketplace policy. +/// +/// The initial URL must already be validated by the caller. +/// +/// # Errors +/// +/// Returns an error if a redirect is invalid, exceeds the redirect limit, or the request fails. +pub async fn validated_get_response( + client: &Client, + policy: &MarketplaceUrlPolicy, + label: &str, + start: &Url, + registry_origin: Option<&OriginKey>, + bearer_token: Option<&str>, +) -> Result<(Response, Url)> { + let mut current = start.clone(); + let token_origin = if bearer_token.is_some() { Some(origin_key(start)?) 
} else { None }; + + for redirect_count in 0..=MAX_MARKETPLACE_REDIRECTS { + let mut request = client.get(current.clone()); + if let (Some(token), Some(expected_origin)) = (bearer_token, token_origin.as_ref()) { + let current_origin = origin_key(¤t)?; + if ¤t_origin == expected_origin { + request = request.bearer_auth(token); + } + } + let response = + request.send().await.with_context(|| format!("Failed to fetch {label} {current}"))?; + if response.status().is_redirection() { + if redirect_count == MAX_MARKETPLACE_REDIRECTS { + return Err(anyhow!( + "{label} exceeded redirect limit ({MAX_MARKETPLACE_REDIRECTS})" + )); + } + let location = response + .headers() + .get(LOCATION) + .ok_or_else(|| anyhow!("{label} redirect missing Location header"))?; + let location = location.to_str().with_context(|| { + format!("{label} redirect location is not valid UTF-8: {location:?}") + })?; + let next = current + .join(location) + .with_context(|| format!("Invalid redirect URL '{location}' for {label}"))?; + let validated = policy.validate_url(label, next.as_str(), registry_origin).await?; + current = validated; + continue; + } + + return Ok((response, current)); + } + + Err(anyhow!("{label} exceeded redirect limit ({MAX_MARKETPLACE_REDIRECTS})")) +} + +/// Fetches a URL and returns the bytes after validating redirects. +/// +/// The initial URL must already be validated by the caller. +/// +/// # Errors +/// +/// Returns an error if the request fails, redirects are invalid, or the response cannot be read. 
+pub async fn validated_get_bytes( + client: &Client, + policy: &MarketplaceUrlPolicy, + label: &str, + start: &Url, + registry_origin: Option<&OriginKey>, + bearer_token: Option<&str>, +) -> Result { + let (response, final_url) = + validated_get_response(client, policy, label, start, registry_origin, bearer_token).await?; + let response = response + .error_for_status() + .with_context(|| format!("{label} request failed for {final_url}"))?; + response + .bytes() + .await + .with_context(|| format!("Failed to read {label} response body from {final_url}")) +} + +/// Builds an origin key from a URL. +/// +/// # Errors +/// +/// Returns an error if the URL is missing a host or does not have a known default port. +pub fn origin_key(url: &Url) -> Result { + let host = url.host_str().ok_or_else(|| anyhow!("URL is missing host"))?; + let port = url + .port_or_known_default() + .ok_or_else(|| anyhow!("URL scheme '{}' is missing a default port", url.scheme()))?; + Ok(OriginKey { scheme: url.scheme().to_string(), host: host.to_string(), port }) +} + +pub fn origin_display(origin: &OriginKey) -> String { + format!( + "{scheme}://{host}:{port}", + scheme = origin.scheme, + host = origin.host, + port = origin.port + ) +} + +fn origin_allowlist_key(url: &Url) -> Result { + let host = url.host_str().ok_or_else(|| anyhow!("URL is missing host"))?; + let mut origin = format!("{scheme}://{host}", scheme = url.scheme()); + if let Some(port) = url.port() { + let _ = write!(&mut origin, ":{port}"); + } + Ok(origin) +} + +fn origin_matches_pattern(origin: &str, pattern: &str) -> bool { + if pattern == "*" { + return true; + } + + if let Some(prefix_without_port) = pattern.strip_suffix(":*") { + if origin == prefix_without_port { + return true; + } + let Some(rest) = origin.strip_prefix(prefix_without_port) else { + return false; + }; + let Some(port_str) = rest.strip_prefix(':') else { + return false; + }; + return !port_str.is_empty() && port_str.chars().all(|c| c.is_ascii_digit()); + } 
+ + origin == pattern +} + +fn validate_scheme(label: &str, url: &Url, policy: MarketplaceSchemePolicy) -> Result<()> { + match url.scheme() { + "https" => Ok(()), + "http" => match policy { + MarketplaceSchemePolicy::AllowHttp => Ok(()), + MarketplaceSchemePolicy::HttpsOnly => Err(anyhow!("{label} must use https")), + }, + other => Err(anyhow!("{label} has unsupported scheme '{other}'")), + } +} + +fn validate_host(label: &str, url: &Url, host_policy: MarketplaceHostPolicy) -> Result<()> { + let host = url.host_str().ok_or_else(|| anyhow!("{label} is missing host"))?; + if matches!(host_policy, MarketplaceHostPolicy::PublicOnly) { + let host_lower = host.to_ascii_lowercase(); + let is_local_tld = Path::new(&host_lower) + .extension() + .and_then(|ext| ext.to_str()) + .is_some_and(|ext| ext.eq_ignore_ascii_case("local")); + if host_lower == "localhost" || host_lower.ends_with(".localhost") || is_local_tld { + return Err(anyhow!("{label} host '{host}' is not allowed")); + } + } + + if let Ok(ip) = host.parse::() { + if is_blocked_ip(ip, host_policy) { + return Err(anyhow!("{label} host '{host}' is not allowed")); + } + } + + Ok(()) +} + +async fn validate_resolved_ips( + label: &str, + url: &Url, + host_policy: MarketplaceHostPolicy, +) -> Result<()> { + let host = url.host_str().ok_or_else(|| anyhow!("{label} is missing host"))?; + if host.parse::().is_ok() { + return Ok(()); + } + let port = url.port_or_known_default().ok_or_else(|| anyhow!("{label} is missing a port"))?; + + let lookup = tokio::net::lookup_host((host, port)).await; + let addrs = match lookup { + Ok(addrs) => addrs.collect::>(), + Err(err) => { + warn!(error = %err, host = %host, "Failed to resolve marketplace host"); + return Ok(()); + }, + }; + + if addrs.is_empty() { + warn!(host = %host, "Marketplace host resolved to no addresses"); + return Ok(()); + } + + for addr in addrs { + if is_blocked_ip(addr.ip(), host_policy) { + return Err(anyhow!("{label} resolved to blocked address {}", 
addr.ip())); + } + } + + Ok(()) +} + +const fn is_blocked_ip(ip: IpAddr, host_policy: MarketplaceHostPolicy) -> bool { + match ip { + IpAddr::V4(addr) => { + let is_private = addr.is_private() || addr.is_loopback() || addr.is_link_local(); + if matches!(host_policy, MarketplaceHostPolicy::PublicOnly) && is_private { + return true; + } + addr.is_unspecified() || addr.is_multicast() || addr.is_broadcast() + }, + IpAddr::V6(addr) => { + let is_private = + addr.is_loopback() || addr.is_unicast_link_local() || addr.is_unique_local(); + if matches!(host_policy, MarketplaceHostPolicy::PublicOnly) && is_private { + return true; + } + addr.is_unspecified() || addr.is_multicast() + }, + } +} + +#[cfg(test)] +mod tests { + use super::*; + use anyhow::{bail, Result}; + + fn test_policy() -> MarketplaceUrlPolicy { + MarketplaceUrlPolicy { + allowed_origins: Vec::new(), + require_registry_origin: true, + scheme_policy: MarketplaceSchemePolicy::HttpsOnly, + host_policy: MarketplaceHostPolicy::PublicOnly, + resolve_hostnames: false, + } + } + + #[tokio::test] + async fn rejects_insecure_marketplace_urls() -> Result<()> { + let policy = test_policy(); + let registry = + policy.validate_url("registry index", "https://example.com/index.json", None).await?; + let registry_origin = origin_key(®istry)?; + + match policy + .validate_url( + "manifest url", + "http://example.com/manifest.json", + Some(®istry_origin), + ) + .await + { + Ok(_) => bail!("expected https rejection"), + Err(err) => assert!(err.to_string().contains("https")), + } + + match policy + .validate_url("manifest url", "https://evil.com/manifest.json", Some(®istry_origin)) + .await + { + Ok(_) => bail!("expected origin rejection"), + Err(err) => assert!(err.to_string().contains("origin")), + } + + Ok(()) + } +} diff --git a/apps/skit/src/plugin_paths.rs b/apps/skit/src/plugin_paths.rs new file mode 100644 index 00000000..0ad83eb9 --- /dev/null +++ b/apps/skit/src/plugin_paths.rs @@ -0,0 +1,115 @@ +// 
SPDX-FileCopyrightText: © 2025 StreamKit Contributors +// +// SPDX-License-Identifier: MPL-2.0 + +use std::path::{Component, Path, PathBuf}; + +use anyhow::{anyhow, Context, Result}; + +/// Validates a single path component used in plugin storage paths. +/// +/// # Errors +/// +/// Returns an error if the component is empty, contains whitespace, or is not a single path +/// segment. +pub fn validate_path_component(label: &str, value: &str) -> Result<()> { + if value.trim().is_empty() { + return Err(anyhow!("{label} must not be empty")); + } + if value != value.trim() { + return Err(anyhow!("{label} must not contain leading or trailing whitespace")); + } + + let mut components = Path::new(value).components(); + match (components.next(), components.next()) { + (Some(Component::Normal(_)), None) => Ok(()), + _ => Err(anyhow!("{label} must be a single path component")), + } +} + +/// Ensures the plugin base directory exists and returns its canonical path. +/// +/// # Errors +/// +/// Returns an error if the directory cannot be created or canonicalized. +pub async fn ensure_base_dir(base_dir: &Path) -> Result { + tokio::fs::create_dir_all(base_dir).await.with_context(|| { + format!("Failed to create plugin base dir {base_dir}", base_dir = base_dir.display()) + })?; + tokio::fs::canonicalize(base_dir).await.with_context(|| { + format!("Failed to resolve plugin base dir {base_dir}", base_dir = base_dir.display()) + }) +} + +/// Canonicalizes an existing plugin base directory. +/// +/// # Errors +/// +/// Returns an error if the directory cannot be canonicalized. +pub async fn canonicalize_existing_dir(base_dir: &Path) -> Result { + tokio::fs::canonicalize(base_dir).await.with_context(|| { + format!("Failed to resolve plugin base dir {base_dir}", base_dir = base_dir.display()) + }) +} + +/// Creates and canonicalizes a directory, ensuring it stays under the base dir. 
+/// +/// # Errors +/// +/// Returns an error if the directory cannot be created, canonicalized, or is outside the base +/// dir. +pub async fn ensure_dir_under(base_real: &Path, dir: &Path, label: &str) -> Result { + tokio::fs::create_dir_all(dir) + .await + .with_context(|| format!("Failed to create {label} dir {dir}", dir = dir.display()))?; + let dir_real = tokio::fs::canonicalize(dir) + .await + .with_context(|| format!("Failed to resolve {label} dir {dir}", dir = dir.display()))?; + ensure_under_base(base_real, &dir_real, label)?; + Ok(dir_real) +} + +/// Canonicalizes an existing directory and ensures it stays under the base dir. +/// +/// # Errors +/// +/// Returns an error if the directory cannot be canonicalized or is outside the base dir. +pub async fn ensure_existing_dir_under( + base_real: &Path, + dir: &Path, + label: &str, +) -> Result { + let dir_real = tokio::fs::canonicalize(dir) + .await + .with_context(|| format!("Failed to resolve {label} dir {dir}", dir = dir.display()))?; + ensure_under_base(base_real, &dir_real, label)?; + Ok(dir_real) +} + +fn ensure_under_base(base_real: &Path, dir_real: &Path, label: &str) -> Result<()> { + if !dir_real.starts_with(base_real) { + return Err(anyhow!( + "{label} dir {dir_real} is outside plugin directory {base_real}", + dir_real = dir_real.display(), + base_real = base_real.display() + )); + } + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::validate_path_component; + + #[test] + fn validate_path_component_rejects_unsafe_values() { + for value in ["../x", "a/b", "/tmp", "", " ", " x", "x "] { + assert!(validate_path_component("plugin id", value).is_err()); + } + } + + #[test] + fn validate_path_component_accepts_simple_name() { + assert!(validate_path_component("plugin id", "plugin-name").is_ok()); + } +} diff --git a/apps/skit/src/plugin_records.rs b/apps/skit/src/plugin_records.rs new file mode 100644 index 00000000..47e387bb --- /dev/null +++ b/apps/skit/src/plugin_records.rs @@ -0,0 +1,45 @@ +// 
SPDX-FileCopyrightText: © 2025 StreamKit Contributors +// +// SPDX-License-Identifier: MPL-2.0 + +use std::path::{Path, PathBuf}; + +use anyhow::Result; +use serde::{Deserialize, Serialize}; + +use crate::{marketplace::PluginKind, plugin_paths}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ActivePluginRecord { + pub plugin_id: String, + pub version: String, + pub node_kind: String, + pub kind: PluginKind, + pub entrypoint: String, + pub installed_at_ms: u128, +} + +pub fn active_dir(plugin_dir: &Path) -> PathBuf { + plugin_dir.join("active") +} + +/// Builds the active plugin record path for a given plugin id. +/// +/// # Errors +/// +/// Returns an error if the plugin id is not a single safe path component. +pub fn record_path(plugin_dir: &Path, plugin_id: &str) -> Result { + plugin_paths::validate_path_component("plugin id", plugin_id)?; + Ok(active_dir(plugin_dir).join(format!("{plugin_id}.json"))) +} + +pub fn namespaced_kind(record: &ActivePluginRecord) -> String { + match record.kind { + PluginKind::Wasm => { + format!("plugin::wasm::{node_kind}", node_kind = record.node_kind) + }, + PluginKind::Native => { + format!("plugin::native::{node_kind}", node_kind = record.node_kind) + }, + } +} diff --git a/apps/skit/src/plugins.rs b/apps/skit/src/plugins.rs index 8af892bd..427541ce 100644 --- a/apps/skit/src/plugins.rs +++ b/apps/skit/src/plugins.rs @@ -20,6 +20,11 @@ use streamkit_plugin_wasm::{ use tokio::sync::Mutex; use tracing::{debug, info, warn}; +use crate::{ + marketplace::PluginKind, + plugin_paths, + plugin_records::{active_dir as plugin_active_dir, namespaced_kind as active_namespaced_kind}, +}; /// The type of plugin #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)] #[serde(rename_all = "lowercase")] @@ -37,6 +42,7 @@ pub struct PluginSummary { pub categories: Vec, pub loaded_at_ms: u128, pub plugin_type: PluginType, + pub version: Option, } impl PluginSummary { @@ -64,6 +70,7 @@ impl PluginSummary { categories: 
entry.categories.clone(), loaded_at_ms, plugin_type: entry.plugin_type, + version: entry.version.clone(), } } } @@ -81,6 +88,7 @@ struct ManagedPlugin { loaded_at: SystemTime, original_kind: String, plugin_type: PluginType, + version: Option, } impl ManagedPlugin { @@ -97,6 +105,7 @@ impl ManagedPlugin { loaded_at: SystemTime::now(), original_kind, plugin_type: PluginType::Wasm, + version: None, } } @@ -113,6 +122,7 @@ impl ManagedPlugin { loaded_at: SystemTime::now(), original_kind, plugin_type: PluginType::Native, + version: None, } } } @@ -121,6 +131,7 @@ impl ManagedPlugin { pub struct UnifiedPluginManager { wasm_runtime: PluginRuntime, plugins: HashMap, + plugin_base_dir: PathBuf, wasm_directory: PathBuf, native_directory: PathBuf, engine: Arc, @@ -142,6 +153,7 @@ impl UnifiedPluginManager { pub fn new( engine: Arc, resource_manager: Arc, + plugin_base_dir: PathBuf, wasm_directory: PathBuf, native_directory: PathBuf, ) -> Result { @@ -164,6 +176,7 @@ impl UnifiedPluginManager { Ok(Self { wasm_runtime, plugins: HashMap::new(), + plugin_base_dir, wasm_directory, native_directory, engine, @@ -241,6 +254,135 @@ impl UnifiedPluginManager { Ok(summaries) } + /// Load marketplace-managed plugins using active records. + fn load_active_plugins_from_dir(&mut self) -> Result> { + let active_dir = plugin_active_dir(&self.plugin_base_dir); + if !active_dir.exists() { + return Ok(Vec::new()); + } + + let base_real = std::fs::canonicalize(&self.plugin_base_dir).ok(); + let mut summaries = Vec::new(); + + for entry in std::fs::read_dir(&active_dir).with_context(|| { + format!("failed to read active plugins dir {}", active_dir.display()) + })? 
{ + let entry = entry?; + let path = entry.path(); + if path.extension().and_then(|ext| ext.to_str()) != Some("json") { + continue; + } + + if let Some(summary) = self.load_active_plugin_record(&path, base_real.as_deref()) { + summaries.push(summary); + } + } + + Ok(summaries) + } + + fn load_active_plugin_record( + &mut self, + record_path: &Path, + base_real: Option<&Path>, + ) -> Option { + let record = Self::read_active_record(record_path)?; + if let Err(err) = plugin_paths::validate_path_component("plugin id", &record.plugin_id) { + warn!(error = %err, file = ?record_path, "Invalid plugin id in active record"); + return None; + } + if let Err(err) = plugin_paths::validate_path_component("plugin version", &record.version) { + warn!(error = %err, file = ?record_path, "Invalid plugin version in active record"); + return None; + } + let entrypoint_path = Self::validate_active_entrypoint(record_path, &record, base_real)?; + + let expected_kind = active_namespaced_kind(&record); + let plugin_type = match record.kind { + PluginKind::Wasm => PluginType::Wasm, + PluginKind::Native => PluginType::Native, + }; + + let mut summary = match self.load_from_path(plugin_type, &entrypoint_path) { + Ok(summary) => summary, + Err(err) => { + warn!( + error = %err, + kind = %expected_kind, + entrypoint = %entrypoint_path.display(), + "Failed to load active plugin" + ); + return None; + }, + }; + + if summary.kind != expected_kind { + let actual_kind = summary.kind; + warn!( + expected = %expected_kind, + actual = %actual_kind, + "Active plugin kind does not match record" + ); + let _ = self.unload_plugin(&actual_kind, false); + return None; + } + + let version = record.version; + if let Some(managed) = self.plugins.get_mut(&summary.kind) { + managed.version = Some(version.clone()); + } + summary.version = Some(version); + + Some(summary) + } + + fn read_active_record(record_path: &Path) -> Option { + let record_bytes = match std::fs::read(record_path) { + Ok(bytes) => bytes, + 
Err(err) => { + warn!(error = %err, file = ?record_path, "Failed to read active plugin record"); + return None; + }, + }; + match serde_json::from_slice(&record_bytes) { + Ok(record) => Some(record), + Err(err) => { + warn!(error = %err, file = ?record_path, "Failed to parse active plugin record"); + None + }, + } + } + + fn validate_active_entrypoint( + record_path: &Path, + record: &crate::plugin_records::ActivePluginRecord, + base_real: Option<&Path>, + ) -> Option { + let entrypoint_path = PathBuf::from(&record.entrypoint); + if !entrypoint_path.exists() { + warn!( + file = ?record_path, + entrypoint = %entrypoint_path.display(), + "Active plugin entrypoint missing" + ); + return None; + } + if let (Some(base_real), Ok(entrypoint_real)) = + (base_real, std::fs::canonicalize(&entrypoint_path)) + { + if !entrypoint_real.starts_with(base_real) { + warn!( + file = ?record_path, + entrypoint = %entrypoint_real.display(), + "Active plugin entrypoint is outside plugin directory" + ); + return None; + } + } + + Some(entrypoint_path) + } + /// Loads all existing plugins from both WASM and native directories. /// Native plugins are loaded first as they are faster to initialize. /// @@ -249,7 +391,8 @@ impl UnifiedPluginManager { /// Returns an error if the plugin directories cannot be read. /// Individual plugin load failures are logged but do not prevent other plugins from loading. 
pub fn load_existing(&mut self) -> Result> { - let mut summaries = self.load_native_plugins_from_dir()?; + let mut summaries = self.load_active_plugins_from_dir()?; + summaries.extend(self.load_native_plugins_from_dir()?); summaries.extend(self.load_wasm_plugins_from_dir()?); Ok(summaries) } @@ -314,9 +457,20 @@ impl UnifiedPluginManager { tokio::spawn(async move { info!("Starting background plugin loading"); - let result = { - let mut mgr = manager.lock().await; - mgr.load_existing() + let result = match tokio::task::spawn_blocking({ + let manager = Arc::clone(&manager); + move || { + let mut mgr = manager.blocking_lock(); + mgr.load_existing() + } + }) + .await + { + Ok(result) => result, + Err(err) => { + warn!(error = %err, "Plugin load task panicked"); + return; + }, }; match result { @@ -603,6 +757,11 @@ impl UnifiedPluginManager { .collect() } + /// Returns true if the plugin kind is currently loaded. + pub fn is_plugin_loaded(&self, kind: &str) -> bool { + self.plugins.contains_key(kind) + } + /// Helper method to update the loaded plugins gauge by counting each type fn update_loaded_gauge(&self) { let wasm_count = @@ -748,6 +907,19 @@ impl UnifiedPluginManager { PluginType::Native => self.load_native_plugin(target_path), } } + + /// Loads a plugin from an existing on-disk path without moving it. + /// + /// # Errors + /// + /// Returns an error if the plugin fails to load. + pub fn load_from_path>( + &mut self, + plugin_type: PluginType, + path: P, + ) -> Result { + self.load_from_written_path(plugin_type, path.as_ref().to_path_buf()) + } } /// Convenience alias for sharing the unified plugin manager behind an async mutex. 
diff --git a/apps/skit/src/server.rs b/apps/skit/src/server.rs index d26f0766..d3e4ab98 100644 --- a/apps/skit/src/server.rs +++ b/apps/skit/src/server.rs @@ -36,6 +36,12 @@ use tower_http::{ use tracing::{debug, error, info, warn}; use crate::file_security; +use crate::marketplace_installer::InstallPluginRequest; +use crate::marketplace_security::{origin_key, MarketplaceUrlPolicy, OriginKey}; +use crate::plugin_paths; +use crate::plugin_records::{ + active_dir as plugin_active_dir, namespaced_kind as active_namespaced_kind, ActivePluginRecord, +}; use crate::plugins::UnifiedPluginManager; use crate::profiling; use crate::state::AppState; @@ -53,7 +59,7 @@ use crate::config::Config; use tokio_stream::wrappers::ReceiverStream; use tokio_util::sync::CancellationToken; -use anyhow::Error as AnyhowError; +use anyhow::{Context, Error as AnyhowError}; use futures::{Stream, StreamExt}; use serde::{Deserialize, Serialize}; use tokio::io::AsyncWriteExt; @@ -660,7 +666,7 @@ async fn upload_plugin_handler( mut multipart: Multipart, ) -> Result { // Global hard gate: do not allow runtime plugin uploads unless explicitly enabled. - if !app_state.config.plugins.allow_http_management { + if !app_state.config.plugins.http_management.allow_http_management { return Err(PluginHttpError::Forbidden( "Plugin uploads are disabled by configuration. Set [plugins].allow_http_management = true to enable." .to_string(), @@ -678,9 +684,29 @@ async fn upload_plugin_handler( let mut plugin_file_name: Option = None; let mut temp_file_path: Option = None; + let mut declared_kind: Option = None; while let Some(field) = multipart.next_field().await? 
{ let name = field.name().unwrap_or("").to_string(); + if name == "kind" { + if declared_kind.is_some() { + return Err(PluginHttpError::BadRequest( + "Multiple 'kind' fields provided".to_string(), + )); + } + let value = field.text().await.map_err(|e| { + PluginHttpError::BadRequest(format!("Failed to read 'kind' field: {e}")) + })?; + let value = value.trim().to_string(); + if value.is_empty() { + return Err(PluginHttpError::BadRequest( + "Plugin kind must not be empty".to_string(), + )); + } + declared_kind = Some(value); + continue; + } + if name != "plugin" { continue; } @@ -761,28 +787,545 @@ async fn upload_plugin_handler( let tmp_path = temp_file_path .ok_or_else(|| PluginHttpError::BadRequest("Missing 'plugin' file field".to_string()))?; - let mut manager = app_state.plugin_manager.lock().await; - let summary = manager.load_from_temp_file(&file_name, &tmp_path).map_err(|e| { - let _ = std::fs::remove_file(&tmp_path); - PluginHttpError::from(e) - })?; + let extension = std::path::Path::new(&file_name) + .extension() + .and_then(|ext| ext.to_str()) + .unwrap_or_default(); + let placeholder_kind = match extension { + "wasm" => Some("plugin::wasm::placeholder"), + "so" | "dylib" | "dll" => Some("plugin::native::placeholder"), + _ => None, + }; + + if let Some(kind) = declared_kind.as_ref() { + if let Some(placeholder) = placeholder_kind { + let expected_prefix = if placeholder.starts_with("plugin::wasm::") { + "plugin::wasm::" + } else { + "plugin::native::" + }; + if !kind.starts_with(expected_prefix) { + let _ = std::fs::remove_file(&tmp_path); + return Err(PluginHttpError::BadRequest(format!( + "Declared plugin kind '{kind}' does not match uploaded file type" + ))); + } + } + if !perms.is_plugin_allowed(kind) { + let _ = std::fs::remove_file(&tmp_path); + return Err(PluginHttpError::Forbidden(format!( + "Permission denied: plugin '{kind}' not allowed" + ))); + } + } else if let Some(placeholder) = placeholder_kind { + if !perms.is_plugin_allowed(placeholder) 
{ + let _ = std::fs::remove_file(&tmp_path); + return Err(PluginHttpError::BadRequest( + "Plugin kind must be provided when allowed_plugins is restricted".to_string(), + )); + } + } + + let summary = match tokio::task::spawn_blocking({ + let manager = Arc::clone(&app_state.plugin_manager); + let file_name = file_name.clone(); + let tmp_path = tmp_path.clone(); + move || { + let mut mgr = manager.blocking_lock(); + mgr.load_from_temp_file(&file_name, &tmp_path) + } + }) + .await + { + Ok(Ok(summary)) => summary, + Ok(Err(err)) => { + let _ = std::fs::remove_file(&tmp_path); + return Err(PluginHttpError::from(err)); + }, + Err(err) => { + let _ = std::fs::remove_file(&tmp_path); + return Err(PluginHttpError::BadRequest(format!("Plugin load task failed: {err}"))); + }, + }; // Check if the loaded plugin is allowed if !perms.is_plugin_allowed(&summary.kind) { - // Unload the plugin since it's not allowed - let _ = manager.unload_plugin(&summary.kind, true); - drop(manager); + let _ = tokio::task::spawn_blocking({ + let manager = Arc::clone(&app_state.plugin_manager); + let kind = summary.kind.clone(); + move || { + let mut mgr = manager.blocking_lock(); + let _ = mgr.unload_plugin(&kind, true); + } + }) + .await; + return Err(PluginHttpError::Forbidden(format!( "Permission denied: plugin '{}' not allowed", summary.kind ))); } - drop(manager); + if let Some(kind) = declared_kind.as_ref() { + if summary.kind != *kind { + let _ = tokio::task::spawn_blocking({ + let manager = Arc::clone(&app_state.plugin_manager); + let summary_kind = summary.kind.clone(); + move || { + let mut mgr = manager.blocking_lock(); + let _ = mgr.unload_plugin(&summary_kind, true); + } + }) + .await; + + return Err(PluginHttpError::BadRequest(format!( + "Uploaded plugin kind '{}' does not match declared kind '{}'", + summary.kind, kind + ))); + } + } Ok((StatusCode::CREATED, Json(summary))) } +#[derive(Debug, Serialize)] +struct InstallPluginResponse { + job_id: String, +} + +async fn 
install_plugin_handler( + State(app_state): State>, + headers: HeaderMap, + Json(request): Json, +) -> Result { + if !app_state.config.plugins.marketplace.marketplace_enabled { + return Err(PluginHttpError::Forbidden( + "Marketplace installs are disabled by configuration. Set [plugins].marketplace_enabled = true to enable." + .to_string(), + )); + } + + let perms = crate::role_extractor::get_permissions(&headers, &app_state); + if !perms.load_plugins { + return Err(PluginHttpError::Forbidden( + "Permission denied: cannot install plugins".to_string(), + )); + } + + if !app_state.marketplace_jobs.is_registry_configured(&request.registry) { + return Err(PluginHttpError::BadRequest(format!( + "Registry '{registry}' is not configured", + registry = request.registry + ))); + } + + let job_id = app_state.marketplace_jobs.enqueue(request, perms).await; + Ok((StatusCode::ACCEPTED, Json(InstallPluginResponse { job_id }))) +} + +#[derive(Debug, Serialize)] +struct MarketplaceRegistry { + id: String, + url: String, +} + +async fn list_marketplace_registries_handler( + State(app_state): State>, + headers: HeaderMap, +) -> Result { + if !app_state.config.plugins.marketplace.marketplace_enabled { + return Err(( + StatusCode::FORBIDDEN, + "Marketplace browsing is disabled by configuration. Set [plugins].marketplace_enabled = true to enable." 
+ .to_string(), + )); + } + + let perms = crate::role_extractor::get_permissions(&headers, &app_state); + if !perms.load_plugins { + return Err(( + StatusCode::FORBIDDEN, + "Permission denied: cannot view marketplace".to_string(), + )); + } + + let registries = app_state.marketplace_jobs.registries(); + let payload: Vec = + registries.into_iter().map(|url| MarketplaceRegistry { id: url.clone(), url }).collect(); + Ok(Json(payload)) +} + +#[derive(Debug, Deserialize)] +struct MarketplacePluginsQuery { + registry: String, + q: Option, +} + +async fn validate_marketplace_registry_url( + config: &crate::config::PluginConfig, + registry: &str, +) -> anyhow::Result<(MarketplaceUrlPolicy, reqwest::Url, OriginKey)> { + let policy = MarketplaceUrlPolicy::from_config(config); + let registry_url = policy.validate_url("registry index", registry, None).await?; + let registry_origin = origin_key(®istry_url)?; + Ok((policy, registry_url, registry_origin)) +} + +async fn validate_marketplace_plugin_urls( + policy: &MarketplaceUrlPolicy, + registry_origin: &OriginKey, + version: &crate::marketplace::RegistryPluginVersion, +) -> anyhow::Result<(reqwest::Url, reqwest::Url)> { + let manifest_url = + policy.validate_url("manifest url", &version.manifest_url, Some(registry_origin)).await?; + let signature_url_value = version + .signature_url + .as_deref() + .map_or_else(|| format!("{}.minisig", manifest_url.as_str()), str::to_string); + let signature_url = + policy.validate_url("signature url", &signature_url_value, Some(registry_origin)).await?; + Ok((manifest_url, signature_url)) +} + +#[cfg(test)] +mod marketplace_url_tests { + use super::{validate_marketplace_plugin_urls, validate_marketplace_registry_url}; + use anyhow::{bail, Result}; + + #[tokio::test] + async fn browsing_rejects_cross_origin_manifest() -> Result<()> { + let mut config = crate::config::PluginConfig::default(); + config.marketplace.security.marketplace_require_registry_origin = true; + let (policy, _registry_url, 
registry_origin) = + validate_marketplace_registry_url(&config, "https://registry.example.com/index.json") + .await?; + + let version = crate::marketplace::RegistryPluginVersion { + version: "1.0.0".to_string(), + manifest_url: "https://evil.example.com/manifest.json".to_string(), + signature_url: Some("https://registry.example.com/manifest.minisig".to_string()), + published_at: None, + }; + + match validate_marketplace_plugin_urls(&policy, ®istry_origin, &version).await { + Ok(_) => bail!("expected origin rejection"), + Err(err) => assert!(err.to_string().contains("origin")), + } + + Ok(()) + } +} + +async fn list_marketplace_plugins_handler( + State(app_state): State>, + headers: HeaderMap, + Query(query): Query, +) -> Result { + if !app_state.config.plugins.marketplace.marketplace_enabled { + return Err(( + StatusCode::FORBIDDEN, + "Marketplace browsing is disabled by configuration. Set [plugins].marketplace_enabled = true to enable." + .to_string(), + )); + } + + let perms = crate::role_extractor::get_permissions(&headers, &app_state); + if !perms.load_plugins { + return Err(( + StatusCode::FORBIDDEN, + "Permission denied: cannot view marketplace".to_string(), + )); + } + + let registry = query.registry.trim(); + if registry.is_empty() { + return Err((StatusCode::BAD_REQUEST, "Registry is required".to_string())); + } + if !app_state.marketplace_jobs.is_registry_configured(registry) { + return Err((StatusCode::BAD_REQUEST, format!("Registry '{registry}' is not configured"))); + } + + let (policy, registry_url, registry_origin) = + validate_marketplace_registry_url(&app_state.config.plugins, registry) + .await + .map_err(|err| (StatusCode::BAD_REQUEST, format!("Registry URL rejected: {err}")))?; + + let registry_client = app_state.marketplace_jobs.registry_client(); + let mut index = registry_client + .fetch_index_with_policy(®istry_url, &policy, ®istry_origin) + .await + .map_err(|err| { + (StatusCode::BAD_GATEWAY, format!("Failed to fetch registry index: 
{err}")) + })?; + + let filter = query.q.as_deref().map(str::trim).filter(|val| !val.is_empty()); + if let Some(filter) = filter { + let filter = filter.to_lowercase(); + index.plugins.retain(|plugin| marketplace_plugin_matches(plugin, &filter)); + } + + Ok(Json(index)) +} + +fn marketplace_plugin_matches(plugin: &crate::marketplace::RegistryPlugin, filter: &str) -> bool { + if plugin.id.to_lowercase().contains(filter) { + return true; + } + if let Some(name) = plugin.name.as_ref() { + if name.to_lowercase().contains(filter) { + return true; + } + } + if let Some(description) = plugin.description.as_ref() { + if description.to_lowercase().contains(filter) { + return true; + } + } + false +} + +#[derive(Debug, Deserialize)] +struct MarketplacePluginQuery { + registry: String, + version: Option, +} + +#[derive(Debug, Serialize)] +struct MarketplaceSignatureStatus { + verified: bool, + #[serde(skip_serializing_if = "Option::is_none")] + key_id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + error: Option, +} + +#[derive(Debug, Serialize)] +struct MarketplacePluginDetails { + registry: String, + plugin: crate::marketplace::RegistryPlugin, + version: crate::marketplace::RegistryPluginVersion, + manifest: crate::marketplace::PluginManifest, + signature: MarketplaceSignatureStatus, + allow_native_marketplace: bool, +} + +async fn get_marketplace_plugin_handler( + State(app_state): State>, + headers: HeaderMap, + Path(plugin_id): Path, + Query(query): Query, +) -> Result { + if !app_state.config.plugins.marketplace.marketplace_enabled { + return Err(( + StatusCode::FORBIDDEN, + "Marketplace browsing is disabled by configuration. Set [plugins].marketplace_enabled = true to enable." 
+ .to_string(), + )); + } + + let perms = crate::role_extractor::get_permissions(&headers, &app_state); + if !perms.load_plugins { + return Err(( + StatusCode::FORBIDDEN, + "Permission denied: cannot view marketplace".to_string(), + )); + } + + let registry = query.registry.trim(); + if registry.is_empty() { + return Err((StatusCode::BAD_REQUEST, "Registry is required".to_string())); + } + if !app_state.marketplace_jobs.is_registry_configured(registry) { + return Err((StatusCode::BAD_REQUEST, format!("Registry '{registry}' is not configured"))); + } + + let plugin_id = plugin_id.trim(); + if plugin_id.is_empty() { + return Err((StatusCode::BAD_REQUEST, "Plugin id is required".to_string())); + } + if let Err(err) = plugin_paths::validate_path_component("plugin id", plugin_id) { + return Err((StatusCode::BAD_REQUEST, err.to_string())); + } + + let (policy, registry_url, registry_origin) = + validate_marketplace_registry_url(&app_state.config.plugins, registry) + .await + .map_err(|err| (StatusCode::BAD_REQUEST, format!("Registry URL rejected: {err}")))?; + + let registry_client = app_state.marketplace_jobs.registry_client(); + let signature_verifier = app_state.marketplace_jobs.verifier(); + let index = registry_client + .fetch_index_with_policy(®istry_url, &policy, ®istry_origin) + .await + .map_err(|err| { + (StatusCode::BAD_GATEWAY, format!("Failed to fetch registry index: {err}")) + })?; + + let plugin = + index.plugins.into_iter().find(|plugin| plugin.id == plugin_id).ok_or_else(|| { + (StatusCode::NOT_FOUND, format!("Plugin '{plugin_id}' not found in registry")) + })?; + + let requested_version = query.version.as_deref().map(str::trim).filter(|val| !val.is_empty()); + let version_entry = if let Some(requested_version) = requested_version { + plugin.versions.iter().find(|version| version.version == requested_version) + } else if let Some(latest) = plugin.latest.as_ref() { + plugin + .versions + .iter() + .find(|version| version.version == *latest) + .or_else(|| 
plugin.versions.first()) + } else { + plugin.versions.first() + } + .cloned() + .ok_or_else(|| { + (StatusCode::NOT_FOUND, format!("No versions available for plugin '{plugin_id}'")) + })?; + + let (manifest_url, signature_url) = + validate_marketplace_plugin_urls(&policy, ®istry_origin, &version_entry).await.map_err( + |err| (StatusCode::BAD_GATEWAY, format!("Registry returned invalid URLs: {err}")), + )?; + + let manifest_entry = registry_client + .fetch_manifest_raw_with_policy(&manifest_url, &policy, ®istry_origin) + .await + .map_err(|err| { + (StatusCode::BAD_GATEWAY, format!("Failed to fetch plugin manifest: {err}")) + })?; + + let signature = match registry_client + .fetch_text_with_policy("signature url", &signature_url, &policy, ®istry_origin) + .await + { + Ok(signature_text) => { + match signature_verifier.verify(manifest_entry.bytes.as_ref(), &signature_text) { + Ok(verified_signature) => MarketplaceSignatureStatus { + verified: true, + key_id: Some(verified_signature.key_id), + error: None, + }, + Err(err) => MarketplaceSignatureStatus { + verified: false, + key_id: None, + error: Some(err.to_string()), + }, + } + }, + Err(err) => MarketplaceSignatureStatus { + verified: false, + key_id: None, + error: Some(err.to_string()), + }, + }; + + Ok(Json(MarketplacePluginDetails { + registry: registry.to_string(), + plugin, + version: version_entry, + manifest: manifest_entry.manifest, + signature, + allow_native_marketplace: app_state.config.plugins.marketplace.allow_native_marketplace, + })) +} + +async fn find_active_record_for_kind( + plugin_dir: &std::path::Path, + kind: &str, +) -> Option<(std::path::PathBuf, ActivePluginRecord)> { + let active_dir = plugin_active_dir(plugin_dir); + let mut entries = match tokio::fs::read_dir(&active_dir).await { + Ok(entries) => entries, + Err(err) => { + if err.kind() != std::io::ErrorKind::NotFound { + warn!(error = %err, dir = ?active_dir, "Failed to read active plugin records"); + } + return None; + }, + }; + + loop 
{ + let entry = match entries.next_entry().await { + Ok(Some(entry)) => entry, + Ok(None) => break, + Err(err) => { + warn!(error = %err, dir = ?active_dir, "Failed to iterate active plugin records"); + break; + }, + }; + + let path = entry.path(); + if path.extension().and_then(|ext| ext.to_str()) != Some("json") { + continue; + } + let bytes = match tokio::fs::read(&path).await { + Ok(bytes) => bytes, + Err(err) => { + warn!(error = %err, file = ?path, "Failed to read active plugin record"); + continue; + }, + }; + let record: ActivePluginRecord = match serde_json::from_slice(&bytes) { + Ok(record) => record, + Err(err) => { + warn!(error = %err, file = ?path, "Failed to parse active plugin record"); + continue; + }, + }; + if active_namespaced_kind(&record) == kind { + return Some((path, record)); + } + } + + None +} + +async fn remove_active_record_and_bundle( + plugin_dir: &std::path::Path, + record_path: &std::path::Path, + record: &ActivePluginRecord, +) -> Result<(), anyhow::Error> { + tokio::fs::remove_file(record_path).await.with_context(|| { + format!("Failed to remove active record {record_path}", record_path = record_path.display()) + })?; + + let base_real = plugin_paths::canonicalize_existing_dir(plugin_dir).await?; + plugin_paths::validate_path_component("plugin id", &record.plugin_id)?; + plugin_paths::validate_path_component("plugin version", &record.version)?; + + let bundles_root = plugin_dir.join("bundles").join(&record.plugin_id); + let bundle_dir = bundles_root.join(&record.version); + if tokio::fs::try_exists(&bundle_dir).await.unwrap_or(false) { + let bundle_dir_real = + plugin_paths::ensure_existing_dir_under(&base_real, &bundle_dir, "bundle").await?; + tokio::fs::remove_dir_all(&bundle_dir_real).await.with_context(|| { + format!( + "Failed to remove bundle directory {bundle_dir_real}", + bundle_dir_real = bundle_dir_real.display() + ) + })?; + } + + if tokio::fs::try_exists(&bundles_root).await.unwrap_or(false) { + let bundles_root_real = 
+ plugin_paths::ensure_existing_dir_under(&base_real, &bundles_root, "bundles").await?; + let mut entries = tokio::fs::read_dir(&bundles_root_real).await?; + if entries.next_entry().await?.is_none() { + let _ = tokio::fs::remove_dir(&bundles_root_real).await; + } + } + + let cache_root = plugin_dir.join("cache").join(&record.plugin_id); + if tokio::fs::try_exists(&cache_root).await.unwrap_or(false) { + let cache_root_real = + plugin_paths::ensure_existing_dir_under(&base_real, &cache_root, "cache").await?; + let _ = tokio::fs::remove_dir_all(&cache_root_real).await; + } + + Ok(()) +} + #[derive(Debug, Default, Deserialize)] struct DeletePluginQuery { #[serde(default)] @@ -796,7 +1339,7 @@ async fn delete_plugin_handler( Query(query): Query, ) -> Result { // Global hard gate: do not allow runtime plugin deletion unless explicitly enabled. - if !app_state.config.plugins.allow_http_management { + if !app_state.config.plugins.http_management.allow_http_management { return Err(PluginHttpError::Forbidden( "Plugin deletion is disabled by configuration. Set [plugins].allow_http_management = true to enable." 
.to_string(), @@ -817,14 +1360,83 @@ async fn delete_plugin_handler( )); } + let plugin_dir = std::path::PathBuf::from(&app_state.config.plugins.directory); + let active_record = find_active_record_for_kind(&plugin_dir, &kind).await; + info!(plugin_kind = %kind, keep_file = query.keep_file, "Deleting plugin"); - let mut manager = app_state.plugin_manager.lock().await; - let summary = manager.unload_plugin(&kind, !query.keep_file).map_err(PluginHttpError::from)?; - drop(manager); + let summary = match tokio::task::spawn_blocking({ + let manager = Arc::clone(&app_state.plugin_manager); + let kind = kind.clone(); + let remove_file = !query.keep_file; + move || { + let mut mgr = manager.blocking_lock(); + mgr.unload_plugin(&kind, remove_file) + } + }) + .await + { + Ok(Ok(summary)) => summary, + Ok(Err(err)) => return Err(PluginHttpError::from(err)), + Err(err) => { + return Err(PluginHttpError::BadRequest(format!("Plugin unload task failed: {err}"))); + }, + }; + + if let Some((record_path, record)) = active_record { + if query.keep_file { + info!( + plugin_id = %record.plugin_id, + version = %record.version, + "Kept marketplace bundle and active record for unloaded plugin" + ); + } else if let Err(err) = + remove_active_record_and_bundle(&plugin_dir, &record_path, &record).await + { + return Err(PluginHttpError::BadRequest(format!( + "Failed to uninstall marketplace plugin: {err}" + ))); + } + } Ok(Json(summary)) } +async fn get_job_handler( + State(app_state): State>, + headers: HeaderMap, + Path(job_id): Path, +) -> Result { + let perms = crate::role_extractor::get_permissions(&headers, &app_state); + if !perms.load_plugins { + return Err((StatusCode::FORBIDDEN, "Permission denied: cannot view jobs".to_string())); + } + + app_state + .marketplace_jobs + .get_job(&job_id) + .await + .map(Json) + .ok_or_else(|| (StatusCode::NOT_FOUND, format!("Job '{job_id}' not found"))) +} + +async fn cancel_job_handler( + State(app_state): State>, + headers: HeaderMap, + 
Path(job_id): Path, +) -> Result { + let perms = crate::role_extractor::get_permissions(&headers, &app_state); + if !perms.load_plugins { + return Err((StatusCode::FORBIDDEN, "Permission denied: cannot cancel jobs".to_string())); + } + + app_state + .marketplace_jobs + .cancel_job(&job_id) + .await + .map(Json) + .ok_or_else(|| (StatusCode::NOT_FOUND, format!("Job '{job_id}' not found"))) +} + async fn list_packet_types_handler() -> impl IntoResponse { let registry = streamkit_core::packet_meta::packet_type_registry(); Json(registry) @@ -2386,6 +2998,7 @@ pub fn create_app( let plugin_manager = UnifiedPluginManager::new( Arc::clone(&engine), resource_manager, + plugin_base_dir, wasm_plugin_dir, native_plugin_dir, ) @@ -2398,6 +3011,12 @@ pub fn create_app( config.resources.prewarm.clone(), ); + let marketplace_jobs = crate::marketplace_installer::InstallJobQueue::new( + &config.plugins, + Arc::clone(&plugin_manager), + ) + .expect("Failed to initialize marketplace installer"); + #[cfg(feature = "moq")] let moq_gateway = { let gateway = Arc::new(crate::moq_gateway::MoqGateway::new()); @@ -2423,6 +3042,7 @@ pub fn create_app( config: Arc::new(config), event_tx, plugin_manager, + marketplace_jobs, auth, #[cfg(feature = "moq")] moq_gateway, @@ -2441,6 +3061,10 @@ pub fn create_app( .route("/health", get(health_handler)) .route("/.well-known/jwks.json", get(jwks_handler)) .route("/api/v1/process", oneshot_route) + .route("/api/v1/marketplace/registries", get(list_marketplace_registries_handler)) + .route("/api/v1/marketplace/plugins", get(list_marketplace_plugins_handler)) + .route("/api/v1/marketplace/plugins/{plugin_id}", get(get_marketplace_plugin_handler)) + .route("/api/v1/plugins/install", post(install_plugin_handler)) .route( "/api/v1/plugins", get(list_plugins_handler) @@ -2449,6 +3073,8 @@ pub fn create_app( .layer(DefaultBodyLimit::max(app_state.config.server.max_body_size)), ) .route("/api/v1/plugins/{kind}", delete(delete_plugin_handler)) + 
.route("/api/v1/jobs/{job_id}", get(get_job_handler)) + .route("/api/v1/jobs/{job_id}/cancel", post(cancel_job_handler)) .route("/api/v1/control", get(websocket_handler)) .route("/api/v1/permissions", get(get_permissions_handler)) .route("/api/v1/config", get(get_config_handler)) @@ -2932,7 +3558,7 @@ pub async fn start_server(config: &Config) -> Result<(), Box, pub event_tx: broadcast::Sender, pub plugin_manager: SharedUnifiedPluginManager, + pub marketplace_jobs: InstallJobQueue, pub auth: Arc, #[cfg(feature = "moq")] pub moq_gateway: Option>, diff --git a/apps/skit/tests/plugin_integration_test.rs b/apps/skit/tests/plugin_integration_test.rs index 811a1c1f..c0e7a87e 100644 --- a/apps/skit/tests/plugin_integration_test.rs +++ b/apps/skit/tests/plugin_integration_test.rs @@ -20,6 +20,10 @@ use serde_json::json; use std::net::SocketAddr; use std::path::Path; use std::sync::{Arc, OnceLock}; +use streamkit_server::marketplace::PluginKind; +use streamkit_server::plugin_records::{ + active_dir as plugin_active_dir, record_path as plugin_record_path, ActivePluginRecord, +}; use streamkit_server::Config; use tokio::fs; use tokio::net::TcpListener; @@ -50,7 +54,13 @@ impl TestServer { let temp_dir = tempfile::tempdir().unwrap(); let plugins_dir = temp_dir.path().join("plugins"); fs::create_dir_all(&plugins_dir).await.unwrap(); + Self::start_with_dirs(temp_dir, plugins_dir).await + } + async fn start_with_dirs( + temp_dir: tempfile::TempDir, + plugins_dir: std::path::PathBuf, + ) -> Option { let listener = match TcpListener::bind("127.0.0.1:0").await { Ok(listener) => listener, Err(e) if e.kind() == std::io::ErrorKind::PermissionDenied => return None, @@ -60,7 +70,7 @@ impl TestServer { let mut config = Config::default(); config.plugins.directory = plugins_dir.to_string_lossy().to_string(); - config.plugins.allow_http_management = true; + config.plugins.http_management.allow_http_management = true; let (shutdown_tx, shutdown_rx) = oneshot::channel::<()>(); let handle = 
tokio::spawn(async move { @@ -88,6 +98,25 @@ impl TestServer { } } +async fn wait_for_plugin(client: &reqwest::Client, url: &str, kind: &str) -> bool { + let deadline = tokio::time::Instant::now() + Duration::from_secs(3); + loop { + if tokio::time::Instant::now() > deadline { + return false; + } + if let Ok(response) = client.get(url).send().await { + if response.status().is_success() { + if let Ok(plugins) = response.json::>().await { + if plugins.iter().any(|entry| entry.get("kind") == Some(&json!(kind))) { + return true; + } + } + } + } + tokio::time::sleep(Duration::from_millis(50)).await; + } +} + /// Build the gain plugin if not already built async fn ensure_gain_plugin_built() -> std::path::PathBuf { GAIN_PLUGIN_PATH @@ -331,6 +360,107 @@ async fn test_native_plugin_in_pipeline() { server.shutdown().await; } +#[tokio::test] +async fn test_load_active_plugin_on_startup() { + let _ = tracing_subscriber::fmt::try_init(); + let _permit = acquire_test_permit().await; + + let plugin_path = ensure_gain_plugin_built().await; + let temp_dir = tempfile::tempdir().unwrap(); + let plugins_dir = temp_dir.path().join("plugins"); + fs::create_dir_all(&plugins_dir).await.unwrap(); + + let bundle_dir = plugins_dir.join("bundles").join("gain").join("1.0.0"); + fs::create_dir_all(&bundle_dir).await.unwrap(); + let entrypoint_path = bundle_dir + .join(plugin_path.file_name().expect("plugin file name").to_string_lossy().to_string()); + fs::copy(&plugin_path, &entrypoint_path).await.unwrap(); + + fs::create_dir_all(plugin_active_dir(&plugins_dir)).await.unwrap(); + let record = ActivePluginRecord { + plugin_id: "gain".to_string(), + version: "1.0.0".to_string(), + node_kind: "gain".to_string(), + kind: PluginKind::Native, + entrypoint: entrypoint_path.to_string_lossy().into_owned(), + installed_at_ms: 0, + }; + let record_path = plugin_record_path(&plugins_dir, "gain").unwrap(); + fs::write(&record_path, serde_json::to_vec_pretty(&record).unwrap()).await.unwrap(); + + let 
Some(server) = TestServer::start_with_dirs(temp_dir, plugins_dir).await else { + eprintln!("Skipping plugin integration tests: local TCP bind not permitted"); + return; + }; + + let client = reqwest::Client::new(); + let url = format!("http://{}/api/v1/plugins", server.addr); + + assert!( + wait_for_plugin(&client, &url, "plugin::native::gain").await, + "Expected plugin to load from active record" + ); + + server.shutdown().await; +} + +#[tokio::test] +async fn test_uninstall_marketplace_plugin_removes_bundle() { + let _ = tracing_subscriber::fmt::try_init(); + let _permit = acquire_test_permit().await; + + let plugin_path = ensure_gain_plugin_built().await; + let temp_dir = tempfile::tempdir().unwrap(); + let plugins_dir = temp_dir.path().join("plugins"); + fs::create_dir_all(&plugins_dir).await.unwrap(); + + let bundle_dir = plugins_dir.join("bundles").join("gain").join("1.0.0"); + fs::create_dir_all(&bundle_dir).await.unwrap(); + let entrypoint_path = bundle_dir + .join(plugin_path.file_name().expect("plugin file name").to_string_lossy().to_string()); + fs::copy(&plugin_path, &entrypoint_path).await.unwrap(); + + fs::create_dir_all(plugin_active_dir(&plugins_dir)).await.unwrap(); + let record = ActivePluginRecord { + plugin_id: "gain".to_string(), + version: "1.0.0".to_string(), + node_kind: "gain".to_string(), + kind: PluginKind::Native, + entrypoint: entrypoint_path.to_string_lossy().into_owned(), + installed_at_ms: 0, + }; + let record_path = plugin_record_path(&plugins_dir, "gain").unwrap(); + fs::write(&record_path, serde_json::to_vec_pretty(&record).unwrap()).await.unwrap(); + + let Some(server) = TestServer::start_with_dirs(temp_dir, plugins_dir.clone()).await else { + eprintln!("Skipping plugin integration tests: local TCP bind not permitted"); + return; + }; + + let client = reqwest::Client::new(); + let url = format!("http://{}/api/v1/plugins", server.addr); + assert!( + wait_for_plugin(&client, &url, "plugin::native::gain").await, + "Expected plugin 
to load from active record" + ); + + let unload_url = format!("http://{}/api/v1/plugins/plugin%3A%3Anative%3A%3Again", server.addr); + let response = client.delete(&unload_url).send().await.expect("Failed to unload"); + assert_eq!(response.status(), StatusCode::OK); + + let plugins: Vec = + client.get(&url).send().await.unwrap().json().await.unwrap(); + assert_eq!(plugins.len(), 0, "Plugin should be unloaded"); + + assert!(!tokio::fs::try_exists(&record_path).await.unwrap(), "Active record should be removed"); + assert!( + !tokio::fs::try_exists(&bundle_dir).await.unwrap(), + "Bundle directory should be removed" + ); + + server.shutdown().await; +} + #[tokio::test] async fn test_load_invalid_plugin() { let _ = tracing_subscriber::fmt::try_init(); diff --git a/crates/engine/src/dynamic_actor.rs b/crates/engine/src/dynamic_actor.rs index ebd88818..efff4b5c 100644 --- a/crates/engine/src/dynamic_actor.rs +++ b/crates/engine/src/dynamic_actor.rs @@ -17,6 +17,7 @@ use crate::{ }; use opentelemetry::KeyValue; use std::collections::HashMap; +use std::sync::{Arc, RwLock}; use streamkit_core::control::{EngineControlMessage, NodeControlMessage}; use streamkit_core::error::StreamKitError; use streamkit_core::frame_pool::AudioFramePool; @@ -39,7 +40,7 @@ pub struct NodePinMetadata { /// The state for the long-running, dynamic engine actor (Control Plane). 
pub struct DynamicEngine { - pub(super) registry: NodeRegistry, + pub(super) registry: Arc>, pub(super) control_rx: mpsc::Receiver, pub(super) query_rx: mpsc::Receiver, pub(super) live_nodes: HashMap, @@ -916,7 +917,18 @@ impl DynamicEngine { EngineControlMessage::AddNode { node_id, kind, params } => { self.engine_operations_counter.add(1, &[KeyValue::new("operation", "add_node")]); tracing::info!(name = %node_id, kind = %kind, "Adding node to graph"); - match self.registry.create_node(&kind, params.as_ref()) { + let node_result = { + let registry = match self.registry.read() { + Ok(guard) => guard, + Err(err) => { + tracing::error!(error = %err, "Registry lock poisoned while adding node"); + return true; + }, + }; + registry.create_node(&kind, params.as_ref()) + }; + + match node_result { Ok(node) => { self.node_kinds.insert(node_id.clone(), kind.clone()); // Delegate initialization to helper function diff --git a/crates/engine/src/lib.rs b/crates/engine/src/lib.rs index bff31e87..e5a6fabb 100644 --- a/crates/engine/src/lib.rs +++ b/crates/engine/src/lib.rs @@ -215,19 +215,9 @@ impl Engine { "Starting Dynamic Engine actor" ); - // expect is documented in #[doc] Panics section above - #[allow(clippy::expect_used)] - let registry_snapshot = { - let guard = self - .registry - .read() - .expect("Engine registry poisoned while starting dynamic actor"); - guard.clone() - }; - let meter = global::meter("skit_engine"); let dynamic_engine = DynamicEngine { - registry: registry_snapshot, + registry: Arc::clone(&self.registry), control_rx, query_rx, live_nodes: HashMap::new(), diff --git a/crates/engine/src/tests/connection_types.rs b/crates/engine/src/tests/connection_types.rs index 3996f5b9..72076f52 100644 --- a/crates/engine/src/tests/connection_types.rs +++ b/crates/engine/src/tests/connection_types.rs @@ -6,6 +6,7 @@ use super::super::*; use crate::dynamic_actor::{DynamicEngine, NodePinMetadata}; +use streamkit_core::registry::NodeRegistry; use 
streamkit_core::types::{AudioFormat, PacketType, SampleFormat}; use streamkit_core::{InputPin, OutputPin, PinCardinality}; use tokio::sync::mpsc; @@ -20,7 +21,7 @@ fn create_test_engine() -> DynamicEngine { let meter = opentelemetry::global::meter("test"); DynamicEngine { - registry: NodeRegistry::new(), + registry: std::sync::Arc::new(std::sync::RwLock::new(NodeRegistry::new())), control_rx, query_rx, live_nodes: HashMap::new(), diff --git a/docs/astro.config.mjs b/docs/astro.config.mjs index e7ff87cf..e0e72afc 100644 --- a/docs/astro.config.mjs +++ b/docs/astro.config.mjs @@ -61,11 +61,14 @@ export default defineConfig({ { label: 'Observability', slug: 'guides/observability' }, { label: 'Script Node', slug: 'guides/script-node' }, { label: 'Using the Web UI', slug: 'guides/web-ui' }, + { label: 'Installing Plugins', slug: 'guides/installing-plugins' }, { label: 'Security', slug: 'guides/security' }, { label: 'Authentication', slug: 'guides/authentication' }, { label: 'Authorization & Roles', slug: 'guides/authorization' }, { label: 'Security Configuration', slug: 'guides/security-configuration' }, { label: 'Development Workflow', slug: 'guides/development' }, + { label: 'Publishing Plugins', slug: 'guides/publishing-plugins' }, + { label: 'Model Downloads', slug: 'guides/model-downloads' }, { label: 'Writing Plugins', slug: 'guides/writing-plugins' }, ], }, diff --git a/docs/src/content/docs/guides/creating-pipelines.md b/docs/src/content/docs/guides/creating-pipelines.md index 20cc1dec..d666aa14 100644 --- a/docs/src/content/docs/guides/creating-pipelines.md +++ b/docs/src/content/docs/guides/creating-pipelines.md @@ -27,7 +27,7 @@ steps: target_sample_rate: 16000 - kind: plugin::native::whisper params: - model_path: models/ggml-base.en-q5_1.bin + model_path: models/ggml-tiny.en-q5_1.bin - kind: core::json_serialize - kind: streamkit::http_output ``` diff --git a/docs/src/content/docs/guides/installing-plugins.md 
b/docs/src/content/docs/guides/installing-plugins.md new file mode 100644 index 00000000..2a305c67 --- /dev/null +++ b/docs/src/content/docs/guides/installing-plugins.md @@ -0,0 +1,124 @@ +--- +# SPDX-FileCopyrightText: © 2025 StreamKit Contributors +# SPDX-License-Identifier: MPL-2.0 +title: Installing Plugins +description: Install marketplace plugins or upload trusted plugins manually +--- + +StreamKit supports two install paths: + +- Marketplace installs (recommended) +- Manual upload (trusted code only) + +## Marketplace prerequisites + +Enable marketplace browsing and install gates: + +```toml +[plugins] +marketplace_enabled = true +allow_native_marketplace = false # set true only if you trust the registry +registries = ["https://streamkit.dev/marketplace/index.json"] +trusted_pubkeys = [ + "untrusted comment: minisign public key 81C485A94492F33F\nRWQ/85JEqYXEgX+2kl7Rwd8AcpVjYciSLzvLggzivbGyIrDPjfmcqjYP\n", +] +``` + +RBAC must allow plugin operations: + +- `load_plugins = true` to install +- `delete_plugins = true` to uninstall +- `allowed_plugins` must include the plugin kind (e.g., `plugin::native::whisper` or `plugin::*`) + +Optional model download settings: + +```toml +[plugins] +models_dir = "/var/lib/streamkit/models" +huggingface_token = "${HF_TOKEN}" +``` + +> [!NOTE] +> Marketplace installs are blocked when `[plugins].marketplace_enabled = false`. Native marketplace installs +> are blocked unless `[plugins].allow_native_marketplace = true`. + +## Marketplace URL security + +By default, marketplace URLs must use HTTPS and resolve to public hosts only. Localhost, private, +link-local, multicast, and `.local` hosts are blocked. Same-origin enforcement is optional; set +`marketplace_require_registry_origin = true` for stricter deployments. + +`marketplace_url_allowlist` relaxes only the same-origin requirement. It does **not** bypass HTTPS +or host/IP blocking. 
+ +If you enable same-origin enforcement and your registry index is on GitHub Pages while bundles are +on GitHub Releases, you must allowlist all hosts in the redirect chain (the installer validates +every hop). Example: + +```toml +[plugins] +marketplace_url_allowlist = [ + "https://github.com", + "https://objects.githubusercontent.com", + "https://release-assets.githubusercontent.com", +] +``` + +For local testing, explicitly opt in: + +```toml +[plugins] +marketplace_scheme_policy = "allow_http" +marketplace_host_policy = "allow_private" +marketplace_url_allowlist = ["http://127.0.0.1:*"] +``` + +Optional DNS checks (best-effort): + +```toml +[plugins] +marketplace_resolve_hostnames = true +``` + +> [!NOTE] +> DNS rebinding cannot be fully prevented. Hostname validation happens at request time and DNS +> answers can change afterward. + +## Install via the UI + +1. Open **Admin → Plugins → Marketplace**. +2. Choose a registry and select a plugin. +3. Verify the signature status and review licenses. +4. Toggle **Download models after install** if available. When a plugin defines multiple models, + select the ones you want from the checklist. +5. Click **Install** and watch the progress job. + +If a model is marked as gated, the server must have a Hugging Face token configured or the job will fail. + +## Uninstalling plugins + +Use **Admin → Plugins → Installed** to unload a plugin. For marketplace installs, this removes the active +record and bundle on disk (unless you pass `keep_file=true` via the API). + +Manual API uninstall: + +```bash +curl -X DELETE "http://127.0.0.1:4545/api/v1/plugins/plugin%3A%3Anative%3A%3Again" +``` + +## Manual upload (trusted only) + +Manual upload is disabled by default and should be reserved for trusted environments. 
+ +```toml +[plugins] +allow_http_management = true +``` + +Upload using the UI or: + +```bash +curl -F plugin=@libmy_plugin.so http://127.0.0.1:4545/api/v1/plugins +``` + +Manual uploads are stored under `.plugins/native/` or `.plugins/wasm/` and are loaded on restart. diff --git a/docs/src/content/docs/guides/model-downloads.md b/docs/src/content/docs/guides/model-downloads.md new file mode 100644 index 00000000..b1403f49 --- /dev/null +++ b/docs/src/content/docs/guides/model-downloads.md @@ -0,0 +1,73 @@ +--- +# SPDX-FileCopyrightText: © 2025 StreamKit Contributors +# SPDX-License-Identifier: MPL-2.0 +title: Model Downloads +description: Server-side model downloads for marketplace plugins +--- + +Marketplace plugins can declare model assets in their `manifest.json`. StreamKit downloads models +server-side and never exposes tokens to the browser. + +## Configuration + +```toml +[plugins] +models_dir = "/var/lib/streamkit/models" # defaults to ./models +huggingface_token = "${HF_TOKEN}" # required for gated Hugging Face models +allow_model_urls = false # set true to allow ModelSource::Url +``` + +`models_dir` is where model files are written. The paths inside `models[]` are preserved under this +directory. + +## Manifest fields + +Example `models[]` entries: + +```json +[ + { + "id": "whisper-tiny-en-q5_1", + "name": "Whisper tiny.en (q5_1)", + "default": true, + "source": "huggingface", + "repo_id": "streamkit/whisper-models", + "revision": "main", + "files": ["ggml-tiny.en-q5_1.bin"], + "sha256": "..." + }, + { + "id": "silero-vad", + "name": "Silero VAD", + "default": true, + "source": "url", + "url": "https://example.com/models/silero-vad.onnx", + "sha256": "abc123..." + } +] +``` + +Official plugins mirror models under `streamkit/<plugin-id>-models` (for example, `streamkit/whisper-models`). + +If a model is `gated: true`, StreamKit requires a Hugging Face token to download it. + +`source: "url"` entries are disabled by default.
When enabled, model URLs go through the same +marketplace URL policy (HTTPS required by default, blocked host/IP ranges, and optional same-origin +enforcement). + +Model entries may include `id`, `name`, and `default`. When present, the UI lets admins select +which models to download and preselects those marked `default`. + +Model files can be archives (`.tar`, `.tar.gz`, `.tgz`, `.tar.bz2`, `.tbz2`, `.tar.zst`, `.tzst`). +When an archive is downloaded, StreamKit extracts it into `models_dir` and keeps the archive file. + +## UI behavior + +The Marketplace install panel includes a **Download models after install** toggle when models are +declared. When model IDs are present, the UI shows a checklist for selective downloads. Progress +is tracked as part of the install job. + +## License disclosure + +Plugin authors should include `license` or `license_url` for each model. The UI prompts admins to +acknowledge model licenses before installing. diff --git a/docs/src/content/docs/guides/publishing-plugins.md b/docs/src/content/docs/guides/publishing-plugins.md new file mode 100644 index 00000000..25a39316 --- /dev/null +++ b/docs/src/content/docs/guides/publishing-plugins.md @@ -0,0 +1,124 @@ +--- +# SPDX-FileCopyrightText: © 2025 StreamKit Contributors +# SPDX-License-Identifier: MPL-2.0 +title: Publishing to a Registry +description: Package, sign, and publish plugins for the StreamKit marketplace +--- + +This guide covers the v1 registry format for publishing StreamKit plugins (native or WASM). + +## Bundle layout + +Each release ships a bundle archive (for example `bundle.tar.zst`) that contains the plugin binary +and optional license files: + +``` +libmy_plugin.so +LICENSES/ +``` + +The entrypoint path in the manifest must match the plugin binary inside the bundle. The manifest and +signature are hosted alongside the bundle in the registry. + +## Manifest format + +`manifest.json` describes the plugin and bundle. 
Example: + +```json +{ + "schema_version": 1, + "id": "whisper", + "name": "Whisper", + "version": "1.2.3", + "node_kind": "whisper", + "kind": "native", + "entrypoint": "libwhisper.so", + "description": "Streaming speech-to-text using whisper.cpp", + "license": "MIT", + "bundle": { + "url": "https://github.com/org/repo/releases/download/v1.2.3/bundle.tar.zst", + "sha256": "abc123..." + }, + "models": [ + { + "id": "whisper-tiny-en-q5_1", + "name": "Whisper tiny.en (q5_1)", + "default": true, + "source": "huggingface", + "repo_id": "streamkit/whisper-models", + "revision": "main", + "files": ["ggml-tiny.en-q5_1.bin"], + "sha256": "abc123..." + } + ] +} +``` + +`models[]` is optional. Files are downloaded into `[plugins].models_dir` and keep their relative +paths. When `id`/`name`/`default` are provided, the UI allows selecting which models to download. +If a model file is an archive (`.tar`, `.tar.gz`, `.tar.bz2`, `.tar.zst`, etc.), StreamKit +extracts it into `models_dir` after download. + +For the official registry, use `scripts/marketplace/upload_models_to_hf.py` to upload mirrored +model files to a per-plugin Hugging Face repo (for example, `streamkit/whisper-models`) before +publishing manifests. + +When manifests reference `.tar.*` bundles, pass `--create-archives` to build the archives from the +local model directories before upload. + +## Sign the manifest + +StreamKit uses minisign-compatible Ed25519 signatures. + +```bash +# Create a keypair (once) +minisign -G -p streamkit.pub -s streamkit.key + +# Sign the manifest +minisign -S -s streamkit.key -m manifest.json -x manifest.minisig +``` + +Server admins must add the public key to `[plugins].trusted_pubkeys`. + +## Registry index + +Registries are static JSON files served over HTTPS. 
Example `index.json`: + +```json +{ + "schema_version": 1, + "plugins": [ + { + "id": "whisper", + "name": "Whisper", + "description": "Streaming speech-to-text using whisper.cpp", + "latest": "1.2.3", + "versions": [ + { + "version": "1.2.3", + "manifest_url": "https://example.com/plugins/whisper/1.2.3/manifest.json", + "signature_url": "https://example.com/plugins/whisper/1.2.3/manifest.minisig", + "published_at": "2025-01-24" + } + ] + } + ] +} +``` + +By default, StreamKit requires HTTPS and blocks non-public hosts. Same-origin enforcement is +optional; when enabled, manifest/signature/bundle URLs must share origin with the registry index. +If you host bundles or manifests on a different origin, admins must allowlist that origin (and +explicitly allow HTTP if they choose to use it). + +## Recommended hosting + +- Registry metadata: GitHub Pages (static `index.json` + per-version manifests). +- Bundles: GitHub Releases (large immutable binaries). + +If you mix GitHub Pages (registry/manifest) with GitHub Releases (bundles) and admins enable +`marketplace_require_registry_origin`, they must allowlist the release hosts because redirects are +validated per hop. The allowlist should include `https://github.com` plus the release asset hosts +(commonly `https://objects.githubusercontent.com` and `https://release-assets.githubusercontent.com`). + +See [Installing Plugins](/guides/installing-plugins/) for the admin workflow. diff --git a/docs/src/content/docs/guides/web-ui.md b/docs/src/content/docs/guides/web-ui.md index 6282a315..3f1e7ba1 100644 --- a/docs/src/content/docs/guides/web-ui.md +++ b/docs/src/content/docs/guides/web-ui.md @@ -46,7 +46,7 @@ Design View is the default route and is split into three panes: - **Left pane**: library and tools: - **Nodes**: the node palette/library (built-ins + loaded plugins). - - **Plugins**: view/manage loaded plugins (availability depends on your role/config). 
+ - **Plugins**: view/manage loaded plugins and marketplace installs (availability depends on your role/config). - **Samples**: example pipelines you can load as a starting point. - **Fragments**: reusable building blocks you can drop into a graph. - **Center pane (canvas)**: a React Flow editor where you: diff --git a/docs/src/content/docs/guides/writing-plugins.md b/docs/src/content/docs/guides/writing-plugins.md index b7191070..e6553fa4 100644 --- a/docs/src/content/docs/guides/writing-plugins.md +++ b/docs/src/content/docs/guides/writing-plugins.md @@ -34,6 +34,12 @@ Uploaded plugins are stored under your configured plugin directory (default: `.p - `.plugins/native/` - `.plugins/wasm/` +- Marketplace installs use `.plugins/bundles/` and `.plugins/active/`. + +## Marketplace packaging + +StreamKit also supports signed marketplace bundles with `manifest.json` + `manifest.minisig`. +See [Publishing to a Registry](/guides/publishing-plugins/) for the bundle layout and signing steps. ## Native Plugins diff --git a/docs/src/content/docs/reference/configuration-generated.md b/docs/src/content/docs/reference/configuration-generated.md index c00f086f..ed25a88b 100644 --- a/docs/src/content/docs/reference/configuration-generated.md +++ b/docs/src/content/docs/reference/configuration-generated.md @@ -9,32 +9,29 @@ description: Auto-generated configuration reference from schema and defaults This page is auto-generated from the server's configuration schema and `Config::default()`. For a human-friendly guide and examples, see [Configuration](./configuration/). -## `[auth]` +## `[server]` -Authentication configuration for built-in JWT-based auth. +HTTP server configuration including TLS and CORS settings. | Option | Type | Default | Description | |--------|------|---------|-------------| -| `api_default_ttl_secs` | integer (uint64) | `86400` | Default TTL for API tokens in seconds. 
Default: 86400 (24 hours) | -| `api_max_ttl_secs` | integer (uint64) | `2592000` | Maximum TTL for API tokens in seconds. Default: 2592000 (30 days) | -| `cookie_name` | string | `skit_session` | Cookie name for browser sessions. Default: "skit_session" | -| `mode` | string | `auto` | Authentication mode for the server. | -| `moq_default_ttl_secs` | integer (uint64) | `3600` | Default TTL for MoQ tokens in seconds. Default: 3600 (1 hour) | -| `moq_max_ttl_secs` | integer (uint64) | `86400` | Maximum TTL for MoQ tokens in seconds. Default: 86400 (1 day) | -| `state_dir` | string | `.streamkit/auth` | Directory for auth state (keys, tokens). Default: ".streamkit/auth" | +| `address` | string | `127.0.0.1:4545` | — | +| `tls` | boolean | `false` | — | +| `cert_path` | string | `` | — | +| `key_path` | string | `` | — | +| `samples_dir` | string | `./samples/pipelines` | — | +| `max_body_size` | integer (uint) | `104857600` | Maximum request body size in bytes for multipart uploads (default: 100MB) | +| `base_path` | null | string | `null` | Base path for subpath deployments (e.g., "/s/session_xxx"). Used to inject tag in HTML. If None, no tag is injected (root deployment). | +| `cors` | object | `{"allowed_origins":["http:/...` | CORS configuration for cross-origin requests. | -## `[engine]` +## `[security]` -Engine configuration for packet processing and buffering. +Security configuration for file access and other security-sensitive settings. | Option | Type | Default | Description | |--------|------|---------|-------------| -| `advanced` | object | `{"codec_channel_capacity":n...` | Advanced internal buffer configuration for power users. These settings affect async/blocking handoff channels in codec and container nodes. Most users should not need to modify these values. Only adjust if you understand the latency/throughput tradeoffs and have specific performance requirements. All values are in packets (not bytes). The actual memory footprint depends on packet size. 
| -| `node_input_capacity` | integer | null (uint) | `null` | Buffer size for node input channels (default: 128 packets) Higher = more buffering/latency, lower = more backpressure/responsiveness For low-latency streaming, consider 8-16 packets (~160-320ms at 20ms/frame) | -| `oneshot` | object | `{"io_channel_capacity":null...` | Oneshot pipeline configuration (HTTP batch processing). These settings apply to stateless pipelines executed via the `/api/v1/process` endpoint. Oneshot pipelines use larger buffers by default than dynamic sessions because they don't require tight backpressure coordination. | -| `packet_batch_size` | integer (uint) | `32` | Batch size for processing packets in nodes (default: 32) Lower values = more responsive to control messages, higher values = better throughput | -| `pin_distributor_capacity` | integer | null (uint) | `null` | Buffer size between node output and pin distributor (default: 64 packets) For low-latency streaming, consider 4-8 packets | -| `profile` | null | value | `null` | Optional tuning profile that provides sensible buffering defaults. Explicit values for `node_input_capacity` and/or `pin_distributor_capacity` take precedence. | +| `allowed_file_paths` | array | `["samples/**"]` | Allowed file paths for file_reader nodes. Supports glob patterns (e.g., "samples/**", "/data/media/*"). Relative paths are resolved against the server's working directory. Default: `["samples/**"]` - only allow reading from the samples directory. Set to `["**"]` to allow all paths (not recommended for production). | +| `allowed_write_paths` | array | `[]` | Allowed file paths for file_writer nodes. Default: empty (deny all writes). This is intentional: arbitrary file writes from user-provided pipelines are a high-risk capability. Patterns follow the same rules as `allowed_file_paths` and are matched against the resolved absolute target path. | ## `[log]` @@ -43,24 +40,37 @@ Logging configuration for console and file output. 
| Option | Type | Default | Description | |--------|------|---------|-------------| | `console_enable` | boolean | `true` | — | -| `console_level` | string enum[debug, info, warn, error] | `info` | Log level for filtering messages. | | `file_enable` | boolean | `true` | — | -| `file_format` | string | `text` | Log file format options. | +| `console_level` | string enum[debug, info, warn, error] | `info` | Log level for filtering messages. | | `file_level` | string enum[debug, info, warn, error] | `info` | Log level for filtering messages. | | `file_path` | string | `./skit.log` | — | +| `file_format` | string | `text` | Log file format options. | -## `[permissions]` +## `[telemetry]` -Permission configuration section for skit.toml. +Telemetry and observability configuration (OpenTelemetry, tokio-console). | Option | Type | Default | Description | |--------|------|---------|-------------| -| `allow_insecure_no_auth` | boolean | `false` | Allow starting the server on a non-loopback address without built-in auth or a trusted role header. This only applies when built-in auth is disabled. This is unsafe: all requests fall back to `SK_ROLE`/`default_role`. The server refuses to start in this configuration unless this flag is set. | -| `default_role` | string | `admin` | Default role for requests without an authenticated role When built-in auth is disabled, this becomes the effective role for requests that are not assigned a role via a trusted role header or `SK_ROLE`. For production deployments, prefer enabling built-in auth (`[auth].mode`) or running behind an authenticating reverse proxy that sets `[permissions].role_header`. 
| -| `max_concurrent_oneshots` | integer | null (uint) | `null` | Maximum concurrent oneshot pipelines (global limit) None = unlimited | -| `max_concurrent_sessions` | integer | null (uint) | `null` | Maximum concurrent dynamic sessions (global limit, applies to all users) None = unlimited | -| `role_header` | null | string | `null` | Optional trusted HTTP header used to select a role (e.g. "x-role" or "x-streamkit-role"). If unset, StreamKit ignores role headers entirely and uses `SK_ROLE`/`default_role`. Security note: Only enable this when running behind a trusted reverse proxy or auth layer that (a) authenticates the caller and (b) strips any incoming header with the same name before setting it. | -| `roles` | object | `{"admin":{"access_all_sessi...` | Map of role name -> permissions | +| `enable` | boolean | `true` | — | +| `tracing_enable` | boolean | `false` | Enable OpenTelemetry tracing (spans) export. Metrics export is controlled separately via `otlp_endpoint`. | +| `otlp_endpoint` | null | string | `null` | — | +| `otlp_traces_endpoint` | null | string | `null` | OTLP endpoint for trace export (e.g., `http://localhost:4318/v1/traces`). | +| `otlp_headers` | object | `{}` | — | +| `tokio_console` | boolean | `false` | — | + +## `[engine]` + +Engine configuration for packet processing and buffering. + +| Option | Type | Default | Description | +|--------|------|---------|-------------| +| `profile` | null | value | `null` | Optional tuning profile that provides sensible buffering defaults. Explicit values for `node_input_capacity` and/or `pin_distributor_capacity` take precedence. 
| +| `packet_batch_size` | integer (uint) | `32` | Batch size for processing packets in nodes (default: 32) Lower values = more responsive to control messages, higher values = better throughput | +| `node_input_capacity` | integer | null (uint) | `null` | Buffer size for node input channels (default: 128 packets) Higher = more buffering/latency, lower = more backpressure/responsiveness For low-latency streaming, consider 8-16 packets (~160-320ms at 20ms/frame) | +| `pin_distributor_capacity` | integer | null (uint) | `null` | Buffer size between node output and pin distributor (default: 64 packets) For low-latency streaming, consider 4-8 packets | +| `oneshot` | object | `{"packet_batch_size":32,"me...` | Oneshot pipeline configuration (HTTP batch processing). These settings apply to stateless pipelines executed via the `/api/v1/process` endpoint. Oneshot pipelines use larger buffers by default than dynamic sessions because they don't require tight backpressure coordination. | +| `advanced` | object | `{"codec_channel_capacity":n...` | Advanced internal buffer configuration for power users. These settings affect async/blocking handoff channels in codec and container nodes. Most users should not need to modify these values. Only adjust if you understand the latency/throughput tradeoffs and have specific performance requirements. All values are in packets (not bytes). The actual memory footprint depends on packet size. | ## `[plugins]` @@ -68,8 +78,20 @@ Plugin directory configuration. | Option | Type | Default | Description | |--------|------|---------|-------------| -| `allow_http_management` | boolean | `false` | Controls whether runtime plugin upload/delete is allowed via the public APIs. Default is false to avoid accidental exposure when running without an auth layer. | | `directory` | string | `.plugins` | — | +| `allow_http_management` | boolean | `false` | Controls whether runtime plugin upload/delete is allowed via the public APIs. 
Default is false to avoid accidental exposure when running without an auth layer. | +| `marketplace_enabled` | boolean | `false` | Enables the plugin marketplace API and UI (default: false). | +| `allow_native_marketplace` | boolean | `false` | Allows native plugins to be installed from a marketplace (default: false). Native plugins run in-process and are unsafe without full trust. | +| `allow_model_urls` | boolean | `false` | Allow direct URL model downloads from manifests (default: false). | +| `marketplace_require_registry_origin` | boolean | `false` | Require marketplace URLs to share origin with the registry (default: false). | +| `marketplace_scheme_policy` | string enum[https_only, allow_http] | `https_only` | — | +| `marketplace_host_policy` | string enum[public_only, allow_private] | `public_only` | — | +| `marketplace_resolve_hostnames` | boolean | `false` | Resolve hostnames for marketplace URLs and check resolved IPs (default: false). | +| `marketplace_url_allowlist` | array | `[]` | Allowed marketplace origins (e.g., "https://example.com", "https://example.com:*"). | +| `trusted_pubkeys` | array | `[]` | Minisign public keys (contents of `.pub` files) trusted for marketplace manifests. | +| `registries` | array | `[]` | Registry index URLs (e.g., `https://example.com/index.json`). | +| `models_dir` | null | string | `null` | Optional directory to store downloaded models (defaults to `models` when unset). | +| `huggingface_token` | null | string | `null` | Optional Hugging Face token for gated model downloads. | ## `[resources]` @@ -81,53 +103,43 @@ Resource management configuration for ML models and shared resources. | `max_memory_mb` | integer | null (uint) | `null` | Optional memory limit in megabytes for cached resources (models). When set, least-recently-used resources will be evicted to stay under the limit. Only applies when keep_models_loaded is false. 
| | `prewarm` | object | `{"enabled":false,"plugins":[]}` | Configuration for pre-warming plugins at startup. | -## `[script]` - -Configuration for the core::script node. - -| Option | Type | Default | Description | -|--------|------|---------|-------------| -| `default_memory_limit_mb` | integer (uint) | `64` | Default memory limit for QuickJS runtime (in megabytes) | -| `default_timeout_ms` | integer (uint64) | `100` | Default timeout for script execution per packet (in milliseconds) | -| `global_fetch_allowlist` | array | `[]` | Global fetch allowlist (empty = block all fetch() calls) Applies to all script nodes. Security note: there is no per-pipeline allowlist override; this prevents bypass via user-provided pipelines. | -| `secrets` | object | `{}` | Available secrets (name → environment variable mapping) Empty map = no secrets available to any script node Secrets are loaded from environment variables at server startup and can be injected into HTTP headers via pipeline configuration | - -## `[security]` +## `[permissions]` -Security configuration for file access and other security-sensitive settings. +Permission configuration section for skit.toml. | Option | Type | Default | Description | |--------|------|---------|-------------| -| `allowed_file_paths` | array | `["samples/**"]` | Allowed file paths for file_reader nodes. Supports glob patterns (e.g., "samples/**", "/data/media/*"). Relative paths are resolved against the server's working directory. Default: `["samples/**"]` - only allow reading from the samples directory. Set to `["**"]` to allow all paths (not recommended for production). | -| `allowed_write_paths` | array | `[]` | Allowed file paths for file_writer nodes. Default: empty (deny all writes). This is intentional: arbitrary file writes from user-provided pipelines are a high-risk capability. Patterns follow the same rules as `allowed_file_paths` and are matched against the resolved absolute target path. 
| +| `default_role` | string | `admin` | Default role for requests without an authenticated role When built-in auth is disabled, this becomes the effective role for requests that are not assigned a role via a trusted role header or `SK_ROLE`. For production deployments, prefer enabling built-in auth (`[auth].mode`) or running behind an authenticating reverse proxy that sets `[permissions].role_header`. | +| `role_header` | null | string | `null` | Optional trusted HTTP header used to select a role (e.g. "x-role" or "x-streamkit-role"). If unset, StreamKit ignores role headers entirely and uses `SK_ROLE`/`default_role`. Security note: Only enable this when running behind a trusted reverse proxy or auth layer that (a) authenticates the caller and (b) strips any incoming header with the same name before setting it. | +| `allow_insecure_no_auth` | boolean | `false` | Allow starting the server on a non-loopback address without built-in auth or a trusted role header. This only applies when built-in auth is disabled. This is unsafe: all requests fall back to `SK_ROLE`/`default_role`. The server refuses to start in this configuration unless this flag is set. | +| `roles` | object | `{"admin":{"create_sessions"...` | Map of role name -> permissions | +| `max_concurrent_sessions` | integer | null (uint) | `null` | Maximum concurrent dynamic sessions (global limit, applies to all users) None = unlimited | +| `max_concurrent_oneshots` | integer | null (uint) | `null` | Maximum concurrent oneshot pipelines (global limit) None = unlimited | -## `[server]` +## `[script]` -HTTP server configuration including TLS and CORS settings. +Configuration for the core::script node. | Option | Type | Default | Description | |--------|------|---------|-------------| -| `address` | string | `127.0.0.1:4545` | — | -| `base_path` | null | string | `null` | Base path for subpath deployments (e.g., "/s/session_xxx"). Used to inject tag in HTML. If None, no tag is injected (root deployment). 
| -| `cert_path` | string | `` | — | -| `cors` | object | `{"allowed_origins":["http:/...` | CORS configuration for cross-origin requests. | -| `key_path` | string | `` | — | -| `max_body_size` | integer (uint) | `104857600` | Maximum request body size in bytes for multipart uploads (default: 100MB) | -| `samples_dir` | string | `./samples/pipelines` | — | -| `tls` | boolean | `false` | — | +| `default_timeout_ms` | integer (uint64) | `100` | Default timeout for script execution per packet (in milliseconds) | +| `default_memory_limit_mb` | integer (uint) | `64` | Default memory limit for QuickJS runtime (in megabytes) | +| `global_fetch_allowlist` | array | `[]` | Global fetch allowlist (empty = block all fetch() calls) Applies to all script nodes. Security note: there is no per-pipeline allowlist override; this prevents bypass via user-provided pipelines. | +| `secrets` | object | `{}` | Available secrets (name → environment variable mapping) Empty map = no secrets available to any script node Secrets are loaded from environment variables at server startup and can be injected into HTTP headers via pipeline configuration | -## `[telemetry]` +## `[auth]` -Telemetry and observability configuration (OpenTelemetry, tokio-console). +Authentication configuration for built-in JWT-based auth. | Option | Type | Default | Description | |--------|------|---------|-------------| -| `enable` | boolean | `true` | — | -| `otlp_endpoint` | null | string | `null` | — | -| `otlp_headers` | object | `{}` | — | -| `otlp_traces_endpoint` | null | string | `null` | OTLP endpoint for trace export (e.g., `http://localhost:4318/v1/traces`). | -| `tokio_console` | boolean | `false` | — | -| `tracing_enable` | boolean | `false` | Enable OpenTelemetry tracing (spans) export. Metrics export is controlled separately via `otlp_endpoint`. | +| `mode` | string | `auto` | Authentication mode for the server. | +| `state_dir` | string | `.streamkit/auth` | Directory for auth state (keys, tokens). 
Default: ".streamkit/auth" | +| `cookie_name` | string | `skit_session` | Cookie name for browser sessions. Default: "skit_session" | +| `api_default_ttl_secs` | integer (uint64) | `86400` | Default TTL for API tokens in seconds. Default: 86400 (24 hours) | +| `api_max_ttl_secs` | integer (uint64) | `2592000` | Maximum TTL for API tokens in seconds. Default: 2592000 (30 days) | +| `moq_default_ttl_secs` | integer (uint64) | `3600` | Default TTL for MoQ tokens in seconds. Default: 3600 (1 hour) | +| `moq_max_ttl_secs` | integer (uint64) | `86400` | Maximum TTL for MoQ tokens in seconds. Default: 86400 (1 day) | ## Raw JSON Schema @@ -136,144 +148,23 @@ Telemetry and observability configuration (OpenTelemetry, tokio-console). ```json { - "$defs": { - "AdvancedBufferConfig": { - "description": "Advanced internal buffer configuration for power users.\n\nThese settings affect async/blocking handoff channels in codec and container nodes.\nMost users should not need to modify these values. Only adjust if you understand\nthe latency/throughput tradeoffs and have specific performance requirements.\n\nAll values are in packets (not bytes). 
The actual memory footprint depends on packet size.", - "properties": { - "codec_channel_capacity": { - "description": "Capacity for codec processing channels (opus, flac, mp3) (default: 32)\nUsed for async/blocking handoff in codec nodes.", - "format": "uint", - "minimum": 0, - "type": [ - "integer", - "null" - ] - }, - "demuxer_buffer_size": { - "description": "Duplex buffer size for ogg demuxer in bytes (default: 65536)", - "format": "uint", - "minimum": 0, - "type": [ - "integer", - "null" - ] - }, - "moq_peer_channel_capacity": { - "description": "MoQ transport peer channel capacity (default: 100)\nUsed for network send/receive coordination in MoQ transport nodes.", - "format": "uint", - "minimum": 0, - "type": [ - "integer", - "null" - ] - }, - "stream_channel_capacity": { - "description": "Capacity for streaming reader channels (container demuxers) (default: 8)\nSmaller than codec channels because container frames may be larger.", - "format": "uint", - "minimum": 0, - "type": [ - "integer", - "null" - ] - } - }, - "type": "object" - }, - "AllowlistRule": { - "description": "URL allowlist rule for fetch() API in script nodes.", - "properties": { - "methods": { - "description": "Allowed HTTP methods", - "items": { - "type": "string" - }, - "type": "array" - }, - "url": { - "description": "URL pattern with wildcards (e.g., \"https://api.example.com/*\")", - "type": "string" - } - }, - "required": [ - "url", - "methods" - ], - "type": "object" - }, - "AuthConfig": { - "description": "Authentication configuration for built-in JWT-based auth.", - "properties": { - "api_default_ttl_secs": { - "default": 86400, - "description": "Default TTL for API tokens in seconds. Default: 86400 (24 hours)", - "format": "uint64", - "minimum": 0, - "type": "integer" - }, - "api_max_ttl_secs": { - "default": 2592000, - "description": "Maximum TTL for API tokens in seconds. 
Default: 2592000 (30 days)", - "format": "uint64", - "minimum": 0, - "type": "integer" - }, - "cookie_name": { - "default": "skit_session", - "description": "Cookie name for browser sessions. Default: \"skit_session\"", - "type": "string" - }, - "mode": { - "$ref": "#/$defs/AuthMode", - "default": "auto", - "description": "Authentication mode (auto, enabled, disabled)" - }, - "moq_default_ttl_secs": { - "default": 3600, - "description": "Default TTL for MoQ tokens in seconds. Default: 3600 (1 hour)", - "format": "uint64", - "minimum": 0, - "type": "integer" - }, - "moq_max_ttl_secs": { - "default": 86400, - "description": "Maximum TTL for MoQ tokens in seconds. Default: 86400 (1 day)", - "format": "uint64", - "minimum": 0, - "type": "integer" - }, - "state_dir": { - "default": ".streamkit/auth", - "description": "Directory for auth state (keys, tokens). Default: \".streamkit/auth\"", - "type": "string" - } - }, - "type": "object" - }, - "AuthMode": { - "description": "Authentication mode for the server.", - "oneOf": [ - { - "const": "auto", - "description": "Auto: disabled on loopback, enabled on non-loopback", - "type": "string" - }, - { - "const": "enabled", - "description": "Always require authentication", - "type": "string" - }, - { - "const": "disabled", - "description": "Disable authentication entirely (NOT recommended for production)", - "type": "string" - } - ] - }, - "CorsConfig": { - "description": "CORS configuration for cross-origin requests.", - "properties": { - "allowed_origins": { - "default": [ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "Config", + "description": "Root configuration for the StreamKit server.", + "type": "object", + "properties": { + "server": { + "$ref": "#/$defs/ServerConfig", + "default": { + "address": "127.0.0.1:4545", + "tls": false, + "cert_path": "", + "key_path": "", + "samples_dir": "./samples/pipelines", + "max_body_size": 104857600, + "base_path": null, + "cors": { + "allowed_origins": [ 
"http://localhost", "https://localhost", "http://localhost:*", @@ -282,378 +173,814 @@ Telemetry and observability configuration (OpenTelemetry, tokio-console). "https://127.0.0.1", "http://127.0.0.1:*", "https://127.0.0.1:*" - ], - "description": "Allowed origins for CORS requests.\nSupports wildcards: \"http://localhost:*\" matches any port on localhost.\nDefault: localhost and 127.0.0.1 on any port (HTTP and HTTPS).\nSet to `[\"*\"]` to allow all origins (not recommended for production).", - "items": { - "type": "string" - }, - "type": "array" + ] } - }, - "type": "object" + } }, - "EngineConfig": { - "description": "Engine configuration for packet processing and buffering.", - "properties": { + "security": { + "$ref": "#/$defs/SecurityConfig", + "default": { + "allowed_file_paths": [ + "samples/**" + ], + "allowed_write_paths": [] + } + }, + "log": { + "$ref": "#/$defs/LogConfig", + "default": { + "console_enable": true, + "file_enable": true, + "console_level": "info", + "file_level": "info", + "file_path": "./skit.log", + "file_format": "text" + } + }, + "telemetry": { + "$ref": "#/$defs/TelemetryConfig", + "default": { + "enable": true, + "tracing_enable": false, + "otlp_endpoint": null, + "otlp_traces_endpoint": null, + "otlp_headers": {}, + "tokio_console": false + } + }, + "engine": { + "$ref": "#/$defs/EngineConfig", + "default": { + "profile": null, + "packet_batch_size": 32, + "node_input_capacity": null, + "pin_distributor_capacity": null, + "oneshot": { + "packet_batch_size": 32, + "media_channel_capacity": null, + "io_channel_capacity": null + }, "advanced": { - "$ref": "#/$defs/AdvancedBufferConfig", - "default": { - "codec_channel_capacity": null, - "demuxer_buffer_size": null, - "moq_peer_channel_capacity": null, - "stream_channel_capacity": null + "codec_channel_capacity": null, + "stream_channel_capacity": null, + "demuxer_buffer_size": null, + "moq_peer_channel_capacity": null + } + } + }, + "plugins": { + "$ref": "#/$defs/PluginConfig", + 
"default": { + "directory": ".plugins", + "allow_http_management": false, + "marketplace_enabled": false, + "allow_native_marketplace": false, + "allow_model_urls": false, + "marketplace_require_registry_origin": false, + "marketplace_scheme_policy": "https_only", + "marketplace_host_policy": "public_only", + "marketplace_resolve_hostnames": false, + "marketplace_url_allowlist": [], + "trusted_pubkeys": [], + "registries": [], + "models_dir": null, + "huggingface_token": null + } + }, + "resources": { + "$ref": "#/$defs/ResourceConfig", + "default": { + "keep_models_loaded": true, + "max_memory_mb": null, + "prewarm": { + "enabled": false, + "plugins": [] + } + } + }, + "permissions": { + "$ref": "#/$defs/PermissionsConfig", + "default": { + "default_role": "admin", + "role_header": null, + "allow_insecure_no_auth": false, + "roles": { + "user": { + "create_sessions": true, + "destroy_sessions": true, + "list_sessions": true, + "modify_sessions": true, + "tune_nodes": true, + "load_plugins": false, + "delete_plugins": false, + "list_nodes": true, + "list_samples": true, + "read_samples": true, + "write_samples": true, + "delete_samples": true, + "allowed_samples": [ + "oneshot/*.yml", + "oneshot/*.yaml", + "dynamic/*.yml", + "dynamic/*.yaml", + "user/*.yml", + "user/*.yaml" + ], + "allowed_nodes": [ + "audio::*", + "containers::*", + "transport::moq::*", + "core::passthrough", + "core::file_reader", + "core::pacer", + "core::json_serialize", + "core::text_chunker", + "core::script", + "core::telemetry_tap", + "core::telemetry_out", + "core::sink", + "plugin::*" + ], + "allowed_plugins": [ + "plugin::*" + ], + "access_all_sessions": false, + "upload_assets": true, + "delete_assets": true, + "allowed_assets": [ + "samples/audio/system/*", + "samples/audio/user/*" + ] + }, + "viewer": { + "create_sessions": false, + "destroy_sessions": false, + "list_sessions": true, + "modify_sessions": false, + "tune_nodes": false, + "load_plugins": false, + "delete_plugins": false, 
+ "list_nodes": true, + "list_samples": true, + "read_samples": true, + "write_samples": false, + "delete_samples": false, + "allowed_samples": [ + "oneshot/*.yml", + "oneshot/*.yaml", + "dynamic/*.yml", + "dynamic/*.yaml", + "user/*.yml", + "user/*.yaml" + ], + "allowed_nodes": [ + "*" + ], + "allowed_plugins": [ + "*" + ], + "access_all_sessions": false, + "upload_assets": false, + "delete_assets": false, + "allowed_assets": [ + "samples/audio/system/*" + ] }, - "description": "Advanced buffer tuning for codec and container nodes.\nOnly modify if you understand the latency/throughput implications." + "admin": { + "create_sessions": true, + "destroy_sessions": true, + "list_sessions": true, + "modify_sessions": true, + "tune_nodes": true, + "load_plugins": true, + "delete_plugins": true, + "list_nodes": true, + "list_samples": true, + "read_samples": true, + "write_samples": true, + "delete_samples": true, + "allowed_samples": [ + "*" + ], + "allowed_nodes": [ + "*" + ], + "allowed_plugins": [ + "*" + ], + "access_all_sessions": true, + "upload_assets": true, + "delete_assets": true, + "allowed_assets": [ + "*" + ] + } }, - "node_input_capacity": { - "description": "Buffer size for node input channels (default: 128 packets)\nHigher = more buffering/latency, lower = more backpressure/responsiveness\nFor low-latency streaming, consider 8-16 packets (~160-320ms at 20ms/frame)", - "format": "uint", - "minimum": 0, - "type": [ - "integer", - "null" - ] + "max_concurrent_sessions": null, + "max_concurrent_oneshots": null + } + }, + "script": { + "$ref": "#/$defs/ScriptConfig", + "default": { + "default_timeout_ms": 100, + "default_memory_limit_mb": 64, + "global_fetch_allowlist": [], + "secrets": {} + } + }, + "auth": { + "$ref": "#/$defs/AuthConfig", + "default": { + "mode": "auto", + "state_dir": ".streamkit/auth", + "cookie_name": "skit_session", + "api_default_ttl_secs": 86400, + "api_max_ttl_secs": 2592000, + "moq_default_ttl_secs": 3600, + "moq_max_ttl_secs": 
86400 + } + } + }, + "$defs": { + "ServerConfig": { + "description": "HTTP server configuration including TLS and CORS settings.", + "type": "object", + "properties": { + "address": { + "type": "string" }, - "oneshot": { - "$ref": "#/$defs/OneshotConfig", - "default": { - "io_channel_capacity": null, - "media_channel_capacity": null, - "packet_batch_size": 32 - }, - "description": "Configuration for oneshot (HTTP batch) pipelines." + "tls": { + "type": "boolean" }, - "packet_batch_size": { - "default": 32, - "description": "Batch size for processing packets in nodes (default: 32)\nLower values = more responsive to control messages, higher values = better throughput", - "format": "uint", - "minimum": 0, - "type": "integer" + "cert_path": { + "type": "string" }, - "pin_distributor_capacity": { - "description": "Buffer size between node output and pin distributor (default: 64 packets)\nFor low-latency streaming, consider 4-8 packets", + "key_path": { + "type": "string" + }, + "samples_dir": { + "type": "string" + }, + "max_body_size": { + "description": "Maximum request body size in bytes for multipart uploads (default: 100MB)", + "type": "integer", "format": "uint", "minimum": 0, + "default": 104857600 + }, + "base_path": { + "description": "Base path for subpath deployments (e.g., \"/s/session_xxx\"). Used to inject tag in HTML.\nIf None, no tag is injected (root deployment).", "type": [ - "integer", + "string", "null" ] }, - "profile": { - "anyOf": [ - { - "$ref": "#/$defs/EnginePerfProfile" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Optional tuning profile that provides sensible buffering defaults.\n\nExplicit values for `node_input_capacity` and/or `pin_distributor_capacity` take precedence." 
- } + "cors": { + "description": "CORS configuration for cross-origin requests", + "$ref": "#/$defs/CorsConfig", + "default": { + "allowed_origins": [ + "http://localhost", + "https://localhost", + "http://localhost:*", + "https://localhost:*", + "http://127.0.0.1", + "https://127.0.0.1", + "http://127.0.0.1:*", + "https://127.0.0.1:*" + ] + } + } }, - "type": "object" + "required": [ + "address", + "tls", + "cert_path", + "key_path", + "samples_dir" + ] }, - "EnginePerfProfile": { - "description": "Preset tuning profiles for the engine.", - "oneOf": [ - { - "const": "low-latency", - "description": "Low-latency real-time streaming (minimal buffering, more backpressure)", - "type": "string" - }, - { - "const": "balanced", - "description": "Balanced defaults for general streaming and interactive pipelines", - "type": "string" + "CorsConfig": { + "description": "CORS configuration for cross-origin requests.", + "type": "object", + "properties": { + "allowed_origins": { + "description": "Allowed origins for CORS requests.\nSupports wildcards: \"http://localhost:*\" matches any port on localhost.\nDefault: localhost and 127.0.0.1 on any port (HTTP and HTTPS).\nSet to `[\"*\"]` to allow all origins (not recommended for production).", + "type": "array", + "items": { + "type": "string" + }, + "default": [ + "http://localhost", + "https://localhost", + "http://localhost:*", + "https://localhost:*", + "http://127.0.0.1", + "https://127.0.0.1", + "http://127.0.0.1:*", + "https://127.0.0.1:*" + ] + } + } + }, + "SecurityConfig": { + "description": "Security configuration for file access and other security-sensitive settings.", + "type": "object", + "properties": { + "allowed_file_paths": { + "description": "Allowed file paths for file_reader nodes.\nSupports glob patterns (e.g., \"samples/**\", \"/data/media/*\").\nRelative paths are resolved against the server's working directory.\nDefault: `[\"samples/**\"]` - only allow reading from the samples directory.\nSet to `[\"**\"]` 
to allow all paths (not recommended for production).", + "type": "array", + "items": { + "type": "string" + }, + "default": [ + "samples/**" + ] }, - { - "const": "high-throughput", - "description": "High-throughput / batch processing (more buffering, higher latency)", - "type": "string" + "allowed_write_paths": { + "description": "Allowed file paths for file_writer nodes.\n\nDefault: empty (deny all writes). This is intentional: arbitrary file writes from\nuser-provided pipelines are a high-risk capability.\n\nPatterns follow the same rules as `allowed_file_paths` and are matched against the\nresolved absolute target path.", + "type": "array", + "items": { + "type": "string" + }, + "default": [] } - ] + } }, "LogConfig": { "description": "Logging configuration for console and file output.", + "type": "object", "properties": { "console_enable": { - "default": false, - "type": "boolean" + "type": "boolean", + "default": false + }, + "file_enable": { + "type": "boolean", + "default": false }, "console_level": { "$ref": "#/$defs/LogLevel", "default": "info" }, - "file_enable": { - "default": false, - "type": "boolean" - }, - "file_format": { - "$ref": "#/$defs/LogFormat", - "default": "text", - "description": "Format for file logging: \"text\" (default, faster) or \"json\" (structured)" - }, "file_level": { "$ref": "#/$defs/LogLevel", "default": "info" }, "file_path": { - "default": "", - "type": "string" + "type": "string", + "default": "" + }, + "file_format": { + "description": "Format for file logging: \"text\" (default, faster) or \"json\" (structured)", + "$ref": "#/$defs/LogFormat", + "default": "text" } - }, - "type": "object" + } + }, + "LogLevel": { + "description": "Log level for filtering messages.", + "type": "string", + "enum": [ + "debug", + "info", + "warn", + "error" + ] }, "LogFormat": { "description": "Log file format options.", "oneOf": [ { - "const": "text", "description": "Plain text format (faster, lower CPU overhead)", - "type": "string" + 
"type": "string", + "const": "text" }, { - "const": "json", "description": "JSON format (structured, better for log aggregation but ~2-3x slower)", - "type": "string" + "type": "string", + "const": "json" } ] }, - "LogLevel": { - "description": "Log level for filtering messages.", - "enum": [ - "debug", - "info", - "warn", - "error" - ], - "type": "string" - }, - "OneshotConfig": { - "description": "Oneshot pipeline configuration (HTTP batch processing).\n\nThese settings apply to stateless pipelines executed via the `/api/v1/process` endpoint.\nOneshot pipelines use larger buffers by default than dynamic sessions because they\ndon't require tight backpressure coordination.", + "TelemetryConfig": { + "description": "Telemetry and observability configuration (OpenTelemetry, tokio-console).", + "type": "object", "properties": { - "io_channel_capacity": { - "description": "Buffer size for I/O stream channels (default: 16)\nUsed for HTTP input/output streaming.", - "format": "uint", - "minimum": 0, + "enable": { + "type": "boolean", + "default": true + }, + "tracing_enable": { + "description": "Enable OpenTelemetry tracing (spans) export.\n\nMetrics export is controlled separately via `otlp_endpoint`.", + "type": "boolean", + "default": false + }, + "otlp_endpoint": { "type": [ - "integer", + "string", "null" ] }, - "media_channel_capacity": { - "description": "Buffer size for media channels between nodes (default: 256 packets)\nOneshot uses larger buffers than dynamic for batch efficiency.", - "format": "uint", - "minimum": 0, + "otlp_traces_endpoint": { + "description": "OTLP endpoint for trace export (e.g., `http://localhost:4318/v1/traces`).", "type": [ - "integer", + "string", "null" ] }, - "packet_batch_size": { - "default": 32, - "description": "Batch size for processing packets in oneshot pipelines (default: 32)\nLower values = more responsive, higher values = better throughput", - "format": "uint", - "minimum": 0, - "type": "integer" - } - }, - "type": 
"object" - }, - "Permissions": { - "description": "Represents a set of permissions granted to a role\n\nNote: We allow excessive bools here because permissions are inherently\nindependent boolean flags. Each field represents a distinct capability\nthat can be enabled or disabled. Converting to enums or state machines\nwould complicate the API without providing meaningful benefit.\nRole-based permissions for access control.", - "properties": { - "access_all_sessions": { - "default": false, - "description": "Can access any user's sessions (admin capability)", - "type": "boolean" - }, - "allowed_assets": { - "default": [], - "description": "Allowed audio asset paths (supports globs like \"samples/audio/system/*.opus\")\nEmpty list means no assets are allowed (deny by default).\nUse `[\"*\"]` to allow everything.", - "items": { - "type": "string" - }, - "type": "array" - }, - "allowed_nodes": { - "default": [], - "description": "Allowed node types (e.g., \"audio::gain\", \"transport::moq::*\")\nEmpty list means no nodes are allowed (deny by default).\nUse `[\"*\"]` to allow everything.", - "items": { - "type": "string" - }, - "type": "array" - }, - "allowed_plugins": { - "default": [], - "description": "Allowed plugin node kinds (e.g., \"plugin::native::whisper\", \"plugin::wasm::gain\", \"plugin::*\")\nEmpty list means no plugins are allowed (deny by default).\nUse `[\"*\"]` to allow everything.", - "items": { - "type": "string" - }, - "type": "array" - }, - "allowed_samples": { - "default": [], - "description": "Allowed sample pipeline paths (supports globs like \"oneshot/*.yml\").\n\nPaths are evaluated relative to `[server].samples_dir`.\nEmpty list means no samples are allowed (deny by default).\nUse `[\"*\"]` to allow everything.", - "items": { + "otlp_headers": { + "type": "object", + "additionalProperties": { "type": "string" }, - "type": "array" - }, - "create_sessions": { - "default": false, - "description": "Can create new sessions", - "type": "boolean" - }, 
- "delete_assets": { - "default": false, - "description": "Can delete audio assets (user assets only)", - "type": "boolean" - }, - "delete_plugins": { - "default": false, - "description": "Can delete plugins", - "type": "boolean" - }, - "delete_samples": { - "default": false, - "description": "Can delete user pipelines in `[server].samples_dir/user`", - "type": "boolean" - }, - "destroy_sessions": { - "default": false, - "description": "Can destroy sessions (their own or any depending on context)", - "type": "boolean" - }, - "list_nodes": { - "default": false, - "description": "Can view the list of available nodes", - "type": "boolean" - }, - "list_samples": { - "default": false, - "description": "Can list sample pipelines", - "type": "boolean" - }, - "list_sessions": { - "default": false, - "description": "Can list sessions (their own or all depending on context)", - "type": "boolean" - }, - "load_plugins": { - "default": false, - "description": "Can upload and load plugins (WASM or native)", - "type": "boolean" - }, - "modify_sessions": { - "default": false, - "description": "Can modify running sessions (add/remove nodes)", - "type": "boolean" - }, - "read_samples": { - "default": false, - "description": "Can read sample pipeline YAML", - "type": "boolean" - }, - "tune_nodes": { - "default": false, - "description": "Can tune parameters on running nodes", - "type": "boolean" - }, - "upload_assets": { - "default": false, - "description": "Can upload audio assets", - "type": "boolean" + "default": {} }, - "write_samples": { - "default": false, - "description": "Can save/update user pipelines in `[server].samples_dir/user`", - "type": "boolean" + "tokio_console": { + "type": "boolean", + "default": false } - }, - "type": "object" + } }, - "PermissionsConfig": { - "description": "Permission configuration section for skit.toml.", + "EngineConfig": { + "description": "Engine configuration for packet processing and buffering.", + "type": "object", "properties": { - 
"allow_insecure_no_auth": { - "default": false, - "description": "Allow starting the server on a non-loopback address without built-in auth or a trusted role\nheader.\n\nThis only applies when built-in auth is disabled.\n\nThis is unsafe: all requests fall back to `SK_ROLE`/`default_role`. The server refuses to\nstart in this configuration unless this flag is set.", - "type": "boolean" - }, - "default_role": { - "default": "admin", - "description": "Default role for requests without an authenticated role\n\nWhen built-in auth is disabled, this becomes the effective role for requests that are not\nassigned a role via a trusted role header or `SK_ROLE`.\n\nFor production deployments, prefer enabling built-in auth (`[auth].mode`) or running behind\nan authenticating reverse proxy that sets `[permissions].role_header`.", - "type": "string" + "profile": { + "description": "Optional tuning profile that provides sensible buffering defaults.\n\nExplicit values for `node_input_capacity` and/or `pin_distributor_capacity` take precedence.", + "anyOf": [ + { + "$ref": "#/$defs/EnginePerfProfile" + }, + { + "type": "null" + } + ], + "default": null }, - "max_concurrent_oneshots": { - "default": null, - "description": "Maximum concurrent oneshot pipelines (global limit)\nNone = unlimited", + "packet_batch_size": { + "description": "Batch size for processing packets in nodes (default: 32)\nLower values = more responsive to control messages, higher values = better throughput", + "type": "integer", "format": "uint", "minimum": 0, + "default": 32 + }, + "node_input_capacity": { + "description": "Buffer size for node input channels (default: 128 packets)\nHigher = more buffering/latency, lower = more backpressure/responsiveness\nFor low-latency streaming, consider 8-16 packets (~160-320ms at 20ms/frame)", "type": [ "integer", "null" - ] - }, - "max_concurrent_sessions": { - "default": null, - "description": "Maximum concurrent dynamic sessions (global limit, applies to all 
users)\nNone = unlimited", + ], + "format": "uint", + "minimum": 0 + }, + "pin_distributor_capacity": { + "description": "Buffer size between node output and pin distributor (default: 64 packets)\nFor low-latency streaming, consider 4-8 packets", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 0 + }, + "oneshot": { + "description": "Configuration for oneshot (HTTP batch) pipelines.", + "$ref": "#/$defs/OneshotConfig", + "default": { + "packet_batch_size": 32, + "media_channel_capacity": null, + "io_channel_capacity": null + } + }, + "advanced": { + "description": "Advanced buffer tuning for codec and container nodes.\nOnly modify if you understand the latency/throughput implications.", + "$ref": "#/$defs/AdvancedBufferConfig", + "default": { + "codec_channel_capacity": null, + "stream_channel_capacity": null, + "demuxer_buffer_size": null, + "moq_peer_channel_capacity": null + } + } + } + }, + "EnginePerfProfile": { + "description": "Preset tuning profiles for the engine.", + "oneOf": [ + { + "description": "Low-latency real-time streaming (minimal buffering, more backpressure)", + "type": "string", + "const": "low-latency" + }, + { + "description": "Balanced defaults for general streaming and interactive pipelines", + "type": "string", + "const": "balanced" + }, + { + "description": "High-throughput / batch processing (more buffering, higher latency)", + "type": "string", + "const": "high-throughput" + } + ] + }, + "OneshotConfig": { + "description": "Oneshot pipeline configuration (HTTP batch processing).\n\nThese settings apply to stateless pipelines executed via the `/api/v1/process` endpoint.\nOneshot pipelines use larger buffers by default than dynamic sessions because they\ndon't require tight backpressure coordination.", + "type": "object", + "properties": { + "packet_batch_size": { + "description": "Batch size for processing packets in oneshot pipelines (default: 32)\nLower values = more responsive, higher values = better 
throughput", + "type": "integer", "format": "uint", "minimum": 0, + "default": 32 + }, + "media_channel_capacity": { + "description": "Buffer size for media channels between nodes (default: 256 packets)\nOneshot uses larger buffers than dynamic for batch efficiency.", "type": [ "integer", "null" - ] + ], + "format": "uint", + "minimum": 0 + }, + "io_channel_capacity": { + "description": "Buffer size for I/O stream channels (default: 16)\nUsed for HTTP input/output streaming.", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 0 + } + } + }, + "AdvancedBufferConfig": { + "description": "Advanced internal buffer configuration for power users.\n\nThese settings affect async/blocking handoff channels in codec and container nodes.\nMost users should not need to modify these values. Only adjust if you understand\nthe latency/throughput tradeoffs and have specific performance requirements.\n\nAll values are in packets (not bytes). The actual memory footprint depends on packet size.", + "type": "object", + "properties": { + "codec_channel_capacity": { + "description": "Capacity for codec processing channels (opus, flac, mp3) (default: 32)\nUsed for async/blocking handoff in codec nodes.", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 0 + }, + "stream_channel_capacity": { + "description": "Capacity for streaming reader channels (container demuxers) (default: 8)\nSmaller than codec channels because container frames may be larger.", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 0 + }, + "demuxer_buffer_size": { + "description": "Duplex buffer size for ogg demuxer in bytes (default: 65536)", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 0 + }, + "moq_peer_channel_capacity": { + "description": "MoQ transport peer channel capacity (default: 100)\nUsed for network send/receive coordination in MoQ transport nodes.", + "type": [ + "integer", + "null" + ], + "format": "uint", + 
"minimum": 0 + } + } + }, + "PluginConfig": { + "description": "Plugin directory configuration.", + "type": "object", + "properties": { + "directory": { + "type": "string" + }, + "allow_http_management": { + "description": "Controls whether runtime plugin upload/delete is allowed via the public APIs.\n\nDefault is false to avoid accidental exposure when running without an auth layer.", + "type": "boolean", + "default": false + }, + "marketplace_enabled": { + "description": "Enables the plugin marketplace API and UI (default: false).", + "type": "boolean", + "default": false + }, + "allow_native_marketplace": { + "description": "Allows native plugins to be installed from a marketplace (default: false).\n\nNative plugins run in-process and are unsafe without full trust.", + "type": "boolean", + "default": false + }, + "allow_model_urls": { + "description": "Allow direct URL model downloads from manifests (default: false).", + "type": "boolean", + "default": false + }, + "marketplace_require_registry_origin": { + "description": "Require marketplace URLs to share origin with the registry (default: false).", + "type": "boolean", + "default": false + }, + "marketplace_scheme_policy": { + "description": "Scheme policy for marketplace URLs (default: https_only).", + "$ref": "#/$defs/MarketplaceSchemePolicy", + "default": "https_only" + }, + "marketplace_host_policy": { + "description": "Host policy for marketplace URLs (default: public_only).", + "$ref": "#/$defs/MarketplaceHostPolicy", + "default": "public_only" + }, + "marketplace_resolve_hostnames": { + "description": "Resolve hostnames for marketplace URLs and check resolved IPs (default: false).", + "type": "boolean", + "default": false + }, + "marketplace_url_allowlist": { + "description": "Allowed marketplace origins (e.g., \"https://example.com\", \"https://example.com:*\").", + "type": "array", + "items": { + "type": "string" + }, + "default": [] + }, + "trusted_pubkeys": { + "description": "Minisign public keys 
(contents of `.pub` files) trusted for marketplace manifests.", + "type": "array", + "items": { + "type": "string" + }, + "default": [] + }, + "registries": { + "description": "Registry index URLs (e.g., `https://example.com/index.json`).", + "type": "array", + "items": { + "type": "string" + }, + "default": [] + }, + "models_dir": { + "description": "Optional directory to store downloaded models (defaults to `models` when unset).", + "type": [ + "string", + "null" + ], + "default": null + }, + "huggingface_token": { + "description": "Optional Hugging Face token for gated model downloads.", + "type": [ + "string", + "null" + ], + "default": null + } + }, + "required": [ + "directory" + ] + }, + "MarketplaceSchemePolicy": { + "type": "string", + "enum": [ + "https_only", + "allow_http" + ] + }, + "MarketplaceHostPolicy": { + "type": "string", + "enum": [ + "public_only", + "allow_private" + ] + }, + "ResourceConfig": { + "description": "Resource management configuration for ML models and shared resources.", + "type": "object", + "properties": { + "keep_models_loaded": { + "description": "Keep loaded resources (models) in memory until explicit unload (default: true).\nWhen false, resources may be evicted based on LRU policy if max_memory_mb is set.", + "type": "boolean", + "default": true + }, + "max_memory_mb": { + "description": "Optional memory limit in megabytes for cached resources (models).\nWhen set, least-recently-used resources will be evicted to stay under the limit.\nOnly applies when keep_models_loaded is false.", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 0 + }, + "prewarm": { + "description": "Pre-warming configuration for reducing first-use latency", + "$ref": "#/$defs/PrewarmConfig", + "default": { + "enabled": false, + "plugins": [] + } + } + } + }, + "PrewarmConfig": { + "description": "Configuration for pre-warming plugins at startup.", + "type": "object", + "properties": { + "enabled": { + "description": "Enable 
pre-warming (default: false)", + "type": "boolean", + "default": false + }, + "plugins": { + "description": "List of plugins to pre-warm with their parameters", + "type": "array", + "items": { + "$ref": "#/$defs/PrewarmPluginConfig" + }, + "default": [] + } + } + }, + "PrewarmPluginConfig": { + "description": "Configuration for a single plugin to pre-warm at startup.", + "type": "object", + "properties": { + "kind": { + "description": "Plugin kind (e.g., \"plugin::native::kokoro\", \"plugin::native::whisper\")", + "type": "string" + }, + "params": { + "description": "Parameters to use when creating the warmup instance\nThese should match the most common usage pattern", + "default": null + }, + "fallback_params": { + "description": "Optional fallback parameters to try if the primary params fail\nUseful for GPU plugins that should fallback to CPU", + "default": null + } + }, + "required": [ + "kind" + ] + }, + "PermissionsConfig": { + "description": "Permission configuration section for skit.toml.", + "type": "object", + "properties": { + "default_role": { + "description": "Default role for requests without an authenticated role\n\nWhen built-in auth is disabled, this becomes the effective role for requests that are not\nassigned a role via a trusted role header or `SK_ROLE`.\n\nFor production deployments, prefer enabling built-in auth (`[auth].mode`) or running behind\nan authenticating reverse proxy that sets `[permissions].role_header`.", + "type": "string", + "default": "admin" }, "role_header": { - "default": null, "description": "Optional trusted HTTP header used to select a role (e.g. 
\"x-role\" or \"x-streamkit-role\").\n\nIf unset, StreamKit ignores role headers entirely and uses `SK_ROLE`/`default_role`.\n\nSecurity note: Only enable this when running behind a trusted reverse proxy or\nauth layer that (a) authenticates the caller and (b) strips any incoming header\nwith the same name before setting it.", "type": [ "string", "null" - ] + ], + "default": null + }, + "allow_insecure_no_auth": { + "description": "Allow starting the server on a non-loopback address without built-in auth or a trusted role\nheader.\n\nThis only applies when built-in auth is disabled.\n\nThis is unsafe: all requests fall back to `SK_ROLE`/`default_role`. The server refuses to\nstart in this configuration unless this flag is set.", + "type": "boolean", + "default": false }, "roles": { + "description": "Map of role name -> permissions", + "type": "object", "additionalProperties": { "$ref": "#/$defs/Permissions" }, "default": { - "admin": { - "access_all_sessions": true, - "allowed_assets": [ - "*" - ], - "allowed_nodes": [ - "*" - ], - "allowed_plugins": [ - "*" - ], - "allowed_samples": [ - "*" - ], + "user": { "create_sessions": true, - "delete_assets": true, - "delete_plugins": true, - "delete_samples": true, "destroy_sessions": true, - "list_nodes": true, - "list_samples": true, "list_sessions": true, - "load_plugins": true, "modify_sessions": true, - "read_samples": true, "tune_nodes": true, - "upload_assets": true, - "write_samples": true - }, - "user": { - "access_all_sessions": false, - "allowed_assets": [ - "samples/audio/system/*", - "samples/audio/user/*" + "load_plugins": false, + "delete_plugins": false, + "list_nodes": true, + "list_samples": true, + "read_samples": true, + "write_samples": true, + "delete_samples": true, + "allowed_samples": [ + "oneshot/*.yml", + "oneshot/*.yaml", + "dynamic/*.yml", + "dynamic/*.yaml", + "user/*.yml", + "user/*.yaml" ], "allowed_nodes": [ "audio::*", @@ -673,40 +1000,27 @@ Telemetry and observability configuration 
(OpenTelemetry, tokio-console). "allowed_plugins": [ "plugin::*" ], - "allowed_samples": [ - "oneshot/*.yml", - "oneshot/*.yaml", - "dynamic/*.yml", - "dynamic/*.yaml", - "user/*.yml", - "user/*.yaml" - ], - "create_sessions": true, + "access_all_sessions": false, + "upload_assets": true, "delete_assets": true, + "allowed_assets": [ + "samples/audio/system/*", + "samples/audio/user/*" + ] + }, + "viewer": { + "create_sessions": false, + "destroy_sessions": false, + "list_sessions": true, + "modify_sessions": false, + "tune_nodes": false, + "load_plugins": false, "delete_plugins": false, - "delete_samples": true, - "destroy_sessions": true, "list_nodes": true, "list_samples": true, - "list_sessions": true, - "load_plugins": false, - "modify_sessions": true, "read_samples": true, - "tune_nodes": true, - "upload_assets": true, - "write_samples": true - }, - "viewer": { - "access_all_sessions": false, - "allowed_assets": [ - "samples/audio/system/*" - ], - "allowed_nodes": [ - "*" - ], - "allowed_plugins": [ - "*" - ], + "write_samples": false, + "delete_samples": false, "allowed_samples": [ "oneshot/*.yml", "oneshot/*.yaml", @@ -715,569 +1029,368 @@ Telemetry and observability configuration (OpenTelemetry, tokio-console). 
"user/*.yml", "user/*.yaml" ], - "create_sessions": false, + "allowed_nodes": [ + "*" + ], + "allowed_plugins": [ + "*" + ], + "access_all_sessions": false, + "upload_assets": false, "delete_assets": false, - "delete_plugins": false, - "delete_samples": false, - "destroy_sessions": false, + "allowed_assets": [ + "samples/audio/system/*" + ] + }, + "admin": { + "create_sessions": true, + "destroy_sessions": true, + "list_sessions": true, + "modify_sessions": true, + "tune_nodes": true, + "load_plugins": true, + "delete_plugins": true, "list_nodes": true, "list_samples": true, - "list_sessions": true, - "load_plugins": false, - "modify_sessions": false, "read_samples": true, - "tune_nodes": false, - "upload_assets": false, - "write_samples": false + "write_samples": true, + "delete_samples": true, + "allowed_samples": [ + "*" + ], + "allowed_nodes": [ + "*" + ], + "allowed_plugins": [ + "*" + ], + "access_all_sessions": true, + "upload_assets": true, + "delete_assets": true, + "allowed_assets": [ + "*" + ] } - }, - "description": "Map of role name -> permissions", - "type": "object" - } - }, - "type": "object" - }, - "PluginConfig": { - "description": "Plugin directory configuration.", - "properties": { - "allow_http_management": { - "default": false, - "description": "Controls whether runtime plugin upload/delete is allowed via the public APIs.\n\nDefault is false to avoid accidental exposure when running without an auth layer.", - "type": "boolean" - }, - "directory": { - "type": "string" - } - }, - "required": [ - "directory" - ], - "type": "object" - }, - "PrewarmConfig": { - "description": "Configuration for pre-warming plugins at startup.", - "properties": { - "enabled": { - "default": false, - "description": "Enable pre-warming (default: false)", - "type": "boolean" - }, - "plugins": { - "default": [], - "description": "List of plugins to pre-warm with their parameters", - "items": { - "$ref": "#/$defs/PrewarmPluginConfig" - }, - "type": "array" - } - }, - 
"type": "object" - }, - "PrewarmPluginConfig": { - "description": "Configuration for a single plugin to pre-warm at startup.", - "properties": { - "fallback_params": { - "default": null, - "description": "Optional fallback parameters to try if the primary params fail\nUseful for GPU plugins that should fallback to CPU" - }, - "kind": { - "description": "Plugin kind (e.g., \"plugin::native::kokoro\", \"plugin::native::whisper\")", - "type": "string" - }, - "params": { - "default": null, - "description": "Parameters to use when creating the warmup instance\nThese should match the most common usage pattern" - } - }, - "required": [ - "kind" - ], - "type": "object" - }, - "ResourceConfig": { - "description": "Resource management configuration for ML models and shared resources.", - "properties": { - "keep_models_loaded": { - "default": true, - "description": "Keep loaded resources (models) in memory until explicit unload (default: true).\nWhen false, resources may be evicted based on LRU policy if max_memory_mb is set.", - "type": "boolean" + } }, - "max_memory_mb": { - "description": "Optional memory limit in megabytes for cached resources (models).\nWhen set, least-recently-used resources will be evicted to stay under the limit.\nOnly applies when keep_models_loaded is false.", - "format": "uint", - "minimum": 0, + "max_concurrent_sessions": { + "description": "Maximum concurrent dynamic sessions (global limit, applies to all users)\nNone = unlimited", "type": [ "integer", "null" - ] - }, - "prewarm": { - "$ref": "#/$defs/PrewarmConfig", - "default": { - "enabled": false, - "plugins": [] - }, - "description": "Pre-warming configuration for reducing first-use latency" - } - }, - "type": "object" - }, - "ScriptConfig": { - "description": "Configuration for the core::script node.", - "properties": { - "default_memory_limit_mb": { - "default": 64, - "description": "Default memory limit for QuickJS runtime (in megabytes)", + ], "format": "uint", "minimum": 0, - "type": 
"integer" - }, - "default_timeout_ms": { - "default": 100, - "description": "Default timeout for script execution per packet (in milliseconds)", - "format": "uint64", - "minimum": 0, - "type": "integer" - }, - "global_fetch_allowlist": { - "default": [], - "description": "Global fetch allowlist (empty = block all fetch() calls)\nApplies to all script nodes.\n\nSecurity note: there is no per-pipeline allowlist override; this prevents bypass via\nuser-provided pipelines.", - "items": { - "$ref": "#/$defs/AllowlistRule" - }, - "type": "array" - }, - "secrets": { - "additionalProperties": { - "$ref": "#/$defs/SecretConfig" - }, - "default": {}, - "description": "Available secrets (name → environment variable mapping)\nEmpty map = no secrets available to any script node\nSecrets are loaded from environment variables at server startup\nand can be injected into HTTP headers via pipeline configuration", - "type": "object" - } - }, - "type": "object" - }, - "SecretConfig": { - "description": "Configuration for a single secret loaded from environment.", - "properties": { - "allowed_fetch_urls": { - "default": [], - "description": "Optional allowlist of URL patterns where this secret may be injected into `fetch()` headers.\n\nPatterns use the same format as `script.global_fetch_allowlist` entries:\n- `https://api.openai.com/*`\n- `https://api.openai.com/v1/chat/completions`\n\nEmpty = no additional restriction (backwards-compatible).", - "items": { - "type": "string" - }, - "type": "array" - }, - "description": { - "default": "", - "description": "Optional description for documentation", - "type": "string" - }, - "env": { - "description": "Environment variable name containing the secret value", - "type": "string" - }, - "type": { - "$ref": "#/$defs/SecretType", - "default": "string", - "description": "Type of secret (for validation and formatting)" - } - }, - "required": [ - "env" - ], - "type": "object" - }, - "SecretType": { - "description": "Type of secret for validation 
and documentation.", - "oneOf": [ - { - "const": "url", - "description": "URL (e.g., webhook URLs)", - "type": "string" - }, - { - "const": "token", - "description": "Bearer token", - "type": "string" + "default": null }, - { - "const": "apikey", - "description": "API key", - "type": "string" - }, - { - "const": "string", - "description": "Generic string", - "type": "string" - } - ] - }, - "SecurityConfig": { - "description": "Security configuration for file access and other security-sensitive settings.", - "properties": { - "allowed_file_paths": { - "default": [ - "samples/**" - ], - "description": "Allowed file paths for file_reader nodes.\nSupports glob patterns (e.g., \"samples/**\", \"/data/media/*\").\nRelative paths are resolved against the server's working directory.\nDefault: `[\"samples/**\"]` - only allow reading from the samples directory.\nSet to `[\"**\"]` to allow all paths (not recommended for production).", - "items": { - "type": "string" - }, - "type": "array" - }, - "allowed_write_paths": { - "default": [], - "description": "Allowed file paths for file_writer nodes.\n\nDefault: empty (deny all writes). This is intentional: arbitrary file writes from\nuser-provided pipelines are a high-risk capability.\n\nPatterns follow the same rules as `allowed_file_paths` and are matched against the\nresolved absolute target path.", - "items": { - "type": "string" - }, - "type": "array" - } - }, - "type": "object" - }, - "ServerConfig": { - "description": "HTTP server configuration including TLS and CORS settings.", - "properties": { - "address": { - "type": "string" - }, - "base_path": { - "description": "Base path for subpath deployments (e.g., \"/s/session_xxx\"). 
Used to inject tag in HTML.\nIf None, no tag is injected (root deployment).", + "max_concurrent_oneshots": { + "description": "Maximum concurrent oneshot pipelines (global limit)\nNone = unlimited", "type": [ - "string", + "integer", "null" - ] - }, - "cert_path": { - "type": "string" - }, - "cors": { - "$ref": "#/$defs/CorsConfig", - "default": { - "allowed_origins": [ - "http://localhost", - "https://localhost", - "http://localhost:*", - "https://localhost:*", - "http://127.0.0.1", - "https://127.0.0.1", - "http://127.0.0.1:*", - "https://127.0.0.1:*" - ] - }, - "description": "CORS configuration for cross-origin requests" - }, - "key_path": { - "type": "string" - }, - "max_body_size": { - "default": 104857600, - "description": "Maximum request body size in bytes for multipart uploads (default: 100MB)", + ], "format": "uint", "minimum": 0, - "type": "integer" - }, - "samples_dir": { - "type": "string" - }, - "tls": { - "type": "boolean" - } - }, - "required": [ - "address", - "tls", - "cert_path", - "key_path", - "samples_dir" - ], - "type": "object" - }, - "TelemetryConfig": { - "description": "Telemetry and observability configuration (OpenTelemetry, tokio-console).", - "properties": { - "enable": { - "default": true, - "type": "boolean" - }, - "otlp_endpoint": { - "type": [ - "string", - "null" - ] - }, - "otlp_headers": { - "additionalProperties": { - "type": "string" - }, - "default": {}, - "type": "object" - }, - "otlp_traces_endpoint": { - "description": "OTLP endpoint for trace export (e.g., `http://localhost:4318/v1/traces`).", - "type": [ - "string", - "null" - ] - }, - "tokio_console": { - "default": false, - "type": "boolean" - }, - "tracing_enable": { - "default": false, - "description": "Enable OpenTelemetry tracing (spans) export.\n\nMetrics export is controlled separately via `otlp_endpoint`.", - "type": "boolean" - } - }, - "type": "object" - } - }, - "$schema": "https://json-schema.org/draft/2020-12/schema", - "description": "Root configuration 
for the StreamKit server.", - "properties": { - "auth": { - "$ref": "#/$defs/AuthConfig", - "default": { - "api_default_ttl_secs": 86400, - "api_max_ttl_secs": 2592000, - "cookie_name": "skit_session", - "mode": "auto", - "moq_default_ttl_secs": 3600, - "moq_max_ttl_secs": 86400, - "state_dir": ".streamkit/auth" - } - }, - "engine": { - "$ref": "#/$defs/EngineConfig", - "default": { - "advanced": { - "codec_channel_capacity": null, - "demuxer_buffer_size": null, - "moq_peer_channel_capacity": null, - "stream_channel_capacity": null - }, - "node_input_capacity": null, - "oneshot": { - "io_channel_capacity": null, - "media_channel_capacity": null, - "packet_batch_size": 32 - }, - "packet_batch_size": 32, - "pin_distributor_capacity": null, - "profile": null - } - }, - "log": { - "$ref": "#/$defs/LogConfig", - "default": { - "console_enable": true, - "console_level": "info", - "file_enable": true, - "file_format": "text", - "file_level": "info", - "file_path": "./skit.log" - } - }, - "permissions": { - "$ref": "#/$defs/PermissionsConfig", - "default": { - "allow_insecure_no_auth": false, - "default_role": "admin", - "max_concurrent_oneshots": null, - "max_concurrent_sessions": null, - "role_header": null, - "roles": { - "admin": { - "access_all_sessions": true, - "allowed_assets": [ - "*" - ], - "allowed_nodes": [ - "*" - ], - "allowed_plugins": [ - "*" - ], - "allowed_samples": [ - "*" - ], - "create_sessions": true, - "delete_assets": true, - "delete_plugins": true, - "delete_samples": true, - "destroy_sessions": true, - "list_nodes": true, - "list_samples": true, - "list_sessions": true, - "load_plugins": true, - "modify_sessions": true, - "read_samples": true, - "tune_nodes": true, - "upload_assets": true, - "write_samples": true - }, - "user": { - "access_all_sessions": false, - "allowed_assets": [ - "samples/audio/system/*", - "samples/audio/user/*" - ], - "allowed_nodes": [ - "audio::*", - "containers::*", - "transport::moq::*", - "core::passthrough", - 
"core::file_reader", - "core::pacer", - "core::json_serialize", - "core::text_chunker", - "core::script", - "core::telemetry_tap", - "core::telemetry_out", - "core::sink", - "plugin::*" - ], - "allowed_plugins": [ - "plugin::*" - ], - "allowed_samples": [ - "oneshot/*.yml", - "oneshot/*.yaml", - "dynamic/*.yml", - "dynamic/*.yaml", - "user/*.yml", - "user/*.yaml" - ], - "create_sessions": true, - "delete_assets": true, - "delete_plugins": false, - "delete_samples": true, - "destroy_sessions": true, - "list_nodes": true, - "list_samples": true, - "list_sessions": true, - "load_plugins": false, - "modify_sessions": true, - "read_samples": true, - "tune_nodes": true, - "upload_assets": true, - "write_samples": true - }, - "viewer": { - "access_all_sessions": false, - "allowed_assets": [ - "samples/audio/system/*" - ], - "allowed_nodes": [ - "*" - ], - "allowed_plugins": [ - "*" - ], - "allowed_samples": [ - "oneshot/*.yml", - "oneshot/*.yaml", - "dynamic/*.yml", - "dynamic/*.yaml", - "user/*.yml", - "user/*.yaml" - ], - "create_sessions": false, - "delete_assets": false, - "delete_plugins": false, - "delete_samples": false, - "destroy_sessions": false, - "list_nodes": true, - "list_samples": true, - "list_sessions": true, - "load_plugins": false, - "modify_sessions": false, - "read_samples": true, - "tune_nodes": false, - "upload_assets": false, - "write_samples": false - } + "default": null } } }, - "plugins": { - "$ref": "#/$defs/PluginConfig", - "default": { - "allow_http_management": false, - "directory": ".plugins" + "Permissions": { + "description": "Represents a set of permissions granted to a role\n\nNote: We allow excessive bools here because permissions are inherently\nindependent boolean flags. Each field represents a distinct capability\nthat can be enabled or disabled. 
Converting to enums or state machines\nwould complicate the API without providing meaningful benefit.\nRole-based permissions for access control.", + "type": "object", + "properties": { + "create_sessions": { + "description": "Can create new sessions", + "type": "boolean", + "default": false + }, + "destroy_sessions": { + "description": "Can destroy sessions (their own or any depending on context)", + "type": "boolean", + "default": false + }, + "list_sessions": { + "description": "Can list sessions (their own or all depending on context)", + "type": "boolean", + "default": false + }, + "modify_sessions": { + "description": "Can modify running sessions (add/remove nodes)", + "type": "boolean", + "default": false + }, + "tune_nodes": { + "description": "Can tune parameters on running nodes", + "type": "boolean", + "default": false + }, + "load_plugins": { + "description": "Can upload and load plugins (WASM or native)", + "type": "boolean", + "default": false + }, + "delete_plugins": { + "description": "Can delete plugins", + "type": "boolean", + "default": false + }, + "list_nodes": { + "description": "Can view the list of available nodes", + "type": "boolean", + "default": false + }, + "list_samples": { + "description": "Can list sample pipelines", + "type": "boolean", + "default": false + }, + "read_samples": { + "description": "Can read sample pipeline YAML", + "type": "boolean", + "default": false + }, + "write_samples": { + "description": "Can save/update user pipelines in `[server].samples_dir/user`", + "type": "boolean", + "default": false + }, + "delete_samples": { + "description": "Can delete user pipelines in `[server].samples_dir/user`", + "type": "boolean", + "default": false + }, + "allowed_samples": { + "description": "Allowed sample pipeline paths (supports globs like \"oneshot/*.yml\").\n\nPaths are evaluated relative to `[server].samples_dir`.\nEmpty list means no samples are allowed (deny by default).\nUse `[\"*\"]` to allow everything.", + "type": 
"array", + "items": { + "type": "string" + }, + "default": [] + }, + "allowed_nodes": { + "description": "Allowed node types (e.g., \"audio::gain\", \"transport::moq::*\")\nEmpty list means no nodes are allowed (deny by default).\nUse `[\"*\"]` to allow everything.", + "type": "array", + "items": { + "type": "string" + }, + "default": [] + }, + "allowed_plugins": { + "description": "Allowed plugin node kinds (e.g., \"plugin::native::whisper\", \"plugin::wasm::gain\", \"plugin::*\")\nEmpty list means no plugins are allowed (deny by default).\nUse `[\"*\"]` to allow everything.", + "type": "array", + "items": { + "type": "string" + }, + "default": [] + }, + "access_all_sessions": { + "description": "Can access any user's sessions (admin capability)", + "type": "boolean", + "default": false + }, + "upload_assets": { + "description": "Can upload audio assets", + "type": "boolean", + "default": false + }, + "delete_assets": { + "description": "Can delete audio assets (user assets only)", + "type": "boolean", + "default": false + }, + "allowed_assets": { + "description": "Allowed audio asset paths (supports globs like \"samples/audio/system/*.opus\")\nEmpty list means no assets are allowed (deny by default).\nUse `[\"*\"]` to allow everything.", + "type": "array", + "items": { + "type": "string" + }, + "default": [] + } } }, - "resources": { - "$ref": "#/$defs/ResourceConfig", - "default": { - "keep_models_loaded": true, - "max_memory_mb": null, - "prewarm": { - "enabled": false, - "plugins": [] + "ScriptConfig": { + "description": "Configuration for the core::script node.", + "type": "object", + "properties": { + "default_timeout_ms": { + "description": "Default timeout for script execution per packet (in milliseconds)", + "type": "integer", + "format": "uint64", + "minimum": 0, + "default": 100 + }, + "default_memory_limit_mb": { + "description": "Default memory limit for QuickJS runtime (in megabytes)", + "type": "integer", + "format": "uint", + "minimum": 0, + 
"default": 64 + }, + "global_fetch_allowlist": { + "description": "Global fetch allowlist (empty = block all fetch() calls)\nApplies to all script nodes.\n\nSecurity note: there is no per-pipeline allowlist override; this prevents bypass via\nuser-provided pipelines.", + "type": "array", + "items": { + "$ref": "#/$defs/AllowlistRule" + }, + "default": [] + }, + "secrets": { + "description": "Available secrets (name → environment variable mapping)\nEmpty map = no secrets available to any script node\nSecrets are loaded from environment variables at server startup\nand can be injected into HTTP headers via pipeline configuration", + "type": "object", + "additionalProperties": { + "$ref": "#/$defs/SecretConfig" + }, + "default": {} } } }, - "script": { - "$ref": "#/$defs/ScriptConfig", - "default": { - "default_memory_limit_mb": 64, - "default_timeout_ms": 100, - "global_fetch_allowlist": [], - "secrets": {} - } + "AllowlistRule": { + "description": "URL allowlist rule for fetch() API in script nodes.", + "type": "object", + "properties": { + "url": { + "description": "URL pattern with wildcards (e.g., \"https://api.example.com/*\")", + "type": "string" + }, + "methods": { + "description": "Allowed HTTP methods", + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "url", + "methods" + ] }, - "security": { - "$ref": "#/$defs/SecurityConfig", - "default": { - "allowed_file_paths": [ - "samples/**" - ], - "allowed_write_paths": [] - } + "SecretConfig": { + "description": "Configuration for a single secret loaded from environment.", + "type": "object", + "properties": { + "env": { + "description": "Environment variable name containing the secret value", + "type": "string" + }, + "type": { + "description": "Type of secret (for validation and formatting)", + "$ref": "#/$defs/SecretType", + "default": "string" + }, + "allowed_fetch_urls": { + "description": "Optional allowlist of URL patterns where this secret may be injected into `fetch()` 
headers.\n\nPatterns use the same format as `script.global_fetch_allowlist` entries:\n- `https://api.openai.com/*`\n- `https://api.openai.com/v1/chat/completions`\n\nEmpty = no additional restriction (backwards-compatible).", + "type": "array", + "items": { + "type": "string" + }, + "default": [] + }, + "description": { + "description": "Optional description for documentation", + "type": "string", + "default": "" + } + }, + "required": [ + "env" + ] }, - "server": { - "$ref": "#/$defs/ServerConfig", - "default": { - "address": "127.0.0.1:4545", - "base_path": null, - "cert_path": "", - "cors": { - "allowed_origins": [ - "http://localhost", - "https://localhost", - "http://localhost:*", - "https://localhost:*", - "http://127.0.0.1", - "https://127.0.0.1", - "http://127.0.0.1:*", - "https://127.0.0.1:*" - ] + "SecretType": { + "description": "Type of secret for validation and documentation.", + "oneOf": [ + { + "description": "URL (e.g., webhook URLs)", + "type": "string", + "const": "url" }, - "key_path": "", - "max_body_size": 104857600, - "samples_dir": "./samples/pipelines", - "tls": false - } + { + "description": "Bearer token", + "type": "string", + "const": "token" + }, + { + "description": "API key", + "type": "string", + "const": "apikey" + }, + { + "description": "Generic string", + "type": "string", + "const": "string" + } + ] }, - "telemetry": { - "$ref": "#/$defs/TelemetryConfig", - "default": { - "enable": true, - "otlp_endpoint": null, - "otlp_headers": {}, - "otlp_traces_endpoint": null, - "tokio_console": false, - "tracing_enable": false + "AuthConfig": { + "description": "Authentication configuration for built-in JWT-based auth.", + "type": "object", + "properties": { + "mode": { + "description": "Authentication mode (auto, enabled, disabled)", + "$ref": "#/$defs/AuthMode", + "default": "auto" + }, + "state_dir": { + "description": "Directory for auth state (keys, tokens). 
Default: \".streamkit/auth\"", + "type": "string", + "default": ".streamkit/auth" + }, + "cookie_name": { + "description": "Cookie name for browser sessions. Default: \"skit_session\"", + "type": "string", + "default": "skit_session" + }, + "api_default_ttl_secs": { + "description": "Default TTL for API tokens in seconds. Default: 86400 (24 hours)", + "type": "integer", + "format": "uint64", + "minimum": 0, + "default": 86400 + }, + "api_max_ttl_secs": { + "description": "Maximum TTL for API tokens in seconds. Default: 2592000 (30 days)", + "type": "integer", + "format": "uint64", + "minimum": 0, + "default": 2592000 + }, + "moq_default_ttl_secs": { + "description": "Default TTL for MoQ tokens in seconds. Default: 3600 (1 hour)", + "type": "integer", + "format": "uint64", + "minimum": 0, + "default": 3600 + }, + "moq_max_ttl_secs": { + "description": "Maximum TTL for MoQ tokens in seconds. Default: 86400 (1 day)", + "type": "integer", + "format": "uint64", + "minimum": 0, + "default": 86400 + } } + }, + "AuthMode": { + "description": "Authentication mode for the server.", + "oneOf": [ + { + "description": "Auto: disabled on loopback, enabled on non-loopback", + "type": "string", + "const": "auto" + }, + { + "description": "Always require authentication", + "type": "string", + "const": "enabled" + }, + { + "description": "Disable authentication entirely (NOT recommended for production)", + "type": "string", + "const": "disabled" + } + ] } - }, - "title": "Config", - "type": "object" + } } ``` diff --git a/docs/src/content/docs/reference/configuration.md b/docs/src/content/docs/reference/configuration.md index 5ea26320..9bc5e6a0 100644 --- a/docs/src/content/docs/reference/configuration.md +++ b/docs/src/content/docs/reference/configuration.md @@ -62,8 +62,21 @@ HTTP server configuration. 
|--------|------|---------|-------------| | `directory` | string | `.plugins` | Plugin base directory | | `allow_http_management` | bool | `false` | Allow plugin upload/delete via HTTP APIs (enable only in trusted environments) | +| `marketplace_enabled` | bool | `false` | Enable marketplace browsing/install endpoints and UI | +| `allow_native_marketplace` | bool | `false` | Allow native plugins to be installed from marketplaces | +| `trusted_pubkeys` | string[] | `[]` | Minisign public keys trusted for marketplace manifests | +| `registries` | string[] | `[]` | Registry index URLs | +| `models_dir` | string? | `null` | Directory for downloaded models (defaults to `models` when unset) | +| `huggingface_token` | string? | `null` | Hugging Face token for gated model downloads | +| `allow_model_urls` | bool | `false` | Allow `source: "url"` model downloads from manifests | +| `marketplace_require_registry_origin` | bool | `false` | Require marketplace URLs to share origin with the registry index | +| `marketplace_url_allowlist` | string[] | `[]` | Allowlisted marketplace origins (same-origin relaxer, does not bypass HTTPS/host checks) | +| `marketplace_scheme_policy` | string | `https_only` | Scheme policy for marketplace URLs (`https_only` or `allow_http`) | +| `marketplace_host_policy` | string | `public_only` | Host policy for marketplace URLs (`public_only` or `allow_private`) | +| `marketplace_resolve_hostnames` | bool | `false` | Resolve hostnames and block private/loopback addresses (best-effort) | Plugins are stored in subfolders: `native/` for `.so`/`.dylib`/`.dll`, `wasm/` for `.wasm`. +Marketplace installs are extracted under `bundles/` with active records in `active/`. 
## `[resources]` diff --git a/docs/src/content/docs/reference/http-api.md b/docs/src/content/docs/reference/http-api.md index 2d1ee4b6..b835df15 100644 --- a/docs/src/content/docs/reference/http-api.md +++ b/docs/src/content/docs/reference/http-api.md @@ -116,6 +116,50 @@ Uploaded plugins are registered under: - `plugin::native::` for native libraries - `plugin::wasm::` for WASM components +## Marketplace + +Marketplace browsing (admin-only): + +- `GET /api/v1/marketplace/registries` +- `GET /api/v1/marketplace/plugins?registry=&q=` +- `GET /api/v1/marketplace/plugins/{plugin_id}?registry=&version=` + +Marketplace URL security defaults: + +- HTTPS required, localhost/private/link-local/multicast hosts blocked +- same-origin enforcement for manifest/signature/bundle URLs is optional +- allowlists never bypass HTTPS or host/IP blocking +- redirects are validated per-hop, so allowlist must cover every host in the chain (e.g. GitHub + Releases uses `github.com` plus `objects.githubusercontent.com` or `release-assets.githubusercontent.com`) + +Install jobs: + +- `POST /api/v1/plugins/install` + - Body: `{ "registry": "...", "plugin_id": "...", "version"?: "...", "install_models"?: bool, "model_ids"?: string[] }` +- `GET /api/v1/jobs/{job_id}` +- `POST /api/v1/jobs/{job_id}/cancel` + +Example job response: + +```json +{ + "status": "running", + "started_at_ms": 1730000000000, + "updated_at_ms": 1730000005000, + "summary": "Downloading bundle", + "steps": [ + { + "name": "download_bundle", + "status": "running", + "progress": { + "bytes_done": 1048576, + "bytes_total": 2097152 + } + } + ] +} +``` + ## Sample Pipelines Sample pipelines are used by the UI. They live under `[server].samples_dir` (default: `./samples/pipelines`). Permission allowlists for samples (`allowed_samples`) are evaluated against paths relative to that directory (e.g. `oneshot/*.yml`). 
diff --git a/docs/src/content/docs/reference/nodes/audio-gain.md b/docs/src/content/docs/reference/nodes/audio-gain.md index 97ece690..3bef4ac8 100644 --- a/docs/src/content/docs/reference/nodes/audio-gain.md +++ b/docs/src/content/docs/reference/nodes/audio-gain.md @@ -32,19 +32,19 @@ Adjusts audio volume by applying a linear gain multiplier to all samples. Suppor ```json { "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "AudioGainConfig", "description": "The configuration struct for the AudioGainNode.", + "type": "object", "properties": { "gain": { - "default": 1.0, "description": "A linear multiplier for the audio amplitude (e.g., 0.5 is -6dB).\nThis parameter can be updated in real-time while the node is running.\nValid range: 0.0 to 4.0", - "maximum": 4.0, + "type": "number", + "default": 1.0, "minimum": 0.0, - "tunable": true, - "type": "number" + "maximum": 4.0, + "tunable": true } - }, - "title": "AudioGainConfig", - "type": "object" + } } ``` diff --git a/docs/src/content/docs/reference/nodes/audio-mixer.md b/docs/src/content/docs/reference/nodes/audio-mixer.md index e8404238..ed943323 100644 --- a/docs/src/content/docs/reference/nodes/audio-mixer.md +++ b/docs/src/content/docs/reference/nodes/audio-mixer.md @@ -33,76 +33,76 @@ Combines multiple audio streams into a single output by summing samples. 
Support ```json { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "AudioMixerConfig", + "description": "Configuration for the AudioMixerNode.", + "type": "object", + "properties": { + "sync_timeout_ms": { + "description": "Timeout in milliseconds for waiting for slow inputs.\nIf specified, the mixer will wait up to this duration for all active pins to provide frames.\nIf timeout expires, missing pins will be mixed as silence.\nIf not specified (None), the mixer will wait indefinitely (strict broadcast synchronization).\nDefault: Some(100)", + "type": [ + "integer", + "null" + ], + "format": "uint64", + "minimum": 0, + "default": 100 + }, + "num_inputs": { + "description": "Number of input pins to pre-create.\nRequired for stateless/oneshot pipelines where pins must exist before graph building.\nOptional for dynamic pipelines where pins are created on-demand.\nIf specified, pins will be named in_0, in_1, ..., in_{N-1}.", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 0, + "default": null + }, + "clocked": { + "description": "Enable clocked mixing mode (dedicated mixing thread + per-input jitter buffers).\n\nWhen enabled, the mixer emits frames on a fixed cadence determined by\n`sample_rate` and `frame_samples_per_channel`.", + "anyOf": [ + { + "$ref": "#/$defs/ClockedMixerConfig" + }, + { + "type": "null" + } + ] + } + }, "$defs": { "ClockedMixerConfig": { + "type": "object", "properties": { + "sample_rate": { + "description": "Output sample rate (Hz). 
Inputs are expected to already match this.", + "type": "integer", + "format": "uint32", + "minimum": 0, + "default": 48000 + }, "frame_samples_per_channel": { - "default": 960, "description": "Fixed frame size (samples per channel) for the clocked mixer.\n\nExample: `960` @ `48000` Hz => 20ms frames.", + "type": "integer", "format": "uint", "minimum": 0, - "type": "integer" - }, - "generate_silence": { - "default": true, - "description": "If true, emit silence frames on ticks even when no inputs have data.\n\nIf false, the clocked mixer only emits output on ticks where at least one input\ncontributes a frame.", - "type": "boolean" + "default": 960 }, "jitter_buffer_frames": { - "default": 3, "description": "Per-input jitter buffer depth (in frames).\n\nFrames are queued in order. When full, the oldest frame is dropped (overwrite-oldest).\n\nRecommended: 2-3 for ~40-60ms jitter tolerance at 20ms frames.", + "type": "integer", "format": "uint", "minimum": 0, - "type": "integer" + "default": 3 }, - "sample_rate": { - "default": 48000, - "description": "Output sample rate (Hz). Inputs are expected to already match this.", - "format": "uint32", - "minimum": 0, - "type": "integer" - } - }, - "type": "object" - } - }, - "$schema": "https://json-schema.org/draft/2020-12/schema", - "description": "Configuration for the AudioMixerNode.", - "properties": { - "clocked": { - "anyOf": [ - { - "$ref": "#/$defs/ClockedMixerConfig" - }, - { - "type": "null" + "generate_silence": { + "description": "If true, emit silence frames on ticks even when no inputs have data.\n\nIf false, the clocked mixer only emits output on ticks where at least one input\ncontributes a frame.", + "type": "boolean", + "default": true } - ], - "description": "Enable clocked mixing mode (dedicated mixing thread + per-input jitter buffers).\n\nWhen enabled, the mixer emits frames on a fixed cadence determined by\n`sample_rate` and `frame_samples_per_channel`." 
- }, - "num_inputs": { - "default": null, - "description": "Number of input pins to pre-create.\nRequired for stateless/oneshot pipelines where pins must exist before graph building.\nOptional for dynamic pipelines where pins are created on-demand.\nIf specified, pins will be named in_0, in_1, ..., in_{N-1}.", - "format": "uint", - "minimum": 0, - "type": [ - "integer", - "null" - ] - }, - "sync_timeout_ms": { - "default": 100, - "description": "Timeout in milliseconds for waiting for slow inputs.\nIf specified, the mixer will wait up to this duration for all active pins to provide frames.\nIf timeout expires, missing pins will be mixed as silence.\nIf not specified (None), the mixer will wait indefinitely (strict broadcast synchronization).\nDefault: Some(100)", - "format": "uint64", - "minimum": 0, - "type": [ - "integer", - "null" - ] + } } - }, - "title": "AudioMixerConfig", - "type": "object" + } } ``` diff --git a/docs/src/content/docs/reference/nodes/audio-opus-encoder.md b/docs/src/content/docs/reference/nodes/audio-opus-encoder.md index de699d77..852dd7bb 100644 --- a/docs/src/content/docs/reference/nodes/audio-opus-encoder.md +++ b/docs/src/content/docs/reference/nodes/audio-opus-encoder.md @@ -33,18 +33,18 @@ Encodes raw PCM audio into Opus-compressed packets. 
Configurable bitrate, applic ```json { "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "OpusEncoderConfig", + "type": "object", "properties": { "bitrate": { - "default": 64000, - "maximum": 510000, + "type": "integer", "minimum": 6000, + "maximum": 510000, "multipleOf": 1000, - "tunable": false, - "type": "integer" + "default": 64000, + "tunable": false } - }, - "title": "OpusEncoderConfig", - "type": "object" + } } ``` diff --git a/docs/src/content/docs/reference/nodes/audio-pacer.md b/docs/src/content/docs/reference/nodes/audio-pacer.md index 3109057c..fb87d7cb 100644 --- a/docs/src/content/docs/reference/nodes/audio-pacer.md +++ b/docs/src/content/docs/reference/nodes/audio-pacer.md @@ -36,49 +36,49 @@ Controls audio playback timing by releasing frames at their natural rate. Useful ```json { "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "AudioPacerConfig", "description": "Configuration for the AudioPacerNode", + "type": "object", "properties": { + "speed": { + "description": "Playback speed multiplier (1.0 = real-time, 2.0 = 2x speed, 0.5 = half speed)", + "type": "number", + "format": "float", + "default": 1.0 + }, "buffer_size": { - "default": 32, "description": "Maximum number of audio frames to buffer internally\nDefault: 32 frames (~640ms of audio at 20ms/frame)", + "type": "integer", "format": "uint", "minimum": 1, - "type": "integer" + "default": 32 }, "generate_silence": { - "default": true, "description": "Generate silence frames when input queue is empty to maintain continuous stream\nPrevents gaps in audio output (useful for real-time streaming protocols like MoQ)\nDefault: true", - "type": "boolean" + "type": "boolean", + "default": true }, - "initial_channels": { - "default": null, - "format": "uint16", - "maximum": 65535, - "minimum": 0, + "initial_sample_rate": { + "description": "Optional initial audio format used to start pacing immediately (before the first input frame).\n\nWithout an initial 
format, the pacer learns `(sample_rate, channels)` from the first\nreceived frame. For pipelines that may take seconds before producing the first frame\n(e.g., STT → LLM → TTS), this can cause downstream consumers to see a long gap and\nunderflow. Setting these lets the pacer emit silence right away.", "type": [ "integer", "null" - ] - }, - "initial_sample_rate": { - "default": null, - "description": "Optional initial audio format used to start pacing immediately (before the first input frame).\n\nWithout an initial format, the pacer learns `(sample_rate, channels)` from the first\nreceived frame. For pipelines that may take seconds before producing the first frame\n(e.g., STT → LLM → TTS), this can cause downstream consumers to see a long gap and\nunderflow. Setting these lets the pacer emit silence right away.", + ], "format": "uint32", "minimum": 0, + "default": null + }, + "initial_channels": { "type": [ "integer", "null" - ] - }, - "speed": { - "default": 1.0, - "description": "Playback speed multiplier (1.0 = real-time, 2.0 = 2x speed, 0.5 = half speed)", - "format": "float", - "type": "number" + ], + "format": "uint16", + "minimum": 0, + "maximum": 65535, + "default": null } - }, - "title": "AudioPacerConfig", - "type": "object" + } } ``` diff --git a/docs/src/content/docs/reference/nodes/audio-resampler.md b/docs/src/content/docs/reference/nodes/audio-resampler.md index f002072b..4e1380e1 100644 --- a/docs/src/content/docs/reference/nodes/audio-resampler.md +++ b/docs/src/content/docs/reference/nodes/audio-resampler.md @@ -34,34 +34,34 @@ Converts audio between different sample rates using high-quality resampling. 
Ess ```json { "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "AudioResamplerConfig", "description": "Configuration for the AudioResamplerNode", + "type": "object", "properties": { + "target_sample_rate": { + "description": "Target output sample rate in Hz (e.g., 48000, 24000, 16000)\nInput audio will be resampled to this rate\nMust be greater than 0", + "type": "integer", + "format": "uint32", + "minimum": 1 + }, "chunk_frames": { - "default": 960, "description": "Fixed chunk size for resampler (default: 960 frames = 20ms at 48kHz)\nLarger values = better efficiency but more latency", + "type": "integer", "format": "uint", "minimum": 1, - "type": "integer" + "default": 960 }, "output_frame_size": { - "default": 960, "description": "Output frame size - packets will be buffered to this exact size (default: 960 = 20ms at 48kHz)\nMust be a valid Opus frame size: 120, 240, 480, 960, 1920, or 2880 samples\nSet to 0 to disable output buffering (variable frame sizes)", + "type": "integer", "format": "uint", "minimum": 0, - "type": "integer" - }, - "target_sample_rate": { - "description": "Target output sample rate in Hz (e.g., 48000, 24000, 16000)\nInput audio will be resampled to this rate\nMust be greater than 0", - "format": "uint32", - "minimum": 1, - "type": "integer" + "default": 960 } }, "required": [ "target_sample_rate" - ], - "title": "AudioResamplerConfig", - "type": "object" + ] } ``` diff --git a/docs/src/content/docs/reference/nodes/containers-ogg-muxer.md b/docs/src/content/docs/reference/nodes/containers-ogg-muxer.md index 75315a7c..e474e925 100644 --- a/docs/src/content/docs/reference/nodes/containers-ogg-muxer.md +++ b/docs/src/content/docs/reference/nodes/containers-ogg-muxer.md @@ -34,43 +34,43 @@ Muxes Opus audio packets into an Ogg container. 
Produces streamable Ogg/Opus out ```json { - "$defs": { - "OggMuxerCodec": { - "enum": [ - "opus" - ], - "type": "string" - } - }, "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "OggMuxerConfig", + "type": "object", "properties": { + "stream_serial": { + "type": "integer", + "format": "uint32", + "minimum": 0, + "default": 0 + }, + "codec": { + "$ref": "#/$defs/OggMuxerCodec" + }, "channels": { - "default": 1, "description": "Number of audio channels (1 for mono, 2 for stereo). Defaults to 1.", + "type": "integer", "format": "uint8", - "maximum": 255, "minimum": 0, - "type": "integer" + "maximum": 255, + "default": 1 }, "chunk_size": { - "default": 65536, "description": "The number of bytes to buffer before flushing to the output. Defaults to 65536.", + "type": "integer", "format": "uint", "minimum": 0, - "type": "integer" - }, - "codec": { - "$ref": "#/$defs/OggMuxerCodec" - }, - "stream_serial": { - "default": 0, - "format": "uint32", - "minimum": 0, - "type": "integer" + "default": 65536 } }, - "title": "OggMuxerConfig", - "type": "object" + "$defs": { + "OggMuxerCodec": { + "type": "string", + "enum": [ + "opus" + ] + } + } } ``` diff --git a/docs/src/content/docs/reference/nodes/containers-webm-muxer.md b/docs/src/content/docs/reference/nodes/containers-webm-muxer.md index 7894077e..c321ac1b 100644 --- a/docs/src/content/docs/reference/nodes/containers-webm-muxer.md +++ b/docs/src/content/docs/reference/nodes/containers-webm-muxer.md @@ -34,52 +34,52 @@ Muxes Opus audio into a WebM container. 
Produces streamable WebM/Opus output com ```json { - "$defs": { - "WebMStreamingMode": { - "oneOf": [ - { - "const": "live", - "description": "Live streaming mode - optimized for real-time streaming, no duration/seeking info (default)", - "type": "string" - }, - { - "const": "file", - "description": "File mode - includes full duration and seeking information", - "type": "string" - } - ] - } - }, "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "WebMMuxerConfig", + "type": "object", "properties": { + "sample_rate": { + "description": "Audio sample rate in Hz", + "type": "integer", + "format": "uint32", + "minimum": 0, + "default": 48000 + }, "channels": { - "default": 2, "description": "Number of audio channels (1 for mono, 2 for stereo)", + "type": "integer", "format": "uint32", "minimum": 0, - "type": "integer" + "default": 2 }, "chunk_size": { - "default": 65536, "description": "The number of bytes to buffer before flushing to the output. Defaults to 65536.", + "type": "integer", "format": "uint", "minimum": 0, - "type": "integer" - }, - "sample_rate": { - "default": 48000, - "description": "Audio sample rate in Hz", - "format": "uint32", - "minimum": 0, - "type": "integer" + "default": 65536 }, "streaming_mode": { - "$ref": "#/$defs/WebMStreamingMode", - "description": "Streaming mode: \"live\" for real-time streaming (no duration), \"file\" for complete files with duration (default)" + "description": "Streaming mode: \"live\" for real-time streaming (no duration), \"file\" for complete files with duration (default)", + "$ref": "#/$defs/WebMStreamingMode" } }, - "title": "WebMMuxerConfig", - "type": "object" + "$defs": { + "WebMStreamingMode": { + "oneOf": [ + { + "description": "Live streaming mode - optimized for real-time streaming, no duration/seeking info (default)", + "type": "string", + "const": "live" + }, + { + "description": "File mode - includes full duration and seeking information", + "type": "string", + "const": "file" + } + ] 
+ } + } } ``` diff --git a/docs/src/content/docs/reference/nodes/core-file-reader.md b/docs/src/content/docs/reference/nodes/core-file-reader.md index 9fdc9364..ab409122 100644 --- a/docs/src/content/docs/reference/nodes/core-file-reader.md +++ b/docs/src/content/docs/reference/nodes/core-file-reader.md @@ -33,25 +33,25 @@ No inputs. ```json { "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "FileReadConfig", "description": "Configuration for the FileReadNode", + "type": "object", "properties": { + "path": { + "description": "Path to the file to read", + "type": "string" + }, "chunk_size": { - "default": 8192, "description": "Size of chunks to read (default: 8192 bytes)", + "type": "integer", "format": "uint", "minimum": 0, - "type": "integer" - }, - "path": { - "description": "Path to the file to read", - "type": "string" + "default": 8192 } }, "required": [ "path" - ], - "title": "FileReadConfig", - "type": "object" + ] } ``` diff --git a/docs/src/content/docs/reference/nodes/core-file-writer.md b/docs/src/content/docs/reference/nodes/core-file-writer.md index 59883627..9177f827 100644 --- a/docs/src/content/docs/reference/nodes/core-file-writer.md +++ b/docs/src/content/docs/reference/nodes/core-file-writer.md @@ -33,25 +33,25 @@ No outputs. 
```json { "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "FileWriteConfig", "description": "Configuration for the FileWriteNode", + "type": "object", "properties": { + "path": { + "description": "Path to the file to write", + "type": "string" + }, "chunk_size": { - "default": 8192, "description": "Size of buffer before writing to disk (default: 8192 bytes)", + "type": "integer", "format": "uint", "minimum": 0, - "type": "integer" - }, - "path": { - "description": "Path to the file to write", - "type": "string" + "default": 8192 } }, "required": [ "path" - ], - "title": "FileWriteConfig", - "type": "object" + ] } ``` diff --git a/docs/src/content/docs/reference/nodes/core-json-serialize.md b/docs/src/content/docs/reference/nodes/core-json-serialize.md index c39bf651..1ae8f6e2 100644 --- a/docs/src/content/docs/reference/nodes/core-json-serialize.md +++ b/docs/src/content/docs/reference/nodes/core-json-serialize.md @@ -33,21 +33,21 @@ Converts structured packets (Text, Transcription) to JSON-formatted text. 
Useful ```json { "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "JsonSerializeConfig", "description": "Configuration for JSON serialization", + "type": "object", "properties": { - "newline_delimited": { - "default": false, - "description": "Add newline after each JSON object (for NDJSON format)", - "type": "boolean" - }, "pretty": { - "default": false, "description": "Enable pretty-printing (formatted with indentation)", - "type": "boolean" + "type": "boolean", + "default": false + }, + "newline_delimited": { + "description": "Add newline after each JSON object (for NDJSON format)", + "type": "boolean", + "default": false } - }, - "title": "JsonSerializeConfig", - "type": "object" + } } ``` diff --git a/docs/src/content/docs/reference/nodes/core-pacer.md b/docs/src/content/docs/reference/nodes/core-pacer.md index df2548f9..dc46edbc 100644 --- a/docs/src/content/docs/reference/nodes/core-pacer.md +++ b/docs/src/content/docs/reference/nodes/core-pacer.md @@ -34,31 +34,31 @@ Controls packet flow rate by releasing packets at specified intervals. Useful fo ```json { "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "PacerConfig", "description": "Configuration for the PacerNode", + "type": "object", "properties": { + "speed": { + "description": "Playback speed multiplier (1.0 = real-time, 2.0 = 2x speed, 0.5 = half speed)", + "type": "number", + "format": "float", + "default": 1.0 + }, "buffer_size": { - "default": 16, "description": "Maximum number of packets to buffer internally (for backpressure control)\nHigher values = more memory, smoother pacing. 
Lower values = less memory, more backpressure.\nDefault: 16 packets (~320ms of audio at 20ms/frame)", + "type": "integer", "format": "uint", "minimum": 1, - "type": "integer" + "default": 16 }, "initial_burst_packets": { - "default": 0, "description": "Number of initial packets to send at 10x speed before starting paced delivery.\nThis builds up a client-side buffer to absorb network jitter.\nDefault: 0 (no initial burst). Recommended: 5-25 packets for networked streaming.", + "type": "integer", "format": "uint", "minimum": 0, - "type": "integer" - }, - "speed": { - "default": 1.0, - "description": "Playback speed multiplier (1.0 = real-time, 2.0 = 2x speed, 0.5 = half speed)", - "format": "float", - "type": "number" + "default": 0 } - }, - "title": "PacerConfig", - "type": "object" + } } ``` diff --git a/docs/src/content/docs/reference/nodes/core-script.md b/docs/src/content/docs/reference/nodes/core-script.md index 33a15228..e5cd4546 100644 --- a/docs/src/content/docs/reference/nodes/core-script.md +++ b/docs/src/content/docs/reference/nodes/core-script.md @@ -43,72 +43,72 @@ Execute custom JavaScript code for API integration, webhooks, text transformatio ```json { - "$defs": { - "HeaderMapping": { - "description": "Maps a server-configured secret to an HTTP header for fetch() calls", - "properties": { - "header": { - "description": "HTTP header name (e.g., \"Authorization\", \"X-API-Key\")", - "type": "string" - }, - "secret": { - "description": "Secret name (must exist in server config's [script.secrets])", - "type": "string" - }, - "template": { - "default": "{}", - "description": "Optional template for formatting the header value\nUse {} as placeholder for the secret value\nExamples: \"Bearer {}\", \"token {}\", \"ApiKey {}\"\nDefault: \"{}\" (raw value)", - "type": "string" - } - }, - "required": [ - "secret", - "header" - ], - "type": "object" - } - }, "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "ScriptConfig", "description": 
"Configuration for the script node", + "type": "object", "properties": { - "headers": { - "default": [], - "description": "Header mappings for fetch() calls\nMaps secret names to HTTP headers with optional templates", - "items": { - "$ref": "#/$defs/HeaderMapping" - }, - "type": "array" - }, - "memory_limit_mb": { - "default": 64, - "description": "QuickJS memory limit in MB (default: 64MB)", - "format": "uint", - "minimum": 0, - "type": "integer" - }, "script": { - "default": "", "description": "JavaScript code (must define a process(packet) function)", - "type": "string" + "type": "string", + "default": "" }, "script_path": { - "default": null, "description": "Optional path to a JavaScript file to load as the script.\n\nIf set, the file contents are loaded at node creation time.\nFor security, the StreamKit server validates this path against `security.allowed_file_paths`.", "type": [ "string", "null" - ] + ], + "default": null }, "timeout_ms": { - "default": 100, "description": "Per-packet timeout in milliseconds (default: 100ms)", + "type": "integer", "format": "uint64", "minimum": 0, - "type": "integer" + "default": 100 + }, + "memory_limit_mb": { + "description": "QuickJS memory limit in MB (default: 64MB)", + "type": "integer", + "format": "uint", + "minimum": 0, + "default": 64 + }, + "headers": { + "description": "Header mappings for fetch() calls\nMaps secret names to HTTP headers with optional templates", + "type": "array", + "items": { + "$ref": "#/$defs/HeaderMapping" + }, + "default": [] } }, - "title": "ScriptConfig", - "type": "object" + "$defs": { + "HeaderMapping": { + "description": "Maps a server-configured secret to an HTTP header for fetch() calls", + "type": "object", + "properties": { + "secret": { + "description": "Secret name (must exist in server config's [script.secrets])", + "type": "string" + }, + "header": { + "description": "HTTP header name (e.g., \"Authorization\", \"X-API-Key\")", + "type": "string" + }, + "template": { + 
"description": "Optional template for formatting the header value\nUse {} as placeholder for the secret value\nExamples: \"Bearer {}\", \"token {}\", \"ApiKey {}\"\nDefault: \"{}\" (raw value)", + "type": "string", + "default": "{}" + } + }, + "required": [ + "secret", + "header" + ] + } + } } ``` diff --git a/docs/src/content/docs/reference/nodes/core-telemetry-out.md b/docs/src/content/docs/reference/nodes/core-telemetry-out.md index 956be2dc..ff50b874 100644 --- a/docs/src/content/docs/reference/nodes/core-telemetry-out.md +++ b/docs/src/content/docs/reference/nodes/core-telemetry-out.md @@ -34,36 +34,36 @@ No outputs. ```json { "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "TelemetryOutConfig", + "type": "object", "properties": { + "packet_types": { + "description": "Which packet types to convert to telemetry.\nDefault: `[\"Transcription\", \"Custom\"]`", + "type": "array", + "items": { + "type": "string" + }, + "default": [ + "Transcription", + "Custom" + ] + }, "event_type_filter": { - "default": [], "description": "Filter event types (glob-style prefix patterns like `vad.*`).\nEmpty list means all events are included.", + "type": "array", "items": { "type": "string" }, - "type": "array" + "default": [] }, "max_events_per_sec": { - "default": 100, "description": "Maximum events per second per event type.", + "type": "integer", "format": "uint32", "minimum": 0, - "type": "integer" - }, - "packet_types": { - "default": [ - "Transcription", - "Custom" - ], - "description": "Which packet types to convert to telemetry.\nDefault: `[\"Transcription\", \"Custom\"]`", - "items": { - "type": "string" - }, - "type": "array" + "default": 100 } - }, - "title": "TelemetryOutConfig", - "type": "object" + } } ``` diff --git a/docs/src/content/docs/reference/nodes/core-telemetry-tap.md b/docs/src/content/docs/reference/nodes/core-telemetry-tap.md index 8e8e23fa..8ba19c09 100644 --- a/docs/src/content/docs/reference/nodes/core-telemetry-tap.md +++ 
b/docs/src/content/docs/reference/nodes/core-telemetry-tap.md @@ -35,44 +35,44 @@ Observes packets and emits telemetry events for debugging and timeline visualiza ```json { "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "TelemetryTapConfig", "description": "Configuration for the telemetry tap node.", + "type": "object", "properties": { - "audio_sample_interval_ms": { - "default": 1000, - "description": "Audio sampling interval in milliseconds (for Audio packets).\nSet to 0 to disable audio level events.", - "format": "uint64", - "minimum": 0, - "type": "integer" + "packet_types": { + "description": "Which packet types to convert to telemetry.\nDefault: `[\"Transcription\", \"Custom\"]`", + "type": "array", + "items": { + "type": "string" + }, + "default": [ + "Transcription", + "Custom" + ] }, "event_type_filter": { - "default": [], "description": "Filter Custom packets by event_type pattern (glob-style).\nEmpty list means all Custom packets are included.", + "type": "array", "items": { "type": "string" }, - "type": "array" + "default": [] }, "max_events_per_sec": { - "default": 100, "description": "Maximum events per second per event type.", + "type": "integer", "format": "uint32", "minimum": 0, - "type": "integer" + "default": 100 }, - "packet_types": { - "default": [ - "Transcription", - "Custom" - ], - "description": "Which packet types to convert to telemetry.\nDefault: `[\"Transcription\", \"Custom\"]`", - "items": { - "type": "string" - }, - "type": "array" + "audio_sample_interval_ms": { + "description": "Audio sampling interval in milliseconds (for Audio packets).\nSet to 0 to disable audio level events.", + "type": "integer", + "format": "uint64", + "minimum": 0, + "default": 1000 } - }, - "title": "TelemetryTapConfig", - "type": "object" + } } ``` diff --git a/docs/src/content/docs/reference/nodes/core-text-chunker.md b/docs/src/content/docs/reference/nodes/core-text-chunker.md index c9f1851f..dd642c0e 100644 --- 
a/docs/src/content/docs/reference/nodes/core-text-chunker.md +++ b/docs/src/content/docs/reference/nodes/core-text-chunker.md @@ -33,50 +33,50 @@ Splits text into smaller chunks at sentence or clause boundaries. Essential for ```json { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "TextChunkerConfig", + "type": "object", + "properties": { + "split_mode": { + "description": "Splitting mode: \"sentences\" or \"words\"", + "$ref": "#/$defs/SplitMode" + }, + "min_length": { + "description": "Minimum chunk length before emitting (used in sentence mode)", + "type": "integer", + "format": "uint", + "minimum": 0, + "default": 10 + }, + "chunk_words": { + "description": "Number of words per chunk (used in word mode)", + "type": "integer", + "format": "uint", + "minimum": 0, + "default": 5 + } + }, "$defs": { "SplitMode": { "oneOf": [ { - "const": "sentences", "description": "Split on sentence boundaries (. ! ? etc.)", - "type": "string" + "type": "string", + "const": "sentences" }, { - "const": "clauses", "description": "Split on sentences AND pauses (commas, dashes, semicolons) for natural streaming", - "type": "string" + "type": "string", + "const": "clauses" }, { - "const": "words", "description": "Split after N words for lower latency (not recommended for TTS)", - "type": "string" + "type": "string", + "const": "words" } ] } - }, - "$schema": "https://json-schema.org/draft/2020-12/schema", - "properties": { - "chunk_words": { - "default": 5, - "description": "Number of words per chunk (used in word mode)", - "format": "uint", - "minimum": 0, - "type": "integer" - }, - "min_length": { - "default": 10, - "description": "Minimum chunk length before emitting (used in sentence mode)", - "format": "uint", - "minimum": 0, - "type": "integer" - }, - "split_mode": { - "$ref": "#/$defs/SplitMode", - "description": "Splitting mode: \"sentences\" or \"words\"" - } - }, - "title": "TextChunkerConfig", - "type": "object" + } } ``` diff --git 
a/docs/src/content/docs/reference/nodes/streamkit-http-input.md b/docs/src/content/docs/reference/nodes/streamkit-http-input.md index 5b8b2433..66323357 100644 --- a/docs/src/content/docs/reference/nodes/streamkit-http-input.md +++ b/docs/src/content/docs/reference/nodes/streamkit-http-input.md @@ -18,22 +18,62 @@ Synthetic input node for oneshot HTTP pipelines. Receives binary data from the H No inputs. ### Outputs -- Single-field mode: one `Binary` pin named after `field` (defaults to `media` when a single `http_input` exists). -- Multi-field mode: one `Binary` pin per `fields` entry. Pin names match the field names and **no legacy `media` pin is added**. +- `out` produces `Binary` (broadcast) ## Parameters -- `field` (`string`, optional) — Multipart field name to bind to this input. Defaults to `media` when there is only one `http_input` node; otherwise defaults to the node id. -- `fields` (`array`, optional) — List of multipart fields for this node. Each entry can be a string or `{ name, required }`. When set, only these fields are accepted and the legacy `media` field is disabled. `field` and `fields` are mutually exclusive. -- `required` (`boolean`, default: `true`) — When `true`, the request must include this field. Ignored when `fields` is provided (use per-entry `required` instead). - -When `fields` is provided, this node exposes multiple output pins, one per field. Pin names match the field names, allowing you to wire each uploaded stream independently. The legacy `media` pin is not added in this mode. +| Name | Type | Required | Default | Description | +| --- | --- | --- | --- | --- | +| `field` | `string` | no | — | Multipart field name to bind to this input. Defaults to 'media' when only one http_input node exists; otherwise defaults to the node id. | +| `fields` | `array` | no | — | Optional list of multipart fields for this node. When set, the node exposes one output pin per entry (pin name matches the field name). 
Entries may be strings or objects with { name, required }. | +| `required` | `boolean` | no | `true` | If true (default), the request must include this field. |
Raw JSON Schema ```json -{} +{ + "type": "object", + "additionalProperties": false, + "properties": { + "field": { + "type": "string", + "description": "Multipart field name to bind to this input. Defaults to 'media' when only one http_input node exists; otherwise defaults to the node id." + }, + "fields": { + "type": "array", + "description": "Optional list of multipart fields for this node. When set, the node exposes one output pin per entry (pin name matches the field name). Entries may be strings or objects with { name, required }.", + "items": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "type": "string" + }, + "required": { + "type": "boolean", + "default": true + } + }, + "required": [ + "name" + ] + } + ] + } + }, + "required": { + "type": "boolean", + "description": "If true (default), the request must include this field.", + "default": true + } + } +} ```
diff --git a/docs/src/content/docs/reference/nodes/transport-http-fetcher.md b/docs/src/content/docs/reference/nodes/transport-http-fetcher.md index b1d34de4..827d8cc9 100644 --- a/docs/src/content/docs/reference/nodes/transport-http-fetcher.md +++ b/docs/src/content/docs/reference/nodes/transport-http-fetcher.md @@ -33,25 +33,25 @@ No inputs. ```json { "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "HttpPullConfig", "description": "Configuration for the HttpPullNode", + "type": "object", "properties": { + "url": { + "description": "URL to fetch (HTTP or HTTPS)", + "type": "string" + }, "chunk_size": { - "default": 8192, "description": "Size of chunks to read (default: 8192 bytes)", + "type": "integer", "format": "uint", "minimum": 1, - "type": "integer" - }, - "url": { - "description": "URL to fetch (HTTP or HTTPS)", - "type": "string" + "default": 8192 } }, "required": [ "url" - ], - "title": "HttpPullConfig", - "type": "object" + ] } ``` diff --git a/docs/src/content/docs/reference/nodes/transport-moq-peer.md b/docs/src/content/docs/reference/nodes/transport-moq-peer.md index 19024f49..040b2e98 100644 --- a/docs/src/content/docs/reference/nodes/transport-moq-peer.md +++ b/docs/src/content/docs/reference/nodes/transport-moq-peer.md @@ -39,44 +39,44 @@ Bidirectional MoQ peer for real-time audio communication. 
Acts as both publisher ```json { "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "MoqPeerConfig", + "type": "object", "properties": { - "allow_reconnect": { - "default": false, - "description": "Allow publisher reconnections without recreating the session", - "type": "boolean" - }, - "gateway_path": { - "default": "/moq", - "description": "Base path for gateway routing (e.g., \"/moq\")\nPublishers connect to \"{gateway_path}/input\", subscribers to \"{gateway_path}/output\"", - "type": "string" - }, "input_broadcast": { - "default": "input", "description": "Broadcast name to receive from publisher client", - "type": "string" + "type": "string", + "default": "input" }, "output_broadcast": { - "default": "output", "description": "Broadcast name to send to subscriber clients", - "type": "string" + "type": "string", + "default": "output" + }, + "gateway_path": { + "description": "Base path for gateway routing (e.g., \"/moq\")\nPublishers connect to \"{gateway_path}/input\", subscribers to \"{gateway_path}/output\"", + "type": "string", + "default": "/moq" + }, + "allow_reconnect": { + "description": "Allow publisher reconnections without recreating the session", + "type": "boolean", + "default": false }, "output_group_duration_ms": { - "default": 40, "description": "Duration of each MoQ group in milliseconds for the subscriber output.\n\nDefault: 40ms (2 Opus frames at 20ms each).", + "type": "integer", "format": "uint64", "minimum": 0, - "type": "integer" + "default": 40 }, "output_initial_delay_ms": { - "default": 0, "description": "Adds a timestamp offset (playout delay) so receivers can buffer before playback.\n\nDefault: 0 (no added delay).", + "type": "integer", "format": "uint64", "minimum": 0, - "type": "integer" + "default": 0 } - }, - "title": "MoqPeerConfig", - "type": "object" + } } ``` diff --git a/docs/src/content/docs/reference/nodes/transport-moq-publisher.md b/docs/src/content/docs/reference/nodes/transport-moq-publisher.md index 
4b0d68d4..2a27328e 100644 --- a/docs/src/content/docs/reference/nodes/transport-moq-publisher.md +++ b/docs/src/content/docs/reference/nodes/transport-moq-publisher.md @@ -38,46 +38,46 @@ No outputs. ```json { "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "MoqPushConfig", + "type": "object", "properties": { + "url": { + "type": "string", + "default": "" + }, + "jwt": { + "description": "Optional JWT for authenticated MoQ relays. When set, it is appended as `?jwt=...`.\n\nThis is compatible with moq-relay and StreamKit's built-in MoQ auth.", + "type": [ + "string", + "null" + ], + "default": null + }, "broadcast": { - "default": "", - "type": "string" + "type": "string", + "default": "" }, "channels": { - "default": 2, + "type": "integer", "format": "uint32", "minimum": 0, - "type": "integer" + "default": 2 }, "group_duration_ms": { - "default": 40, "description": "Duration of each MoQ group in milliseconds.\nSmaller groups = lower latency but more overhead.\nLarger groups = higher latency but better efficiency.\nDefault: 40ms (2 Opus frames at 20ms each).\nFor real-time applications, use 20-60ms. For high-latency networks, use 100ms+.", + "type": "integer", "format": "uint64", "minimum": 0, - "type": "integer" + "default": 40 }, "initial_delay_ms": { - "default": 0, "description": "Adds a timestamp offset (playout delay) so receivers can buffer before playback.\n\nThis is especially helpful when subscribers are on higher-latency / higher-jitter links,\nand the client begins playback as soon as it sees the first frame.\n\nDefault: 0 (no added delay).", + "type": "integer", "format": "uint64", "minimum": 0, - "type": "integer" - }, - "jwt": { - "default": null, - "description": "Optional JWT for authenticated MoQ relays. 
When set, it is appended as `?jwt=...`.\n\nThis is compatible with moq-relay and StreamKit's built-in MoQ auth.", - "type": [ - "string", - "null" - ] - }, - "url": { - "default": "", - "type": "string" + "default": 0 } - }, - "title": "MoqPushConfig", - "type": "object" + } } ``` diff --git a/docs/src/content/docs/reference/nodes/transport-moq-subscriber.md b/docs/src/content/docs/reference/nodes/transport-moq-subscriber.md index 1526b468..8bc9779d 100644 --- a/docs/src/content/docs/reference/nodes/transport-moq-subscriber.md +++ b/docs/src/content/docs/reference/nodes/transport-moq-subscriber.md @@ -36,33 +36,33 @@ No inputs. ```json { "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "MoqPullConfig", + "type": "object", "properties": { - "batch_ms": { - "default": 0, - "description": "Batch window in milliseconds. If > 0, after receiving a frame the node will\nwait up to this duration to collect additional frames before forwarding.\nDefault: 0 (no batching) - recommended because moq_lite's TrackConsumer::read()\nhas internal allocation overhead that makes batching counterproductive.", - "format": "uint64", - "minimum": 0, - "type": "integer" - }, - "broadcast": { - "default": "", - "type": "string" + "url": { + "type": "string", + "default": "" }, "jwt": { - "default": null, "description": "Optional JWT for authenticated MoQ relays. When set, it is appended as `?jwt=...`.\n\nThis is compatible with moq-relay and StreamKit's built-in MoQ auth.", "type": [ "string", "null" - ] + ], + "default": null }, - "url": { - "default": "", - "type": "string" + "broadcast": { + "type": "string", + "default": "" + }, + "batch_ms": { + "description": "Batch window in milliseconds. 
If > 0, after receiving a frame the node will\nwait up to this duration to collect additional frames before forwarding.\nDefault: 0 (no batching) - recommended because moq_lite's TrackConsumer::read()\nhas internal allocation overhead that makes batching counterproductive.", + "type": "integer", + "format": "uint64", + "minimum": 0, + "default": 0 } - }, - "title": "MoqPullConfig", - "type": "object" + } } ``` diff --git a/docs/src/content/docs/reference/packets/custom.md b/docs/src/content/docs/reference/packets/custom.md index e846ac7c..fe31a1bf 100644 --- a/docs/src/content/docs/reference/packets/custom.md +++ b/docs/src/content/docs/reference/packets/custom.md @@ -100,81 +100,81 @@ Custom packets are carried as `Packet::Custom(Arc)`. ```json { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "CustomPacketData", + "description": "Extensible structured packet data.", + "type": "object", + "properties": { + "type_id": { + "description": "Namespaced, versioned type id (e.g., `plugin::native::vad/vad-event@1`).", + "type": "string" + }, + "encoding": { + "$ref": "#/$defs/CustomEncoding" + }, + "data": true, + "metadata": { + "description": "Optional timing/ordering metadata.", + "anyOf": [ + { + "$ref": "#/$defs/PacketMetadata" + }, + { + "type": "null" + } + ] + } + }, + "required": [ + "type_id", + "encoding", + "data" + ], "$defs": { "CustomEncoding": { "description": "Encoding for [`Packet::Custom`] payloads.\n\nThis is intentionally extensible. 
For now we keep things user-friendly and debuggable.", "oneOf": [ { - "const": "json", "description": "UTF-8 JSON value (object/array/string/number/bool/null).", - "type": "string" + "type": "string", + "const": "json" } ] }, "PacketMetadata": { - "description": "Optional timing and sequencing metadata that can be attached to packets.\nUsed for pacing, synchronization, and A/V alignment.", + "description": "Optional timing and sequencing metadata that can be attached to packets.\nUsed for pacing, synchronization, and A/V alignment. See `timing` module for\ncanonical semantics (media-time epoch, monotonicity, and preservation rules).", + "type": "object", "properties": { + "timestamp_us": { + "description": "Absolute timestamp in microseconds (presentation time)", + "type": [ + "integer", + "null" + ], + "format": "uint64", + "minimum": 0 + }, "duration_us": { "description": "Duration of this packet/frame in microseconds", - "format": "uint64", - "minimum": 0, "type": [ "integer", "null" - ] + ], + "format": "uint64", + "minimum": 0 }, "sequence": { "description": "Sequence number for ordering and detecting loss", - "format": "uint64", - "minimum": 0, "type": [ "integer", "null" - ] - }, - "timestamp_us": { - "description": "Absolute timestamp in microseconds (presentation time)", + ], "format": "uint64", - "minimum": 0, - "type": [ - "integer", - "null" - ] - } - }, - "type": "object" - } - }, - "$schema": "https://json-schema.org/draft/2020-12/schema", - "description": "Extensible structured packet data.", - "properties": { - "data": true, - "encoding": { - "$ref": "#/$defs/CustomEncoding" - }, - "metadata": { - "anyOf": [ - { - "$ref": "#/$defs/PacketMetadata" - }, - { - "type": "null" + "minimum": 0 } - ], - "description": "Optional timing/ordering metadata." 
- }, - "type_id": { - "description": "Namespaced, versioned type id (e.g., `plugin::native::vad/vad-event@1`).", - "type": "string" + } } - }, - "required": [ - "type_id", - "encoding", - "data" - ], - "title": "CustomPacketData", - "type": "object" + } } ``` diff --git a/docs/src/content/docs/reference/packets/raw-audio.md b/docs/src/content/docs/reference/packets/raw-audio.md index 1e22c8fd..54dfbea3 100644 --- a/docs/src/content/docs/reference/packets/raw-audio.md +++ b/docs/src/content/docs/reference/packets/raw-audio.md @@ -33,32 +33,24 @@ Raw audio is defined by an `AudioFormat` in the type system and carried as `Pack ```json { - "$defs": { - "SampleFormat": { - "description": "Describes the specific format of raw audio data.", - "enum": [ - "F32", - "S16Le" - ], - "type": "string" - } - }, "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "AudioFormat", "description": "Contains the detailed metadata for a raw audio stream.", + "type": "object", "properties": { + "sample_rate": { + "type": "integer", + "format": "uint32", + "minimum": 0 + }, "channels": { + "type": "integer", "format": "uint16", - "maximum": 65535, "minimum": 0, - "type": "integer" + "maximum": 65535 }, "sample_format": { "$ref": "#/$defs/SampleFormat" - }, - "sample_rate": { - "format": "uint32", - "minimum": 0, - "type": "integer" } }, "required": [ @@ -66,8 +58,16 @@ Raw audio is defined by an `AudioFormat` in the type system and carried as `Pack "channels", "sample_format" ], - "title": "AudioFormat", - "type": "object" + "$defs": { + "SampleFormat": { + "description": "Describes the specific format of raw audio data.", + "type": "string", + "enum": [ + "F32", + "S16Le" + ] + } + } } ``` diff --git a/docs/src/content/docs/reference/packets/transcription.md b/docs/src/content/docs/reference/packets/transcription.md index 5d829df6..157691e7 100644 --- a/docs/src/content/docs/reference/packets/transcription.md +++ b/docs/src/content/docs/reference/packets/transcription.md 
@@ -40,79 +40,22 @@ Transcriptions are carried as `Packet::Transcription(Arc)`. ```json { - "$defs": { - "PacketMetadata": { - "description": "Optional timing and sequencing metadata that can be attached to packets.\nUsed for pacing, synchronization, and A/V alignment.", - "properties": { - "duration_us": { - "description": "Duration of this packet/frame in microseconds", - "format": "uint64", - "minimum": 0, - "type": [ - "integer", - "null" - ] - }, - "sequence": { - "description": "Sequence number for ordering and detecting loss", - "format": "uint64", - "minimum": 0, - "type": [ - "integer", - "null" - ] - }, - "timestamp_us": { - "description": "Absolute timestamp in microseconds (presentation time)", - "format": "uint64", - "minimum": 0, - "type": [ - "integer", - "null" - ] - } - }, - "type": "object" - }, - "TranscriptionSegment": { - "description": "A segment of transcribed text with timing information.", - "properties": { - "confidence": { - "description": "Confidence score (0.0 - 1.0), if available", - "format": "float", - "type": [ - "number", - "null" - ] - }, - "end_time_ms": { - "description": "End time in milliseconds", - "format": "uint64", - "minimum": 0, - "type": "integer" - }, - "start_time_ms": { - "description": "Start time in milliseconds", - "format": "uint64", - "minimum": 0, - "type": "integer" - }, - "text": { - "description": "The transcribed text for this segment", - "type": "string" - } - }, - "required": [ - "text", - "start_time_ms", - "end_time_ms" - ], - "type": "object" - } - }, "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "TranscriptionData", "description": "Structured transcription data with timing and metadata.", + "type": "object", "properties": { + "text": { + "description": "The full transcribed text (concatenation of all segments)", + "type": "string" + }, + "segments": { + "description": "Individual segments with timing information", + "type": "array", + "items": { + "$ref": 
"#/$defs/TranscriptionSegment" + } + }, "language": { "description": "Detected or specified language code (e.g., \"en\", \"es\", \"fr\")", "type": [ @@ -121,6 +64,7 @@ Transcriptions are carried as `Packet::Transcription(Arc)`. ] }, "metadata": { + "description": "Optional timing metadata for the entire transcription", "anyOf": [ { "$ref": "#/$defs/PacketMetadata" @@ -128,27 +72,83 @@ Transcriptions are carried as `Packet::Transcription(Arc)`. { "type": "null" } - ], - "description": "Optional timing metadata for the entire transcription" - }, - "segments": { - "description": "Individual segments with timing information", - "items": { - "$ref": "#/$defs/TranscriptionSegment" - }, - "type": "array" - }, - "text": { - "description": "The full transcribed text (concatenation of all segments)", - "type": "string" + ] } }, "required": [ "text", "segments" ], - "title": "TranscriptionData", - "type": "object" + "$defs": { + "TranscriptionSegment": { + "description": "A segment of transcribed text with timing information.", + "type": "object", + "properties": { + "text": { + "description": "The transcribed text for this segment", + "type": "string" + }, + "start_time_ms": { + "description": "Start time in milliseconds", + "type": "integer", + "format": "uint64", + "minimum": 0 + }, + "end_time_ms": { + "description": "End time in milliseconds", + "type": "integer", + "format": "uint64", + "minimum": 0 + }, + "confidence": { + "description": "Confidence score (0.0 - 1.0), if available", + "type": [ + "number", + "null" + ], + "format": "float" + } + }, + "required": [ + "text", + "start_time_ms", + "end_time_ms" + ] + }, + "PacketMetadata": { + "description": "Optional timing and sequencing metadata that can be attached to packets.\nUsed for pacing, synchronization, and A/V alignment. 
See `timing` module for\ncanonical semantics (media-time epoch, monotonicity, and preservation rules).", + "type": "object", + "properties": { + "timestamp_us": { + "description": "Absolute timestamp in microseconds (presentation time)", + "type": [ + "integer", + "null" + ], + "format": "uint64", + "minimum": 0 + }, + "duration_us": { + "description": "Duration of this packet/frame in microseconds", + "type": [ + "integer", + "null" + ], + "format": "uint64", + "minimum": 0 + }, + "sequence": { + "description": "Sequence number for ordering and detecting loss", + "type": [ + "integer", + "null" + ], + "format": "uint64", + "minimum": 0 + } + } + } + } } ``` diff --git a/docs/src/content/docs/reference/plugins/index.md b/docs/src/content/docs/reference/plugins/index.md index 630e0523..4776d38b 100644 --- a/docs/src/content/docs/reference/plugins/index.md +++ b/docs/src/content/docs/reference/plugins/index.md @@ -14,13 +14,14 @@ curl http://localhost:4545/api/v1/plugins curl http://localhost:4545/api/v1/schema/nodes | jq '.[] | select(.kind | startswith("plugin::"))' ``` -## Official plugins (8) +## Official plugins (9) - [`plugin::native::helsinki`](./plugin-native-helsinki/) (original kind: `helsinki`) - [`plugin::native::kokoro`](./plugin-native-kokoro/) (original kind: `kokoro`) - [`plugin::native::matcha`](./plugin-native-matcha/) (original kind: `matcha`) - [`plugin::native::nllb`](./plugin-native-nllb/) (original kind: `nllb`) - [`plugin::native::piper`](./plugin-native-piper/) (original kind: `piper`) +- [`plugin::native::pocket-tts`](./plugin-native-pocket-tts/) (original kind: `pocket-tts`) - [`plugin::native::sensevoice`](./plugin-native-sensevoice/) (original kind: `sensevoice`) - [`plugin::native::vad`](./plugin-native-vad/) (original kind: `vad`) - [`plugin::native::whisper`](./plugin-native-whisper/) (original kind: `whisper`) diff --git a/docs/src/content/docs/reference/plugins/plugin-native-pocket-tts.md 
b/docs/src/content/docs/reference/plugins/plugin-native-pocket-tts.md new file mode 100644 index 00000000..b51e9905 --- /dev/null +++ b/docs/src/content/docs/reference/plugins/plugin-native-pocket-tts.md @@ -0,0 +1,138 @@ +--- +# SPDX-FileCopyrightText: © 2025 StreamKit Contributors +# SPDX-License-Identifier: MPL-2.0 +title: "plugin::native::pocket-tts" +description: "Lightweight CPU TTS using Kyutai Pocket TTS (Candle). English-only voices with streaming output. Outputs 24kHz mono audio." +--- + +`kind`: `plugin::native::pocket-tts` (original kind: `pocket-tts`) + +Lightweight CPU TTS using Kyutai Pocket TTS (Candle). English-only voices with streaming output. Outputs 24kHz mono audio. + +Source: `plugins/native/pocket-tts/target/release/libpocket_tts.so` + +## Categories +- `audio` +- `tts` +- `ml` + +## Pins +### Inputs +- `in` accepts `Text, Binary` (one) +- `in_0` accepts `Text, Binary` (one) +- `in_1` accepts `Binary, Text` (one) + +### Outputs +- `out` produces `RawAudio(AudioFormat { sample_rate: 24000, channels: 1, sample_format: F32 })` (broadcast) + +## Parameters +| Name | Type | Required | Default | Description | +| --- | --- | --- | --- | --- | +| `config_path` | `null | string` | no | `null` | Optional config YAML path for custom variants/offline use | +| `eos_threshold` | `number` | no | `-4.0` | End-of-sequence threshold (more negative = longer output)
min: `-10`
max: `0` | +| `lsd_decode_steps` | `integer` | no | `1` | LSD decode steps (higher = better quality, slower)
min: `1`
max: `8` | +| `min_sentence_length` | `integer` | no | `10` | Minimum chars before triggering TTS
min: `1` | +| `noise_clamp` | `null | number` | no | `null` | Optional noise clamp (null disables) | +| `quantized` | `boolean` | no | `false` | Enable int8 quantized weights (requires plugin built with feature 'quantized') | +| `temperature` | `number` | no | `0.7` | Sampling temperature (higher = more variation)
min: `0.1`
max: `2` | +| `tokenizer_path` | `null | string` | no | `null` | Local tokenizer path for offline loading | +| `variant` | `string` | no | `b6369a24` | Model variant (config in pocket-tts crate) | +| `voice` | `string` | no | `alba` | Voice name, local .wav/.safetensors, hf:// URL, or base64 audio | +| `voice_embeddings_dir` | `null | string` | no | `null` | Directory with predefined voice embeddings (alba, marius, ...) | +| `weights_path` | `null | string` | no | `null` | Local weights path for offline loading | + + +
+Raw JSON Schema + +```json +{ + "properties": { + "config_path": { + "default": null, + "description": "Optional config YAML path for custom variants/offline use", + "type": [ + "string", + "null" + ] + }, + "eos_threshold": { + "default": -4.0, + "description": "End-of-sequence threshold (more negative = longer output)", + "maximum": 0.0, + "minimum": -10.0, + "type": "number" + }, + "lsd_decode_steps": { + "default": 1, + "description": "LSD decode steps (higher = better quality, slower)", + "maximum": 8, + "minimum": 1, + "type": "integer" + }, + "min_sentence_length": { + "default": 10, + "description": "Minimum chars before triggering TTS", + "minimum": 1, + "type": "integer" + }, + "noise_clamp": { + "default": null, + "description": "Optional noise clamp (null disables)", + "type": [ + "number", + "null" + ] + }, + "quantized": { + "default": false, + "description": "Enable int8 quantized weights (requires plugin built with feature 'quantized')", + "type": "boolean" + }, + "temperature": { + "default": 0.7, + "description": "Sampling temperature (higher = more variation)", + "maximum": 2.0, + "minimum": 0.1, + "type": "number" + }, + "tokenizer_path": { + "default": null, + "description": "Local tokenizer path for offline loading", + "type": [ + "string", + "null" + ] + }, + "variant": { + "default": "b6369a24", + "description": "Model variant (config in pocket-tts crate)", + "type": "string" + }, + "voice": { + "default": "alba", + "description": "Voice name, local .wav/.safetensors, hf:// URL, or base64 audio", + "type": "string" + }, + "voice_embeddings_dir": { + "default": null, + "description": "Directory with predefined voice embeddings (alba, marius, ...)", + "type": [ + "string", + "null" + ] + }, + "weights_path": { + "default": null, + "description": "Local weights path for offline loading", + "type": [ + "string", + "null" + ] + } + }, + "type": "object" +} +``` + +
diff --git a/docs/src/content/docs/reference/plugins/plugin-native-whisper.md b/docs/src/content/docs/reference/plugins/plugin-native-whisper.md index 6184f814..77f90cc9 100644 --- a/docs/src/content/docs/reference/plugins/plugin-native-whisper.md +++ b/docs/src/content/docs/reference/plugins/plugin-native-whisper.md @@ -31,7 +31,7 @@ Source: `plugins/native/whisper/target/release/libwhisper.so` | `language` | `string` | no | `en` | Language code (e.g., 'en', 'es', 'fr') | | `max_segment_duration_secs` | `number` | no | `30.0` | Maximum segment duration before forced transcription (seconds)
min: `5`
max: `120` | | `min_silence_duration_ms` | `integer` | no | `700` | Minimum silence duration before transcription (milliseconds)
min: `100`
max: `5000` | -| `model_path` | `string` | no | `models/ggml-base.en-q5_1.bin` | Path to Whisper GGML model file (relative to repo root). IMPORTANT: Input audio must be 16kHz mono f32. | +| `model_path` | `string` | no | `models/ggml-tiny.en-q5_1.bin` | Path to Whisper GGML model file (relative to repo root). IMPORTANT: Input audio must be 16kHz mono f32. | | `n_threads` | `integer` | no | `0` | Number of threads for decoding (0 = auto: min(4, num_cores), 8-12 recommended for modern CPUs)
min: `0`
max: `32` | | `suppress_blank` | `boolean` | no | `true` | Suppress blank/silent audio segments | | `suppress_non_speech_tokens` | `boolean` | no | `true` | Suppress non-speech tokens like [BLANK_AUDIO], [MUSIC], [APPLAUSE], etc. | @@ -63,7 +63,7 @@ steps: - kind: plugin::native::whisper params: - model_path: models/ggml-base.en-q5_1.bin + model_path: models/ggml-tiny.en-q5_1.bin language: en vad_model_path: models/silero_vad.onnx vad_threshold: 0.5 @@ -119,7 +119,7 @@ steps: "type": "integer" }, "model_path": { - "default": "models/ggml-base.en-q5_1.bin", + "default": "models/ggml-tiny.en-q5_1.bin", "description": "Path to Whisper GGML model file (relative to repo root). IMPORTANT: Input audio must be 16kHz mono f32.", "type": "string" }, diff --git a/justfile b/justfile index 9c10468b..bc854bf9 100644 --- a/justfile +++ b/justfile @@ -442,20 +442,28 @@ download-silero-vad: echo "✓ Silero VAD model already exists at models/silero_vad.onnx"; \ else \ curl -L -o models/silero_vad.onnx \ - https://raw.githubusercontent.com/snakers4/silero-vad/master/src/silero_vad/data/silero_vad.onnx && \ + https://huggingface.co/streamkit/whisper-models/resolve/main/silero_vad.onnx && \ echo "✓ Silero VAD model downloaded to models/silero_vad.onnx ($(du -h models/silero_vad.onnx | cut -f1))"; \ fi -# Download Whisper models (base.en quantized) +# Download Whisper models (tiny + base) download-whisper-models: @echo "Downloading Whisper models..." @mkdir -p models + @if [ -f models/ggml-tiny.en-q5_1.bin ]; then \ + echo "✓ Whisper tiny.en model already exists at models/ggml-tiny.en-q5_1.bin"; \ + else \ + echo "Downloading ggml-tiny.en-q5_1.bin (~31MB)..." 
&& \ + curl -L -o models/ggml-tiny.en-q5_1.bin \ + https://huggingface.co/streamkit/whisper-models/resolve/main/ggml-tiny.en-q5_1.bin && \ + echo "✓ Whisper tiny.en model downloaded to models/ggml-tiny.en-q5_1.bin ($(du -h models/ggml-tiny.en-q5_1.bin | cut -f1))"; \ + fi @if [ -f models/ggml-base.en-q5_1.bin ]; then \ echo "✓ Whisper base.en model already exists at models/ggml-base.en-q5_1.bin"; \ else \ echo "Downloading ggml-base.en-q5_1.bin (~58MB)..." && \ curl -L -o models/ggml-base.en-q5_1.bin \ - https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.en-q5_1.bin && \ + https://huggingface.co/streamkit/whisper-models/resolve/main/ggml-base.en-q5_1.bin && \ echo "✓ Whisper base.en model downloaded to models/ggml-base.en-q5_1.bin ($(du -h models/ggml-base.en-q5_1.bin | cut -f1))"; \ fi diff --git a/marketplace/official-plugins.json b/marketplace/official-plugins.json index cd99beef..e40bf670 100644 --- a/marketplace/official-plugins.json +++ b/marketplace/official-plugins.json @@ -21,6 +21,7 @@ "files": [ "opus-mt-en-es.tar.bz2" ], + "expected_size_bytes": 284844308, "license": "Apache-2.0", "license_url": "https://huggingface.co/Helsinki-NLP/opus-mt-en-es" }, @@ -34,6 +35,7 @@ "files": [ "opus-mt-es-en.tar.bz2" ], + "expected_size_bytes": 285443600, "license": "Apache-2.0", "license_url": "https://huggingface.co/Helsinki-NLP/opus-mt-es-en" } @@ -60,6 +62,7 @@ "files": [ "kokoro-multi-lang-v1_1.tar.bz2" ], + "expected_size_bytes": 364816464, "license": "Apache-2.0", "license_url": "https://github.com/k2-fsa/sherpa-onnx/blob/master/LICENSE", "sha256": "a3f4c73d043860e3fd2e5b06f36795eb81de0fc8e8de6df703245edddd87dbad" @@ -88,6 +91,7 @@ "matcha-icefall-en_US-ljspeech.tar.bz2", "matcha-icefall-en_US-ljspeech/vocos-22khz-univ.onnx" ], + "expected_size_bytes": 130630855, "license": "CC-BY-4.0", "license_url": "https://keithito.com/LJ-Speech-Dataset/" } @@ -114,6 +118,7 @@ "files": [ "nllb-200-distilled-600M-ct2-int8.tar.bz2" ], + "expected_size_bytes": 
1135260128, "license": "CC-BY-NC-4.0", "license_url": "https://huggingface.co/facebook/nllb-200-distilled-600M" } @@ -140,6 +145,7 @@ "files": [ "vits-piper-en_US-libritts_r-medium.tar.bz2" ], + "expected_size_bytes": 82018491, "license": "CC-BY-4.0 + GPL-3.0", "license_url": "http://www.openslr.org/141/", "sha256": "78c137daa7eddaf57190cf05c020efd6e593015f62c82ee999ef570fc2dff496" @@ -154,6 +160,7 @@ "files": [ "vits-piper-es_MX-claude-high.tar.bz2" ], + "expected_size_bytes": 67207890, "license": "Apache-2.0 + GPL-3.0", "license_url": "https://huggingface.co/spaces/HirCoir/Piper-TTS-Spanish", "sha256": "ec33fb689c248fe64810aab564cba97babf0f506672cfd404928d46e751a4721" @@ -181,6 +188,7 @@ "files": [ "sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2" ], + "expected_size_bytes": 165783878, "license": "Apache-2.0", "license_url": "https://huggingface.co/ASLP-lab/WSYue-ASR", "sha256": "7305f7905bfcf77fa0b39388a313f3da35c68d971661a65475b56fb2162c8e63" @@ -195,6 +203,7 @@ "files": [ "silero_vad.onnx" ], + "expected_size_bytes": 2327524, "license": "MIT", "license_url": "https://github.com/snakers4/silero-vad/blob/master/LICENSE", "sha256": "1a153a22f4509e292a94e67d6f9b85e8deb25b4988682b7e174c65279d8788e3" @@ -222,6 +231,7 @@ "files": [ "ten-vad.onnx" ], + "expected_size_bytes": 332211, "license": "LicenseRef-ten-vad", "license_url": "https://github.com/TEN-framework/ten-vad", "sha256": "718cb7eef47e3cf5ddbe7e967a7503f46b8b469c0706872f494dfa921b486206" @@ -249,6 +259,7 @@ "files": [ "ggml-tiny.en-q5_1.bin" ], + "expected_size_bytes": 32166155, "license": "MIT", "license_url": "https://github.com/openai/whisper/blob/main/LICENSE", "sha256": "c77c5766f1cef09b6b7d47f21b546cbddd4157886b3b5d6d4f709e91e66c7c2b" @@ -263,6 +274,7 @@ "files": [ "ggml-base.en-q5_1.bin" ], + "expected_size_bytes": 59721011, "license": "MIT", "license_url": "https://github.com/openai/whisper/blob/main/LICENSE", "sha256": 
"4baf70dd0d7c4247ba2b81fafd9c01005ac77c2f9ef064e00dcf195d0e2fdd2f" @@ -277,6 +289,7 @@ "files": [ "ggml-base-q5_1.bin" ], + "expected_size_bytes": 59707625, "license": "MIT", "license_url": "https://github.com/openai/whisper/blob/main/LICENSE", "sha256": "422f1ae452ade6f30a004d7e5c6a43195e4433bc370bf23fac9cc591f01a8898" @@ -291,6 +304,7 @@ "files": [ "silero_vad.onnx" ], + "expected_size_bytes": 2327524, "license": "MIT", "license_url": "https://github.com/snakers4/silero-vad/blob/master/LICENSE", "sha256": "1a153a22f4509e292a94e67d6f9b85e8deb25b4988682b7e174c65279d8788e3" diff --git a/plugins/native/helsinki/plugin.yml b/plugins/native/helsinki/plugin.yml index c193039c..d5beba16 100644 --- a/plugins/native/helsinki/plugin.yml +++ b/plugins/native/helsinki/plugin.yml @@ -16,6 +16,7 @@ models: revision: main files: - opus-mt-en-es.tar.bz2 + expected_size_bytes: 284844308 license: Apache-2.0 license_url: https://huggingface.co/Helsinki-NLP/opus-mt-en-es - id: opus-mt-es-en @@ -26,5 +27,6 @@ models: revision: main files: - opus-mt-es-en.tar.bz2 + expected_size_bytes: 285443600 license: Apache-2.0 license_url: https://huggingface.co/Helsinki-NLP/opus-mt-es-en diff --git a/plugins/native/kokoro/plugin.yml b/plugins/native/kokoro/plugin.yml index e3ecf113..864beb4c 100644 --- a/plugins/native/kokoro/plugin.yml +++ b/plugins/native/kokoro/plugin.yml @@ -16,6 +16,7 @@ models: revision: main files: - kokoro-multi-lang-v1_1.tar.bz2 + expected_size_bytes: 364816464 license: Apache-2.0 license_url: https://github.com/k2-fsa/sherpa-onnx/blob/master/LICENSE sha256: a3f4c73d043860e3fd2e5b06f36795eb81de0fc8e8de6df703245edddd87dbad diff --git a/plugins/native/matcha/plugin.yml b/plugins/native/matcha/plugin.yml index 9282dc0b..997ddae8 100644 --- a/plugins/native/matcha/plugin.yml +++ b/plugins/native/matcha/plugin.yml @@ -17,5 +17,6 @@ models: files: - matcha-icefall-en_US-ljspeech.tar.bz2 - matcha-icefall-en_US-ljspeech/vocos-22khz-univ.onnx + expected_size_bytes: 130630855 
license: CC-BY-4.0 license_url: https://keithito.com/LJ-Speech-Dataset/ diff --git a/plugins/native/nllb/plugin.yml b/plugins/native/nllb/plugin.yml index aff20609..90391cf3 100644 --- a/plugins/native/nllb/plugin.yml +++ b/plugins/native/nllb/plugin.yml @@ -16,5 +16,6 @@ models: revision: main files: - nllb-200-distilled-600M-ct2-int8.tar.bz2 + expected_size_bytes: 1135260128 license: CC-BY-NC-4.0 license_url: https://huggingface.co/facebook/nllb-200-distilled-600M diff --git a/plugins/native/piper/plugin.yml b/plugins/native/piper/plugin.yml index c20559b7..1d45d7bb 100644 --- a/plugins/native/piper/plugin.yml +++ b/plugins/native/piper/plugin.yml @@ -16,6 +16,7 @@ models: revision: main files: - vits-piper-en_US-libritts_r-medium.tar.bz2 + expected_size_bytes: 82018491 license: CC-BY-4.0 + GPL-3.0 license_url: http://www.openslr.org/141/ sha256: 78c137daa7eddaf57190cf05c020efd6e593015f62c82ee999ef570fc2dff496 @@ -27,6 +28,7 @@ models: revision: main files: - vits-piper-es_MX-claude-high.tar.bz2 + expected_size_bytes: 67207890 license: Apache-2.0 + GPL-3.0 license_url: https://huggingface.co/spaces/HirCoir/Piper-TTS-Spanish sha256: ec33fb689c248fe64810aab564cba97babf0f506672cfd404928d46e751a4721 diff --git a/plugins/native/sensevoice/plugin.yml b/plugins/native/sensevoice/plugin.yml index 2c4748eb..4cedfbd0 100644 --- a/plugins/native/sensevoice/plugin.yml +++ b/plugins/native/sensevoice/plugin.yml @@ -16,6 +16,7 @@ models: revision: main files: - sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2 + expected_size_bytes: 165783878 license: Apache-2.0 license_url: https://huggingface.co/ASLP-lab/WSYue-ASR sha256: 7305f7905bfcf77fa0b39388a313f3da35c68d971661a65475b56fb2162c8e63 @@ -27,6 +28,7 @@ models: revision: main files: - silero_vad.onnx + expected_size_bytes: 2327524 license: MIT license_url: https://github.com/snakers4/silero-vad/blob/master/LICENSE sha256: 1a153a22f4509e292a94e67d6f9b85e8deb25b4988682b7e174c65279d8788e3 diff --git 
a/plugins/native/vad/plugin.yml b/plugins/native/vad/plugin.yml index a8ab9637..80bcf31b 100644 --- a/plugins/native/vad/plugin.yml +++ b/plugins/native/vad/plugin.yml @@ -16,6 +16,7 @@ models: revision: main files: - ten-vad.onnx + expected_size_bytes: 332211 license: LicenseRef-ten-vad license_url: https://github.com/TEN-framework/ten-vad sha256: 718cb7eef47e3cf5ddbe7e967a7503f46b8b469c0706872f494dfa921b486206 diff --git a/plugins/native/whisper/README.md b/plugins/native/whisper/README.md index 07255d51..3585c362 100644 --- a/plugins/native/whisper/README.md +++ b/plugins/native/whisper/README.md @@ -56,7 +56,7 @@ The plugin uses a two-stage architecture: ## Configuration Parameters ```yaml -model_path: "models/ggml-base.en-q5_1.bin" # Path to Whisper GGML model +model_path: "models/ggml-tiny.en-q5_1.bin" # Path to Whisper GGML model language: "en" # Language code (en, es, fr, etc.) vad_model_path: "models/silero_vad.onnx" # Path to Silero VAD model vad_threshold: 0.5 # Speech probability threshold (0.0-1.0) @@ -102,32 +102,20 @@ just install-plugins ### Whisper Models -Whisper requires GGML model files. Download them from the [official repository](https://huggingface.co/ggerganov/whisper.cpp/tree/main): +Whisper requires GGML model files. 
StreamKit mirrors the recommended models on Hugging Face for +official plugin installs: ```bash # Create models directory in repo root mkdir -p models -# Download base.en-q5_1 model (recommended, 60MB, quantized for faster performance) -curl -L -o models/ggml-base.en-q5_1.bin \ - https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.en-q5_1.bin - -# Or download other models: -# Full precision base.en (148MB, slightly better quality) -curl -L -o models/ggml-base.en.bin \ - https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.en.bin - -# Q8 quantization (82MB, better quality than q5_1) -curl -L -o models/ggml-base.en-q8_0.bin \ - https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.en-q8_0.bin +# Download tiny.en-q5_1 model (recommended for quick tests) +curl -L -o models/ggml-tiny.en-q5_1.bin \ + https://huggingface.co/streamkit/whisper-models/resolve/main/ggml-tiny.en-q5_1.bin -# tiny.en (75MB, fastest, ~40x realtime) -curl -L -o models/ggml-tiny.en.bin \ - https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-tiny.en.bin - -# small.en (466MB, higher accuracy, ~2-10x realtime) -curl -L -o models/ggml-small.en.bin \ - https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-small.en.bin +# Optional: base.en-q5_1 model (higher quality) +curl -L -o models/ggml-base.en-q5_1.bin \ + https://huggingface.co/streamkit/whisper-models/resolve/main/ggml-base.en-q5_1.bin ``` ### VAD Model Setup @@ -137,7 +125,7 @@ Download the Silero VAD v6 ONNX model: ```bash # Download Silero VAD model (3.5MB) curl -L -o models/silero_vad.onnx \ - https://raw.githubusercontent.com/snakers4/silero-vad/master/src/silero_vad/data/silero_vad.onnx + https://huggingface.co/streamkit/whisper-models/resolve/main/silero_vad.onnx ``` **Note**: The VAD model is required for the plugin to function. Without it, initialization will fail. 
@@ -253,7 +241,7 @@ Error: Failed to load VAD model from 'models/silero_vad.onnx' **Solution**: Ensure both model files exist: ```bash -ls -lh models/ggml-base.en-q5_1.bin models/silero_vad.onnx +ls -lh models/ggml-tiny.en-q5_1.bin models/silero_vad.onnx ``` Download missing models using the commands in [Model Setup](#model-setup). @@ -284,7 +272,7 @@ Download missing models using the commands in [Model Setup](#model-setup). ### Slow performance -1. **Use smaller model**: Try `ggml-tiny.en.bin` instead of base/small +1. **Use smaller model**: Try `ggml-tiny.en-q5_1.bin` instead of base/small 2. **Check CPU usage**: Ensure other processes aren't competing 3. **Verify VAD overhead**: VAD adds <1ms per 32ms frame (~3% overhead) diff --git a/plugins/native/whisper/plugin.yml b/plugins/native/whisper/plugin.yml index 8cf9186f..eb66b6bb 100644 --- a/plugins/native/whisper/plugin.yml +++ b/plugins/native/whisper/plugin.yml @@ -16,6 +16,7 @@ models: revision: main files: - ggml-tiny.en-q5_1.bin + expected_size_bytes: 32166155 license: MIT license_url: https://github.com/openai/whisper/blob/main/LICENSE sha256: c77c5766f1cef09b6b7d47f21b546cbddd4157886b3b5d6d4f709e91e66c7c2b @@ -27,6 +28,7 @@ models: revision: main files: - ggml-base.en-q5_1.bin + expected_size_bytes: 59721011 license: MIT license_url: https://github.com/openai/whisper/blob/main/LICENSE sha256: 4baf70dd0d7c4247ba2b81fafd9c01005ac77c2f9ef064e00dcf195d0e2fdd2f @@ -38,6 +40,7 @@ models: revision: main files: - ggml-base-q5_1.bin + expected_size_bytes: 59707625 license: MIT license_url: https://github.com/openai/whisper/blob/main/LICENSE sha256: 422f1ae452ade6f30a004d7e5c6a43195e4433bc370bf23fac9cc591f01a8898 @@ -49,6 +52,7 @@ models: revision: main files: - silero_vad.onnx + expected_size_bytes: 2327524 license: MIT license_url: https://github.com/snakers4/silero-vad/blob/master/LICENSE sha256: 1a153a22f4509e292a94e67d6f9b85e8deb25b4988682b7e174c65279d8788e3 diff --git a/plugins/native/whisper/src/lib.rs 
b/plugins/native/whisper/src/lib.rs index 28ccf833..713d1c8e 100644 --- a/plugins/native/whisper/src/lib.rs +++ b/plugins/native/whisper/src/lib.rs @@ -112,7 +112,7 @@ const fn default_suppress_non_speech_tokens() -> bool { } fn default_model_path() -> String { - "models/ggml-base.en-q5_1.bin".to_string() + "models/ggml-tiny.en-q5_1.bin".to_string() } fn default_language() -> String { @@ -244,7 +244,7 @@ impl NativeProcessorNode for WhisperPlugin { "model_path": { "type": "string", "description": "Path to Whisper GGML model file (relative to repo root). IMPORTANT: Input audio must be 16kHz mono f32.", - "default": "models/ggml-base.en-q5_1.bin" + "default": "models/ggml-tiny.en-q5_1.bin" }, "language": { "type": "string", diff --git a/samples/pipelines/dynamic/VOICE_AGENT.md b/samples/pipelines/dynamic/VOICE_AGENT.md index 16796a79..207bcad9 100644 --- a/samples/pipelines/dynamic/VOICE_AGENT.md +++ b/samples/pipelines/dynamic/VOICE_AGENT.md @@ -56,7 +56,7 @@ The Stream View telemetry timeline shows: Download and place the following models in the `models/` directory: -- **Whisper STT**: `ggml-base.en-q5_1.bin` (~140 MB) +- **Whisper STT**: `ggml-tiny.en-q5_1.bin` (~31 MB) - **VAD**: `silero_vad.onnx` (~3.5 MB) - **Kokoro TTS**: `kokoro-multi-lang-v1_1/` directory (~360 MB) diff --git a/samples/pipelines/dynamic/WEATHER_AGENT.md b/samples/pipelines/dynamic/WEATHER_AGENT.md index 3bec6104..3db4351a 100644 --- a/samples/pipelines/dynamic/WEATHER_AGENT.md +++ b/samples/pipelines/dynamic/WEATHER_AGENT.md @@ -19,7 +19,7 @@ This sample pipeline turns voice questions into spoken weather answers: ## Prerequisites -- Whisper STT model: `models/ggml-base.en-q5_1.bin` +- Whisper STT model: `models/ggml-tiny.en-q5_1.bin` - Silero VAD model: `models/silero_vad.onnx` - Kokoro model dir: `models/kokoro-multi-lang-v1_1` diff --git a/samples/pipelines/dynamic/speech-translate-en-es.yaml b/samples/pipelines/dynamic/speech-translate-en-es.yaml index eab2aceb..b5bcf74b 100644 --- 
a/samples/pipelines/dynamic/speech-translate-en-es.yaml +++ b/samples/pipelines/dynamic/speech-translate-en-es.yaml @@ -8,7 +8,7 @@ # translates to Spanish, synthesizes Spanish speech, and outputs via MoQ. # # Prerequisites: -# - English Whisper model: models/ggml-base.en-q5_1.bin +# - English Whisper model: models/ggml-tiny.en-q5_1.bin # - Silero VAD model: models/silero_vad.onnx # - NLLB translation model: models/nllb-200-distilled-600M-ct2-int8 # - Mexican Spanish Piper voice: models/vits-piper-es_MX-claude-high @@ -59,7 +59,7 @@ nodes: whisper_stt: kind: plugin::native::whisper params: - model_path: models/ggml-base.en-q5_1.bin + model_path: models/ggml-tiny.en-q5_1.bin language: en vad_model_path: models/silero_vad.onnx vad_threshold: 0.4 diff --git a/samples/pipelines/dynamic/speech-translate-helsinki-en-es.yaml b/samples/pipelines/dynamic/speech-translate-helsinki-en-es.yaml index b24a2403..111a73bb 100644 --- a/samples/pipelines/dynamic/speech-translate-helsinki-en-es.yaml +++ b/samples/pipelines/dynamic/speech-translate-helsinki-en-es.yaml @@ -12,7 +12,7 @@ # making them suitable for commercial deployments. 
# # Prerequisites: -# - English Whisper model: models/ggml-base.en-q5_1.bin +# - English Whisper model: models/ggml-tiny.en-q5_1.bin # - Silero VAD model: models/silero_vad.onnx # - Helsinki OPUS-MT EN->ES: models/opus-mt-en-es # - Mexican Spanish Piper voice: models/vits-piper-es_MX-claude-high @@ -63,7 +63,7 @@ nodes: whisper_stt: kind: plugin::native::whisper params: - model_path: models/ggml-base.en-q5_1.bin + model_path: models/ggml-tiny.en-q5_1.bin language: en vad_model_path: models/silero_vad.onnx vad_threshold: 0.4 diff --git a/samples/pipelines/dynamic/voice-agent-openai.yaml b/samples/pipelines/dynamic/voice-agent-openai.yaml index 28365dd3..4a73d601 100644 --- a/samples/pipelines/dynamic/voice-agent-openai.yaml +++ b/samples/pipelines/dynamic/voice-agent-openai.yaml @@ -3,7 +3,7 @@ # SPDX-License-Identifier: MPL-2.0 # Prerequisites: -# - Whisper STT model: `models/ggml-base.en-q5_1.bin` +# - Whisper STT model: `models/ggml-tiny.en-q5_1.bin` # - Silero VAD model (used by Whisper): `models/silero_vad.onnx` # - Kokoro model dir: `models/kokoro-multi-lang-v1_1` # @@ -47,7 +47,7 @@ nodes: whisper_stt: kind: plugin::native::whisper params: - model_path: models/ggml-base.en-q5_1.bin + model_path: models/ggml-tiny.en-q5_1.bin language: en vad_model_path: models/silero_vad.onnx vad_threshold: 0.3 diff --git a/samples/pipelines/dynamic/voice-weather-open-meteo.yaml b/samples/pipelines/dynamic/voice-weather-open-meteo.yaml index 042c075c..f5590e89 100644 --- a/samples/pipelines/dynamic/voice-weather-open-meteo.yaml +++ b/samples/pipelines/dynamic/voice-weather-open-meteo.yaml @@ -3,7 +3,7 @@ # SPDX-License-Identifier: MPL-2.0 # # Prerequisites: -# - Whisper STT model: `models/ggml-base.en-q5_1.bin` +# - Whisper STT model: `models/ggml-tiny.en-q5_1.bin` # - Silero VAD model (used by Whisper): `models/silero_vad.onnx` # - Kokoro model dir: `models/kokoro-multi-lang-v1_1` # @@ -65,7 +65,7 @@ nodes: whisper_stt: kind: plugin::native::whisper params: - model_path: 
models/ggml-base.en-q5_1.bin + model_path: models/ggml-tiny.en-q5_1.bin language: en vad_model_path: models/silero_vad.onnx vad_threshold: 0.35 diff --git a/samples/pipelines/oneshot/speech_to_text.yml b/samples/pipelines/oneshot/speech_to_text.yml index 8e256b66..f772a676 100644 --- a/samples/pipelines/oneshot/speech_to_text.yml +++ b/samples/pipelines/oneshot/speech_to_text.yml @@ -19,7 +19,7 @@ steps: - kind: plugin::native::whisper params: - model_path: models/ggml-base.en-q5_1.bin + model_path: models/ggml-tiny.en-q5_1.bin language: en vad_model_path: models/silero_vad.onnx vad_threshold: 0.5 diff --git a/samples/skit.toml b/samples/skit.toml index b6a1e139..be9b34a0 100644 --- a/samples/skit.toml +++ b/samples/skit.toml @@ -196,6 +196,42 @@ packet_batch_size = 32 # Directory for plugin artifacts (StreamKit uses `/wasm` and `/native`) directory = ".plugins" +# Allow runtime plugin upload/delete via HTTP API (default: false) +# allow_http_management = false + +# Enable plugin marketplace API and UI (default: false) +# marketplace_enabled = false + +# Allow installing native plugins from the marketplace (default: false) +# Native plugins run in-process; only enable for fully trusted registries. 
+# allow_native_marketplace = false + +# Minisign public keys (contents of `.pub` files) trusted for marketplace manifests +# trusted_pubkeys = [ +# "untrusted comment: minisign public key 81C485A94492F33F\nRWQ/85JEqYXEgX+2kl7Rwd8AcpVjYciSLzvLggzivbGyIrDPjfmcqjYP\n", +# ] + +# Marketplace registry index URLs (e.g., https://example.com/index.json) +# registries = ["https://streamkit.dev/marketplace/index.json"] + +# Marketplace security controls +# allow_model_urls = false +# marketplace_require_registry_origin = false +# marketplace_url_allowlist = [ +# "https://github.com", +# "https://objects.githubusercontent.com", +# "https://release-assets.githubusercontent.com", +# ] +# marketplace_scheme_policy = "https_only" +# marketplace_host_policy = "public_only" +# marketplace_resolve_hostnames = false + +# Directory to store downloaded models (default: "models") +# models_dir = "models" + +# Optional Hugging Face token for gated model downloads +# huggingface_token = "hf_..." + [security] # Security configuration for file access and other security-sensitive settings @@ -266,8 +302,8 @@ enabled = false # Example: Whisper STT with GPU # [[resources.prewarm.plugins]] # kind = "plugin::native::whisper" -# params = { use_gpu = true, gpu_device = 0, model_path = "models/ggml-base.en-q5_1.bin" } -# fallback_params = { use_gpu = false, model_path = "models/ggml-base.en-q5_1.bin" } +# params = { use_gpu = true, gpu_device = 0, model_path = "models/ggml-tiny.en-q5_1.bin" } +# fallback_params = { use_gpu = false, model_path = "models/ggml-tiny.en-q5_1.bin" } # Example: Kokoro with CPU only (no fallback needed) # [[resources.prewarm.plugins]] diff --git a/scripts/marketplace/upload_models_to_hf.py b/scripts/marketplace/upload_models_to_hf.py new file mode 100644 index 00000000..f9763a2d --- /dev/null +++ b/scripts/marketplace/upload_models_to_hf.py @@ -0,0 +1,208 @@ +#!/usr/bin/env python3 +# SPDX-FileCopyrightText: © 2025 StreamKit Contributors +# +# SPDX-License-Identifier: 
MPL-2.0 + +import argparse +import hashlib +import json +import os +import pathlib +import sys +import tarfile + + +def sha256_file(path: pathlib.Path) -> str: + hasher = hashlib.sha256() + with path.open("rb") as handle: + for chunk in iter(lambda: handle.read(1024 * 1024), b""): + hasher.update(chunk) + return hasher.hexdigest() + + +def load_metadata(path: pathlib.Path) -> dict: + return json.loads(path.read_text()) + + +def is_hidden_path(path: pathlib.Path) -> bool: + return any(part.startswith(".") for part in path.parts) + + +def archive_mode(file_path: str) -> tuple[str, str] | None: + if file_path.endswith(".tar.bz2"): + return file_path[: -len(".tar.bz2")], "w:bz2" + if file_path.endswith(".tbz2"): + return file_path[: -len(".tbz2")], "w:bz2" + if file_path.endswith(".tar.gz"): + return file_path[: -len(".tar.gz")], "w:gz" + if file_path.endswith(".tgz"): + return file_path[: -len(".tgz")], "w:gz" + if file_path.endswith(".tar"): + return file_path[: -len(".tar")], "w" + return None + + +def maybe_create_archive( + models_dir: pathlib.Path, file_path: str, create_archives: bool +) -> pathlib.Path | None: + if not create_archives: + return None + archive_path = models_dir / file_path + if archive_path.exists(): + return archive_path + if pathlib.Path(file_path).parent != pathlib.Path("."): + return None + mode = archive_mode(file_path) + if mode is None: + return None + base_name, tar_mode = mode + source_dir = models_dir / base_name + if not source_dir.is_dir(): + return None + + def filter_hidden(tar_info: tarfile.TarInfo) -> tarfile.TarInfo | None: + if is_hidden_path(pathlib.Path(tar_info.name)): + return None + return tar_info + + print(f"Creating archive {archive_path} from {source_dir}...") + with tarfile.open(archive_path, tar_mode) as tar: + tar.add(source_dir, arcname=base_name, filter=filter_hidden) + return archive_path + + +def find_local_path( + models_dir: pathlib.Path, file_path: str, create_archives: bool +) -> pathlib.Path | None: + 
candidate = models_dir / file_path + if candidate.exists(): + return candidate + candidate = maybe_create_archive(models_dir, file_path, create_archives) + if candidate is not None and candidate.exists(): + return candidate + basename = pathlib.Path(file_path).name + candidate = models_dir / basename + if candidate.exists(): + return candidate + return None + + +def collect_models(metadata: dict, repo_id: str) -> list[tuple[str, str | None]]: + files: list[tuple[str, str | None]] = [] + for plugin in metadata.get("plugins", []): + for model in plugin.get("models", []): + if model.get("source") != "huggingface": + continue + if model.get("repo_id") != repo_id: + continue + for file_path in model.get("files", []): + files.append((file_path, model.get("sha256"))) + return files + + +def main() -> int: + parser = argparse.ArgumentParser() + parser.add_argument( + "--metadata", + default="marketplace/official-plugins.json", + help="Path to official plugins metadata JSON", + ) + parser.add_argument( + "--models-dir", + default="models", + help="Local directory containing model files", + ) + parser.add_argument( + "--repo", + default="streamkit/whisper-models", + help="Hugging Face repo to upload into (e.g. 
streamkit/whisper-models)", + ) + parser.add_argument( + "--revision", + default="main", + help="Target branch or revision in the Hugging Face repo", + ) + parser.add_argument( + "--dry-run", + action="store_true", + help="Print planned uploads without pushing files", + ) + parser.add_argument( + "--verify-hashes", + action="store_true", + help="Fail if local hashes do not match manifest hashes", + ) + parser.add_argument( + "--create-archives", + action="store_true", + help="Create .tar.bz2/.tar.gz archives from model directories when missing", + ) + args = parser.parse_args() + + metadata_path = pathlib.Path(args.metadata) + models_dir = pathlib.Path(args.models_dir) + if not metadata_path.exists(): + print(f"Missing metadata file: {metadata_path}", file=sys.stderr) + return 1 + if not models_dir.exists(): + print(f"Missing models directory: {models_dir}", file=sys.stderr) + return 1 + + metadata = load_metadata(metadata_path) + files = collect_models(metadata, args.repo) + if not files: + print(f"No Hugging Face models found for repo '{args.repo}'") + return 0 + + uploads: list[tuple[pathlib.Path, str]] = [] + for file_path, expected_hash in files: + local_path = find_local_path(models_dir, file_path, args.create_archives) + if local_path is None: + print(f"Missing local model file for '{file_path}'", file=sys.stderr) + return 1 + actual_hash = sha256_file(local_path) + if expected_hash and expected_hash != actual_hash: + message = ( + f"Hash mismatch for {local_path} ({actual_hash} != {expected_hash})" + ) + if args.verify_hashes: + print(message, file=sys.stderr) + return 1 + print(f"Warning: {message}", file=sys.stderr) + uploads.append((local_path, file_path)) + + for local_path, repo_path in uploads: + print(f"Upload: {local_path} -> {args.repo}:{repo_path}") + + if args.dry_run: + return 0 + + token = os.environ.get("HF_TOKEN") + if not token: + print("HF_TOKEN is required for uploading to Hugging Face", file=sys.stderr) + return 1 + + try: + from 
huggingface_hub import HfApi + except ImportError: + print("Missing dependency: pip install huggingface_hub", file=sys.stderr) + return 1 + + api = HfApi(token=token) + api.create_repo(repo_id=args.repo, repo_type="model", exist_ok=True) + + for local_path, repo_path in uploads: + api.upload_file( + path_or_fileobj=str(local_path), + path_in_repo=repo_path, + repo_id=args.repo, + repo_type="model", + revision=args.revision, + token=token, + ) + + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/ui/src/App.tsx b/ui/src/App.tsx index 93298f2c..46c42bbb 100644 --- a/ui/src/App.tsx +++ b/ui/src/App.tsx @@ -21,6 +21,7 @@ import ConvertView from './views/ConvertView'; import DesignView from './views/DesignView'; import LoginView from './views/LoginView'; import MonitorView from './views/MonitorView'; +import PluginsView from './views/PluginsView'; import StreamView from './views/StreamView'; import TokensView from './views/TokensView'; @@ -116,6 +117,8 @@ const App: React.FC = () => { } /> } /> } /> + } /> + } /> } /> diff --git a/ui/src/Layout.tsx b/ui/src/Layout.tsx index 11b0800c..6dc17cf5 100644 --- a/ui/src/Layout.tsx +++ b/ui/src/Layout.tsx @@ -321,7 +321,7 @@ const Layout: React.FC = () => { Monitor Convert Stream - {role === 'admin' && Admin} + {role === 'admin' && Admin} diff --git a/ui/src/services/marketplace.test.ts b/ui/src/services/marketplace.test.ts new file mode 100644 index 00000000..2ee0afae --- /dev/null +++ b/ui/src/services/marketplace.test.ts @@ -0,0 +1,113 @@ +// SPDX-FileCopyrightText: © 2025 StreamKit Contributors +// +// SPDX-License-Identifier: MPL-2.0 + +import { beforeEach, describe, expect, it, vi } from 'vitest'; + +import { + cancelMarketplaceJob, + getMarketplaceJob, + getMarketplacePlugin, + installMarketplacePlugin, + listMarketplacePlugins, + listMarketplaceRegistries, +} from './marketplace'; + +vi.mock('./base', () => ({ + getApiUrl: () => 'http://localhost:4545', + fetchApi: (path: string, options: 
RequestInit = {}) => { + const normalized = path.startsWith('/') ? path : `/${path}`; + return fetch(`http://localhost:4545${normalized}`, { ...options, credentials: 'include' }); + }, +})); + +const mockJsonResponse = (payload: unknown) => { + (global.fetch as ReturnType<typeof vi.fn>).mockResolvedValue({ + ok: true, + status: 200, + json: async () => payload, + text: async () => '', + }); +}; + +describe('marketplace service', () => { + beforeEach(() => { + global.fetch = vi.fn() as never; + vi.clearAllMocks(); + }); + + it('lists registries', async () => { + mockJsonResponse([]); + + await listMarketplaceRegistries(); + + expect(global.fetch).toHaveBeenCalledWith( + 'http://localhost:4545/api/v1/marketplace/registries', + { credentials: 'include' } + ); + }); + + it('lists marketplace plugins with query', async () => { + mockJsonResponse({ schema_version: 1, plugins: [] }); + + await listMarketplacePlugins('registry-url', 'whisper'); + + expect(global.fetch).toHaveBeenCalledWith( + 'http://localhost:4545/api/v1/marketplace/plugins?registry=registry-url&q=whisper', + { credentials: 'include' } + ); + }); + + it('fetches plugin details with version', async () => { + mockJsonResponse({}); + + await getMarketplacePlugin('registry-url', 'plugin-id', '1.2.3'); + + expect(global.fetch).toHaveBeenCalledWith( + 'http://localhost:4545/api/v1/marketplace/plugins/plugin-id?registry=registry-url&version=1.2.3', + { credentials: 'include' } + ); + }); + + it('starts install job', async () => { + mockJsonResponse({ job_id: 'job-123' }); + const request = { + registry: 'registry-url', + plugin_id: 'plugin-id', + version: '1.2.3', + install_models: false, + }; + + await installMarketplacePlugin(request); + + expect(global.fetch).toHaveBeenCalledWith('http://localhost:4545/api/v1/plugins/install', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify(request), + credentials: 'include', + }); + }); + + it('fetches job status', async () => {
mockJsonResponse({}); + + await getMarketplaceJob('job-123'); + + expect(global.fetch).toHaveBeenCalledWith('http://localhost:4545/api/v1/jobs/job-123', { + credentials: 'include', + }); + }); + + it('cancels job', async () => { + mockJsonResponse({}); + + await cancelMarketplaceJob('job-123'); + + expect(global.fetch).toHaveBeenCalledWith('http://localhost:4545/api/v1/jobs/job-123/cancel', { + method: 'POST', + credentials: 'include', + }); + }); +}); diff --git a/ui/src/services/marketplace.ts b/ui/src/services/marketplace.ts new file mode 100644 index 00000000..034e6cbc --- /dev/null +++ b/ui/src/services/marketplace.ts @@ -0,0 +1,109 @@ +// SPDX-FileCopyrightText: © 2025 StreamKit Contributors +// +// SPDX-License-Identifier: MPL-2.0 + +import type { + InstallPluginRequest, + InstallPluginResponse, + JobInfo, + MarketplaceIndex, + MarketplacePluginDetails, + MarketplaceRegistry, +} from '@/types/marketplace'; + +import { fetchApi } from './base'; + +export async function listMarketplaceRegistries(): Promise<MarketplaceRegistry[]> { + const response = await fetchApi('/api/v1/marketplace/registries'); + + if (!response.ok) { + const text = await response.text(); + throw new Error(text || `Failed to fetch registries (status ${response.status})`); + } + + return response.json() as Promise<MarketplaceRegistry[]>; +} + +export async function listMarketplacePlugins( + registry: string, + query?: string +): Promise<MarketplaceIndex> { + const params = new URLSearchParams({ registry }); + if (query && query.trim()) { + params.set('q', query.trim()); + } + + const response = await fetchApi(`/api/v1/marketplace/plugins?${params.toString()}`); + + if (!response.ok) { + const text = await response.text(); + throw new Error(text || `Failed to fetch marketplace plugins (status ${response.status})`); + } + + return response.json() as Promise<MarketplaceIndex>; +} + +export async function getMarketplacePlugin( + registry: string, + pluginId: string, + version?: string +): Promise<MarketplacePluginDetails> { + const params = new URLSearchParams({ registry }); + if (version && 
version.trim()) { + params.set('version', version.trim()); + } + + const response = await fetchApi( + `/api/v1/marketplace/plugins/${encodeURIComponent(pluginId)}?${params.toString()}` + ); + + if (!response.ok) { + const text = await response.text(); + throw new Error(text || `Failed to fetch plugin details (status ${response.status})`); + } + + return response.json() as Promise<MarketplacePluginDetails>; +} + +export async function installMarketplacePlugin( + request: InstallPluginRequest +): Promise<InstallPluginResponse> { + const response = await fetchApi('/api/v1/plugins/install', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify(request), + }); + + if (!response.ok) { + const text = await response.text(); + throw new Error(text || `Failed to install plugin (status ${response.status})`); + } + + return response.json() as Promise<InstallPluginResponse>; +} + +export async function getMarketplaceJob(jobId: string): Promise<JobInfo> { + const response = await fetchApi(`/api/v1/jobs/${encodeURIComponent(jobId)}`); + + if (!response.ok) { + const text = await response.text(); + throw new Error(text || `Failed to fetch job (status ${response.status})`); + } + + return response.json() as Promise<JobInfo>; +} + +export async function cancelMarketplaceJob(jobId: string): Promise<JobInfo> { + const response = await fetchApi(`/api/v1/jobs/${encodeURIComponent(jobId)}/cancel`, { + method: 'POST', + }); + + if (!response.ok) { + const text = await response.text(); + throw new Error(text || `Failed to cancel job (status ${response.status})`); + } + + return response.json() as Promise<JobInfo>; +} diff --git a/ui/src/types/marketplace.ts b/ui/src/types/marketplace.ts new file mode 100644 index 00000000..97050a28 --- /dev/null +++ b/ui/src/types/marketplace.ts @@ -0,0 +1,138 @@ +// SPDX-FileCopyrightText: © 2025 StreamKit Contributors +// +// SPDX-License-Identifier: MPL-2.0 + +export type MarketplaceRegistry = { + id: string; + url: string; +}; + +export type MarketplaceIndex = { + schema_version: number; + plugins: 
MarketplacePlugin[]; +}; + +export type MarketplacePlugin = { + id: string; + name?: string | null; + description?: string | null; + latest?: string | null; + versions: MarketplacePluginVersion[]; +}; + +export type MarketplacePluginVersion = { + version: string; + manifest_url: string; + signature_url?: string | null; + published_at?: string | null; +}; + +export type MarketplacePluginKind = 'wasm' | 'native'; + +export type PluginBundle = { + url: string; + sha256: string; + size_bytes?: number | null; +}; + +export type PluginCompatibility = { + streamkit?: string | null; + os: string[]; + arch: string[]; +}; + +export type ModelSource = + | { + source: 'huggingface'; + repo_id: string; + revision?: string | null; + files: string[]; + } + | { + source: 'url'; + url: string; + }; + +export type ModelSpec = ModelSource & { + id?: string | null; + name?: string | null; + default?: boolean; + expected_size_bytes?: number | null; + sha256?: string | null; + license?: string | null; + license_url?: string | null; + gated?: boolean; +}; + +export type PluginManifest = { + schema_version: number; + id: string; + name?: string | null; + version: string; + node_kind: string; + kind: MarketplacePluginKind; + description?: string | null; + license?: string | null; + license_url?: string | null; + homepage?: string | null; + repository?: string | null; + entrypoint: string; + bundle: PluginBundle; + compatibility?: PluginCompatibility | null; + models: ModelSpec[]; +}; + +export type MarketplaceSignatureStatus = { + verified: boolean; + key_id?: string | null; + error?: string | null; +}; + +export type MarketplacePluginDetails = { + registry: string; + plugin: MarketplacePlugin; + version: MarketplacePluginVersion; + manifest: PluginManifest; + signature: MarketplaceSignatureStatus; + allow_native_marketplace: boolean; +}; + +export type InstallPluginRequest = { + registry: string; + plugin_id: string; + version?: string | null; + install_models?: boolean; + model_ids?: 
string[] | null; +}; + +export type InstallPluginResponse = { + job_id: string; +}; + +export type JobStatus = 'queued' | 'running' | 'succeeded' | 'failed' | 'cancelled'; + +export type StepStatus = 'pending' | 'running' | 'succeeded' | 'failed'; + +export type JobProgress = { + bytes_done?: number; + bytes_total?: number; + items_done?: number; + items_total?: number; + current_item?: string; + rate_bytes_per_sec?: number; +}; + +export type JobStep = { + name: string; + status: StepStatus; + progress?: JobProgress; + error?: string; +}; + +export type JobInfo = { + status: JobStatus; + started_at_ms?: number | null; + updated_at_ms: number; + summary: string; + steps: JobStep[]; +}; diff --git a/ui/src/types/types.ts b/ui/src/types/types.ts index d11a7b7b..fc713f90 100644 --- a/ui/src/types/types.ts +++ b/ui/src/types/types.ts @@ -112,4 +112,6 @@ export interface PluginSummary { loaded_at_ms: number; /** Plugin type (wasm or native) */ plugin_type: PluginType; + /** Plugin version from the marketplace record, if available */ + version?: string | null; } diff --git a/ui/src/views/PluginsView.styles.ts b/ui/src/views/PluginsView.styles.ts new file mode 100644 index 00000000..bba0759b --- /dev/null +++ b/ui/src/views/PluginsView.styles.ts @@ -0,0 +1,403 @@ +// SPDX-FileCopyrightText: © 2025 StreamKit Contributors +// +// SPDX-License-Identifier: MPL-2.0 + +import styled from '@emotion/styled'; + +export const Container = styled.div` + box-sizing: border-box; + width: 100%; + min-width: 0; + display: flex; + flex-direction: column; + height: 100%; + min-height: 0; + background: var(--sk-bg); +`; + +export const ContentArea = styled.div` + flex: 1; + overflow-y: auto; + display: flex; + justify-content: center; + min-width: 0; + min-height: 0; +`; + +export const ContentWrapper = styled.div` + width: 100%; + max-width: 1200px; + padding: 40px; + box-sizing: border-box; + + @media (max-width: 768px) { + padding: 24px; + } +`; + +export const BottomSpacer = styled.div` 
+ height: 40px; + flex-shrink: 0; + + @media (max-width: 768px) { + height: 24px; + } +`; + +export const Card = styled.div` + box-sizing: border-box; + width: 100%; + background: var(--sk-panel-bg); + border: 1px solid var(--sk-border); + border-radius: 12px; + padding: 24px; + display: flex; + flex-direction: column; + gap: 18px; + min-width: 0; +`; + +export const TitleRow = styled.div` + display: flex; + align-items: center; + justify-content: space-between; + gap: 12px; + flex-wrap: wrap; +`; + +export const Title = styled.h1` + margin: 0; + font-size: 20px; + font-weight: 700; + color: var(--sk-text); +`; + +export const Subtle = styled.div` + color: var(--sk-text-muted); + font-size: 13px; +`; + +export const NoticeBox = styled.div` + padding: 12px; + border-radius: 10px; + border: 1px solid var(--sk-border); + background: color-mix(in srgb, var(--sk-primary) 8%, transparent); + color: var(--sk-text); + font-size: 13px; +`; + +export const ErrorBox = styled.div` + padding: 12px; + border-radius: 10px; + border: 1px solid var(--sk-border); + background: color-mix(in srgb, var(--sk-danger) 10%, transparent); + color: var(--sk-text); + font-size: 13px; +`; + +export const Section = styled.section` + box-sizing: border-box; + border: 1px solid var(--sk-border); + border-radius: 12px; + padding: 16px; + background: var(--sk-bg); + display: flex; + flex-direction: column; + gap: 12px; + min-width: 0; +`; + +export const SectionTitle = styled.h2` + margin: 0; + font-size: 14px; + font-weight: 700; + color: var(--sk-text); +`; + +export const Row = styled.div` + display: flex; + gap: 10px; + flex-wrap: wrap; + align-items: center; +`; + +export const Label = styled.label` + display: flex; + flex-direction: column; + gap: 6px; + font-size: 12px; + color: var(--sk-text-muted); + min-width: 200px; + flex: 1 1 240px; +`; + +export const Input = styled.input` + padding: 10px 12px; + border-radius: 10px; + border: 1px solid var(--sk-border); + background: 
var(--sk-panel-bg); + color: var(--sk-text); + font-size: 13px; +`; + +export const Select = styled.select` + padding: 10px 12px; + border-radius: 10px; + border: 1px solid var(--sk-border); + background: var(--sk-panel-bg); + color: var(--sk-text); + font-size: 13px; +`; + +export const PluginList = styled.div` + display: flex; + flex-direction: column; + gap: 12px; +`; + +export const PluginItem = styled.div` + border: 1px solid var(--sk-border); + border-radius: 10px; + padding: 12px; + display: flex; + flex-direction: column; + gap: 6px; + background: var(--sk-panel-bg); +`; + +export const PluginHeader = styled.div` + display: flex; + align-items: center; + justify-content: space-between; + gap: 12px; +`; + +export const PluginMeta = styled.div` + display: flex; + flex-direction: column; + gap: 2px; + font-size: 12px; + color: var(--sk-text-muted); +`; + +export const PluginBadge = styled.span<{ $variant?: 'native' | 'wasm' }>` + background: ${(props) => + props.$variant === 'native' ? 'var(--sk-success)' : 'var(--sk-primary)'}; + color: var(--sk-text-white); + font-size: 10px; + font-weight: 700; + padding: 2px 8px; + border-radius: 999px; + text-transform: uppercase; + letter-spacing: 0.04em; +`; + +export const EmptyState = styled.div` + color: var(--sk-text-muted); + font-size: 12px; +`; + +export const MarketplaceGrid = styled.div` + display: grid; + grid-template-columns: minmax(240px, 1fr) minmax(320px, 2fr); + gap: 16px; + + @media (max-width: 1000px) { + grid-template-columns: 1fr; + } +`; + +export const MarketplaceList = styled.div` + display: flex; + flex-direction: column; + gap: 8px; + overflow-y: auto; +`; + +export const MarketplaceListItem = styled.button<{ $active?: boolean }>` + text-align: left; + padding: 10px 12px; + border-radius: 10px; + border: 1px solid ${(props) => (props.$active ? 'var(--sk-primary)' : 'var(--sk-border)')}; + background: ${(props) => (props.$active ? 
'var(--sk-primary-alpha)' : 'var(--sk-panel-bg)')}; + color: var(--sk-text); + cursor: pointer; + display: flex; + flex-direction: column; + gap: 4px; + + &:hover { + border-color: var(--sk-primary); + } +`; + +export const MarketplaceListTitle = styled.div` + font-weight: 600; + font-size: 13px; + color: var(--sk-text); +`; + +export const MarketplaceListDescription = styled.div` + font-size: 12px; + color: var(--sk-text-muted); +`; + +export const DetailsHeader = styled.div` + display: flex; + flex-direction: column; + gap: 6px; +`; + +export const DetailsTitle = styled.div` + font-size: 16px; + font-weight: 700; + color: var(--sk-text); +`; + +export const DetailsDescription = styled.div` + font-size: 13px; + color: var(--sk-text-muted); +`; + +export const KeyValueGrid = styled.div` + display: grid; + grid-template-columns: minmax(120px, 200px) 1fr; + gap: 8px 12px; + font-size: 13px; + color: var(--sk-text); +`; + +export const KeyLabel = styled.div` + color: var(--sk-text-muted); + font-size: 12px; +`; + +export const KeyValue = styled.div` + color: var(--sk-text); + word-break: break-word; +`; + +export const ProgressBar = styled.progress` + width: 100%; + height: 8px; + border-radius: 999px; + overflow: hidden; + + &::-webkit-progress-bar { + background: var(--sk-border); + border-radius: 999px; + } + + &::-webkit-progress-value { + background: var(--sk-primary); + border-radius: 999px; + } + + &::-moz-progress-bar { + background: var(--sk-primary); + border-radius: 999px; + } +`; + +export const StepList = styled.div` + display: flex; + flex-direction: column; + gap: 8px; +`; + +export const StepRow = styled.div` + display: flex; + flex-direction: column; + gap: 4px; + padding: 8px 10px; + border-radius: 8px; + border: 1px solid var(--sk-border); + background: var(--sk-panel-bg); +`; + +export const StepHeader = styled.div` + display: flex; + align-items: center; + justify-content: space-between; + gap: 8px; +`; + +export const StepName = styled.div` + 
font-size: 12px; + font-weight: 600; + color: var(--sk-text); +`; + +export const StepStatus = styled.div<{ $status: string }>` + font-size: 11px; + font-weight: 600; + color: ${(props) => { + switch (props.$status) { + case 'succeeded': + return 'var(--sk-success)'; + case 'failed': + return 'var(--sk-danger)'; + case 'running': + return 'var(--sk-primary)'; + case 'cancelled': + return 'var(--sk-text-muted)'; + default: + return 'var(--sk-text-muted)'; + } + }}; +`; + +export const StepMeta = styled.div` + font-size: 11px; + color: var(--sk-text-muted); +`; + +export const StepError = styled.div` + font-size: 11px; + color: var(--sk-danger); +`; + +export const SectionDivider = styled.hr` + border: none; + border-top: 1px solid var(--sk-border); + margin: 4px 0; +`; + +export const SubSectionLabel = styled.div` + font-size: 12px; + font-weight: 600; + color: var(--sk-text-muted); + text-transform: uppercase; + letter-spacing: 0.04em; +`; + +export const SignatureValue = styled.span<{ $verified: boolean }>` + color: ${(props) => (props.$verified ? 
'var(--sk-success)' : 'var(--sk-warning)')}; +`; + +export const WarningBox = styled.div` + padding: 12px; + border-radius: 10px; + border: 1px solid var(--sk-border); + background: color-mix(in srgb, var(--sk-warning) 10%, transparent); + color: var(--sk-text); + font-size: 13px; +`; + +export const ModelRow = styled.div` + display: flex; + gap: 10px; + align-items: center; + padding: 6px 0; +`; + +export const ModelName = styled.span` + font-weight: 500; + color: var(--sk-text); + font-size: 13px; +`; + +export const ModelMeta = styled.span` + font-size: 11px; + color: var(--sk-text-muted); +`; diff --git a/ui/src/views/PluginsView.tsx b/ui/src/views/PluginsView.tsx new file mode 100644 index 00000000..24861d89 --- /dev/null +++ b/ui/src/views/PluginsView.tsx @@ -0,0 +1,75 @@ +// SPDX-FileCopyrightText: © 2025 StreamKit Contributors +// +// SPDX-License-Identifier: MPL-2.0 + +import React, { useState } from 'react'; + +import { TabsContent, TabsList, TabsRoot, TabsTrigger } from '@/components/ui/Tabs'; +import { usePermissions } from '@/hooks/usePermissions'; + +import AdminNav from './admin/AdminNav'; +import InstalledPluginsTab from './plugins/InstalledPluginsTab'; +import MarketplaceTab from './plugins/MarketplaceTab'; +import { + BottomSpacer, + Card, + Container, + ContentArea, + ContentWrapper, + ErrorBox, + Row, + Subtle, + Title, + TitleRow, +} from './PluginsView.styles'; + +const PluginsView: React.FC = () => { + const { role, isAdmin } = usePermissions(); + const [activeTab, setActiveTab] = useState<'installed' | 'marketplace'>('installed'); + + return ( + + + + + +
+ Plugins + Manage installed plugins and marketplace installs. +
+ + Role: {role ?? 'unknown'} + +
+ + {isAdmin() === false && ( + Admin role required to manage plugins on this server. + )} + + + + setActiveTab(value as typeof activeTab)} + > + + Installed + Marketplace + + + + + + + + + +
+
+
+ +
+ ); +}; + +export default PluginsView; diff --git a/ui/src/views/TokensView.tsx b/ui/src/views/TokensView.tsx index 1952a520..a544e276 100644 --- a/ui/src/views/TokensView.tsx +++ b/ui/src/views/TokensView.tsx @@ -20,6 +20,7 @@ import { useStreamStore } from '@/stores/streamStore'; import { getBasePathname } from '@/utils/baseHref'; import { getLogger } from '@/utils/logger'; +import AdminNav from './admin/AdminNav'; import { MintedTokensTable } from './MintedTokensTable'; import { BottomSpacer, @@ -389,6 +390,8 @@ const TokensView: React.FC = () => { + + {renderError(error)} {renderAuthDisabledNotice(authEnabled)} {renderAdminRequiredNotice(authEnabled, authenticated, role)} diff --git a/ui/src/views/admin/AdminNav.styles.ts b/ui/src/views/admin/AdminNav.styles.ts new file mode 100644 index 00000000..b3d8ee5e --- /dev/null +++ b/ui/src/views/admin/AdminNav.styles.ts @@ -0,0 +1,35 @@ +// SPDX-FileCopyrightText: © 2025 StreamKit Contributors +// +// SPDX-License-Identifier: MPL-2.0 + +import styled from '@emotion/styled'; +import { NavLink } from 'react-router-dom'; + +export const AdminNavBar = styled.nav` + display: flex; + gap: 8px; + flex-wrap: wrap; + padding-bottom: 12px; + border-bottom: 1px solid var(--sk-border); + margin-bottom: 16px; +`; + +export const AdminNavLink = styled(NavLink)` + padding: 6px 12px; + border-radius: 999px; + border: 1px solid transparent; + font-size: 12px; + font-weight: 600; + color: var(--sk-text); + text-decoration: none; + background: var(--sk-panel-bg); + + &:hover { + border-color: var(--sk-primary); + } + + &.active { + border-color: var(--sk-primary); + background: var(--sk-primary-alpha); + } +`; diff --git a/ui/src/views/admin/AdminNav.tsx b/ui/src/views/admin/AdminNav.tsx new file mode 100644 index 00000000..48791ebf --- /dev/null +++ b/ui/src/views/admin/AdminNav.tsx @@ -0,0 +1,22 @@ +// SPDX-FileCopyrightText: © 2025 StreamKit Contributors +// +// SPDX-License-Identifier: MPL-2.0 + +import React from 'react'; + 
+import { AdminNavBar, AdminNavLink } from './AdminNav.styles'; + +const AdminNav: React.FC = () => { + return ( + + (isActive ? 'active' : '')}> + Plugins + + (isActive ? 'active' : '')}> + Tokens + + + ); +}; + +export default AdminNav; diff --git a/ui/src/views/plugins/InstalledPluginsTab.tsx b/ui/src/views/plugins/InstalledPluginsTab.tsx new file mode 100644 index 00000000..aeaa160a --- /dev/null +++ b/ui/src/views/plugins/InstalledPluginsTab.tsx @@ -0,0 +1,178 @@ +// SPDX-FileCopyrightText: © 2025 StreamKit Contributors +// +// SPDX-License-Identifier: MPL-2.0 + +import { Upload } from 'lucide-react'; +import React, { useCallback, useEffect, useState } from 'react'; + +import ConfirmModal from '@/components/ConfirmModal'; +import { Button } from '@/components/ui/Button'; +import { UploadDropZone } from '@/components/UploadDropZone'; +import { useToast } from '@/context/ToastContext'; +import { usePermissions } from '@/hooks/usePermissions'; +import { deletePlugin, uploadPlugin } from '@/services/plugins'; +import { ensurePluginsLoaded, reloadPlugins, usePluginStore } from '@/stores/pluginStore'; +import type { PluginSummary } from '@/types/types'; +import { getLogger } from '@/utils/logger'; + +import { + EmptyState, + ErrorBox, + NoticeBox, + PluginBadge, + PluginHeader, + PluginItem, + PluginList, + PluginMeta, + Row, + Section, + SectionTitle, +} from '../PluginsView.styles'; + +const logger = getLogger('PluginsInstalledTab'); + +const InstalledPluginsTab: React.FC = () => { + const { can } = usePermissions(); + const toast = useToast(); + const plugins = usePluginStore((s) => s.plugins); + const upsertPlugin = usePluginStore((s) => s.upsertPlugin); + const removePlugin = usePluginStore((s) => s.removePlugin); + + const [installedError, setInstalledError] = useState(null); + const [pendingDelete, setPendingDelete] = useState(null); + const [deletingKind, setDeletingKind] = useState(null); + const [isUploading, setIsUploading] = useState(false); + + 
useEffect(() => { + ensurePluginsLoaded().catch((err) => { + logger.error('Failed to load plugins', err); + setInstalledError('Failed to load plugins.'); + }); + }, []); + + const handleRefreshInstalled = useCallback(async () => { + setInstalledError(null); + try { + await reloadPlugins(); + } catch (err) { + const message = err instanceof Error ? err.message : 'Failed to refresh plugins.'; + setInstalledError(message); + } + }, []); + + const handlePluginFilesSelected = useCallback( + async (files: FileList) => { + if (!can.loadPlugin) return; + const file = files.item(0); + if (!file) return; + setIsUploading(true); + try { + const summary = await uploadPlugin(file); + upsertPlugin(summary); + toast.success(`Uploaded ${summary.kind}`); + } catch (err) { + const message = err instanceof Error ? err.message : 'Failed to upload plugin.'; + toast.error(message); + } finally { + setIsUploading(false); + } + }, + [can.loadPlugin, upsertPlugin, toast] + ); + + const handleConfirmDelete = useCallback(async () => { + if (!pendingDelete) return; + setDeletingKind(pendingDelete.kind); + try { + await deletePlugin(pendingDelete.kind); + removePlugin(pendingDelete.kind); + toast.success(`Unloaded ${pendingDelete.original_kind}`); + setPendingDelete(null); + } catch (err) { + const message = err instanceof Error ? err.message : 'Failed to unload plugin.'; + toast.error(message); + } finally { + setDeletingKind(null); + } + }, [pendingDelete, removePlugin, toast]); + + return ( + <> +
+ Installed plugins + {installedError && {installedError}} + + + + {plugins.length === 0 ? ( + No plugins loaded yet. + ) : ( + + {plugins.map((plugin) => { + const loadedAt = new Date(plugin.loaded_at_ms).toLocaleString(); + return ( + + +
+ + {plugin.kind} + + {plugin.plugin_type} +
+ +
+ + {plugin.version && Version: {plugin.version}} + Original kind: {plugin.original_kind} + File: {plugin.file_name} + Loaded: {loadedAt} + +
+ ); + })} +
+ )} +
+ +
+ Manual upload + + Manual uploads are trusted code execution. Prefer marketplace installs when possible. + + } + text={isUploading ? 'Uploading…' : 'Drop plugin file here or click to browse'} + hint="Accepted: WASM (.wasm) or native (.so, .dylib, .dll)" + onFilesSelected={handlePluginFilesSelected} + /> +
+ + setPendingDelete(null)} + isLoading={deletingKind !== null} + /> + + ); +}; + +export default InstalledPluginsTab; diff --git a/ui/src/views/plugins/MarketplaceJobPanel.tsx b/ui/src/views/plugins/MarketplaceJobPanel.tsx new file mode 100644 index 00000000..3f9bde4f --- /dev/null +++ b/ui/src/views/plugins/MarketplaceJobPanel.tsx @@ -0,0 +1,86 @@ +// SPDX-FileCopyrightText: © 2025 StreamKit Contributors +// +// SPDX-License-Identifier: MPL-2.0 + +import React from 'react'; + +import { Button } from '@/components/ui/Button'; +import type { JobInfo } from '@/types/marketplace'; + +import { + ErrorBox, + ProgressBar, + Row, + Section, + SectionTitle, + StepError, + StepHeader, + StepList, + StepMeta, + StepName, + StepRow, + StepStatus, + Subtle, +} from '../PluginsView.styles'; +import { formatStepName, formatStepProgress } from './marketplaceFormatters'; + +type MarketplaceJobPanelProps = { + jobId: string | null; + jobInfo: JobInfo | null; + jobError: string | null; + jobProgress: number | null; + jobIsActive: boolean; + onCancel: () => void; + onClear: () => void; +}; + +export const MarketplaceJobPanel: React.FC = ({ + jobId, + jobInfo, + jobError, + jobProgress, + jobIsActive, + onCancel, + onClear, +}) => { + if (!jobId) return null; + + return ( +
+ Install job + {jobError && {jobError}} + {jobInfo && ( + <> + {jobInfo.summary} + {jobProgress !== null && } + + {jobInfo.steps.map((step) => { + const progress = formatStepProgress(step); + return ( + + + {formatStepName(step.name)} + {step.status} + + {progress && {progress}} + {step.error && {step.error}} + + ); + })} + + + {jobIsActive ? ( + + ) : ( + + )} + + + )} +
+ ); +}; diff --git a/ui/src/views/plugins/MarketplaceModelsSection.tsx b/ui/src/views/plugins/MarketplaceModelsSection.tsx new file mode 100644 index 00000000..411672cc --- /dev/null +++ b/ui/src/views/plugins/MarketplaceModelsSection.tsx @@ -0,0 +1,122 @@ +// SPDX-FileCopyrightText: © 2025 StreamKit Contributors +// +// SPDX-License-Identifier: MPL-2.0 + +import React from 'react'; + +import { CheckboxWithLabel } from '@/components/ui/Checkbox'; +import type { MarketplacePluginDetails } from '@/types/marketplace'; + +import { + ModelMeta, + ModelName, + ModelRow, + NoticeBox, + SectionDivider, + SubSectionLabel, +} from '../PluginsView.styles'; +import { formatBytes } from './marketplaceFormatters'; + +type MarketplaceModelsSectionProps = { + hasModels: boolean; + hasModelSelection: boolean; + installModels: boolean; + hasGatedModels: boolean; + models: MarketplacePluginDetails['manifest']['models']; + selectedModelIds: string[]; + onModelToggle: (modelId: string, checked: boolean) => void; + onInstallModelsChange: (value: boolean) => void; +}; + +export const MarketplaceModelsSection: React.FC = ({ + hasModels, + hasModelSelection, + installModels, + hasGatedModels, + models, + selectedModelIds, + onModelToggle, + onInstallModelsChange, +}) => { + if (!hasModels) return null; + return ( + <> + + Models + + + {installModels && !hasModelSelection && ( + Model selection is not available for this plugin. + )} + + ); +}; + +const MarketplaceModelsToggle: React.FC<{ + enabled: boolean; + checked: boolean; + hasGatedModels: boolean; + onChange: (value: boolean) => void; +}> = ({ enabled, checked, hasGatedModels, onChange }) => { + if (!enabled) return null; + return ( + <> + onChange(Boolean(value))} + /> + {hasGatedModels && ( + Gated models require a Hugging Face token configured on the server. 
+ )} + + ); +}; + +const MarketplaceModelsSelection: React.FC<{ + enabled: boolean; + models: MarketplacePluginDetails['manifest']['models']; + selectedModelIds: string[]; + onModelToggle: (modelId: string, checked: boolean) => void; +}> = ({ enabled, models, selectedModelIds, onModelToggle }) => { + if (!enabled) return null; + + return ( + <> + {models.map((model, index) => { + const modelId = model.id ?? `model-${index}`; + const fileCount = model.source === 'huggingface' ? model.files.length : 1; + const displayName = + model.name || model.id || (model.source === 'huggingface' ? model.files[0] : model.url); + const sizeLabel = formatBytes(model.expected_size_bytes ?? undefined) || 'Unknown size'; + return ( + + onModelToggle(modelId, Boolean(value))} + /> + {displayName} + {sizeLabel} + {model.license && {model.license}} + + {fileCount} file{fileCount === 1 ? '' : 's'} + + + ); + })} + + ); +}; diff --git a/ui/src/views/plugins/MarketplacePanels.tsx b/ui/src/views/plugins/MarketplacePanels.tsx new file mode 100644 index 00000000..60f497d7 --- /dev/null +++ b/ui/src/views/plugins/MarketplacePanels.tsx @@ -0,0 +1,440 @@ +// SPDX-FileCopyrightText: © 2025 StreamKit Contributors +// +// SPDX-License-Identifier: MPL-2.0 + +import React from 'react'; + +import { Button } from '@/components/ui/Button'; +import { CheckboxWithLabel } from '@/components/ui/Checkbox'; +import type { MarketplaceIndex, MarketplacePluginDetails } from '@/types/marketplace'; +import type { PluginSummary } from '@/types/types'; + +import { + DetailsDescription, + DetailsHeader, + DetailsTitle, + EmptyState, + KeyLabel, + KeyValue, + KeyValueGrid, + MarketplaceList, + MarketplaceListDescription, + MarketplaceListItem, + MarketplaceListTitle, + NoticeBox, + PluginBadge, + Row, + Section, + SectionDivider, + SectionTitle, + Select, + SignatureValue, + Subtle, + WarningBox, +} from '../PluginsView.styles'; +import { formatBytes } from './marketplaceFormatters'; +import { MarketplaceModelsSection } 
from './MarketplaceModelsSection'; + +type MarketplaceListPanelProps = { + loading: boolean; + index: MarketplaceIndex | null; + selectedPluginId: string | null; + onSelect: (pluginId: string) => void; +}; + +export const MarketplaceListPanel: React.FC = ({ + loading, + index, + selectedPluginId, + onSelect, +}) => { + return ( +
+ Marketplace + {loading && Loading marketplace...} + {!loading && index?.plugins.length === 0 && No plugins found.} + + {index?.plugins.map((plugin) => ( + onSelect(plugin.id)} + > + {plugin.name ?? plugin.id} + + {plugin.description ?? 'No description provided.'} + + + ))} + +
+ ); +}; + +const buildInstallBlockedReasons = ({ + canLoadPlugin, + signatureVerified, + requiresLicenseAcceptance, + licenseAccepted, + missingModelSelection, + allowNativeMarketplace, + installedPlugin, + installModels, + hasModels, + versionMismatch, + installedVersion, + selectedVersion, +}: { + canLoadPlugin: boolean; + signatureVerified: boolean; + requiresLicenseAcceptance: boolean; + licenseAccepted: boolean; + missingModelSelection: boolean; + allowNativeMarketplace: boolean; + installedPlugin: PluginSummary | null; + installModels: boolean; + hasModels: boolean; + versionMismatch: boolean; + installedVersion: string | null; + selectedVersion: string | null; +}) => { + const reasons: string[] = []; + if (!canLoadPlugin) { + reasons.push('Insufficient permissions to install plugins.'); + } + if (!signatureVerified) { + reasons.push('Plugin signature is not verified.'); + } + if (requiresLicenseAcceptance && !licenseAccepted) { + reasons.push('License acceptance required.'); + } + if (missingModelSelection) { + reasons.push('Select at least one model or disable model downloads.'); + } + if (!allowNativeMarketplace) { + reasons.push('Native marketplace installs are disabled in server config.'); + } + if (installedPlugin) { + if (!installModels) { + reasons.push('Plugin already installed. Enable model downloads or uninstall to replace it.'); + } else if (!hasModels) { + reasons.push('Plugin already installed and has no models to download.'); + } else if (versionMismatch && installedVersion) { + reasons.push( + `Installed version ${installedVersion} does not match selected version ${selectedVersion}. 
Select ${installedVersion} to download models.` + ); + } + } + return reasons; +}; + +const getInstallLabel = ({ + installing, + installedPlugin, + installModels, + hasModels, +}: { + installing: boolean; + installedPlugin: PluginSummary | null; + installModels: boolean; + hasModels: boolean; +}) => { + if (installing) return 'Starting...'; + if (!installedPlugin) return 'Install'; + if (installModels && hasModels) return 'Download models'; + return 'Installed'; +}; + +const MarketplaceInstalledNotice: React.FC<{ + installedPlugin: PluginSummary | null; + installedVersion: string | null; + versionMismatch: boolean; +}> = ({ installedPlugin, installedVersion, versionMismatch }) => { + if (!installedPlugin) return null; + return ( + + Installed{installedVersion ? ` (version ${installedVersion})` : ''}. + {versionMismatch && installedVersion && ( + <> Select version {installedVersion} to download models. + )} + + ); +}; + +const MarketplaceInstallWarnings: React.FC<{ + canInstall: boolean; + installBlockedReasons: string[]; +}> = ({ canInstall, installBlockedReasons }) => { + if (canInstall || installBlockedReasons.length === 0) return null; + return ( + +
    + {installBlockedReasons.map((reason) => ( +
  • {reason}
  • + ))} +
+
+ ); +}; + +type MarketplaceDetailsPanelProps = { + details: MarketplacePluginDetails | null; + selectedVersion: string | null; + loading: boolean; + licenseAccepted: boolean; + requiresLicenseAcceptance: boolean; + installModels: boolean; + hasModels: boolean; + hasModelSelection: boolean; + hasGatedModels: boolean; + missingModelSelection: boolean; + installedPlugin: PluginSummary | null; + selectedModelIds: string[]; + onModelToggle: (modelId: string, checked: boolean) => void; + canLoadPlugin: boolean; + canInstall: boolean; + installing: boolean; + onVersionChange: (value: string) => void; + onLicenseAccepted: (value: boolean) => void; + onInstallModelsChange: (value: boolean) => void; + onInstall: () => void; +}; + +export const MarketplaceDetailsPanel: React.FC = ({ + details, + selectedVersion, + loading, + licenseAccepted, + requiresLicenseAcceptance, + installModels, + hasModels, + hasModelSelection, + hasGatedModels, + missingModelSelection, + installedPlugin, + selectedModelIds, + onModelToggle, + canLoadPlugin, + canInstall, + installing, + onVersionChange, + onLicenseAccepted, + onInstallModelsChange, + onInstall, +}) => { + if (loading) return ; + if (!details) return ; + + const installedVersion = installedPlugin?.version ?? null; + const versionMismatch = + Boolean(installedVersion) && Boolean(selectedVersion) && installedVersion !== selectedVersion; + const installBlockedReasons = buildInstallBlockedReasons({ + canLoadPlugin, + signatureVerified: details.signature.verified === true, + requiresLicenseAcceptance, + licenseAccepted, + missingModelSelection, + allowNativeMarketplace: details.manifest.kind !== 'native' || details.allow_native_marketplace, + installedPlugin, + installModels, + hasModels, + versionMismatch, + installedVersion, + selectedVersion, + }); + const installLabel = getInstallLabel({ installing, installedPlugin, installModels, hasModels }); + + return ( +
+ Details + + + + + {requiresLicenseAcceptance && } + + + + + + + +
+ ); +}; + +const MarketplaceDetailsLoading: React.FC = () => ( +
+ Details + Loading plugin details... +
+); + +const MarketplaceDetailsEmpty: React.FC = () => ( +
+ Details + Select a plugin to view. +
+); + +const MarketplaceDetailsHeader: React.FC<{ details: MarketplacePluginDetails }> = ({ details }) => ( + + {details.manifest.name ?? details.manifest.id} + {details.manifest.description && ( + {details.manifest.description} + )} + + {details.manifest.kind} + + +); + +type MarketplaceDetailsFieldsProps = { + details: MarketplacePluginDetails; + selectedVersion: string | null; + onVersionChange: (value: string) => void; +}; + +const MarketplaceDetailsFields: React.FC = ({ + details, + selectedVersion, + onVersionChange, +}) => { + const signatureLabel = details.signature.verified + ? `\u2713 Verified (${details.signature.key_id ?? 'trusted key'})` + : `\u26A0 Unverified (${details.signature.error ?? 'unknown'})`; + const modelFileCount = details.manifest.models.reduce((count, model) => { + if (model.source === 'huggingface') { + return count + model.files.length; + } + return count + 1; + }, 0); + + return ( + + Version + + + + Node kind + {details.manifest.node_kind} + Entry point + {details.manifest.entrypoint} + Bundle size + + {formatBytes(details.manifest.bundle.size_bytes ?? undefined) || 'Unknown'} + + Signature + + {signatureLabel} + + License + {details.manifest.license || details.manifest.license_url || 'Unknown'} + {details.manifest.models.length > 0 && ( + <> + Models + {modelFileCount} files + + )} + + + ); +}; + +const MarketplaceCompatibilityRows: React.FC<{ + compatibility: MarketplacePluginDetails['manifest']['compatibility']; +}> = ({ compatibility }) => { + if (!compatibility) return null; + return ( + <> + {compatibility.streamkit && ( + <> + StreamKit + {compatibility.streamkit} + + )} + {compatibility.os?.length ? ( + <> + OS + {compatibility.os.join(', ')} + + ) : null} + {compatibility.arch?.length ? 
( + <> + Arch + {compatibility.arch.join(', ')} + + ) : null} + + ); +}; + +const MarketplaceNativeNotice: React.FC<{ + kind: MarketplacePluginDetails['manifest']['kind']; + allowNativeMarketplace: boolean; +}> = ({ kind, allowNativeMarketplace }) => { + if (kind !== 'native') return null; + if (!allowNativeMarketplace) { + return ( + + Native marketplace installs are disabled in server config. Set{' '} + allow_native_marketplace = true to enable. + + ); + } + return Native plugins run in-process with full server access.; +}; + +const MarketplaceLicenseAcceptance: React.FC<{ + enabled: boolean; + checked: boolean; + onChange: (value: boolean) => void; +}> = ({ enabled, checked, onChange }) => { + if (!enabled) return null; + return ( + onChange(Boolean(value))} + /> + ); +}; diff --git a/ui/src/views/plugins/MarketplaceTab.tsx b/ui/src/views/plugins/MarketplaceTab.tsx new file mode 100644 index 00000000..3de85eff --- /dev/null +++ b/ui/src/views/plugins/MarketplaceTab.tsx @@ -0,0 +1,498 @@ +// SPDX-FileCopyrightText: © 2025 StreamKit Contributors +// +// SPDX-License-Identifier: MPL-2.0 + +import React, { useCallback, useEffect, useMemo, useState } from 'react'; + +import { useToast } from '@/context/ToastContext'; +import { usePermissions } from '@/hooks/usePermissions'; +import { installMarketplacePlugin } from '@/services/marketplace'; +import { ensurePluginsLoaded, reloadPlugins, usePluginStore } from '@/stores/pluginStore'; +import type { JobInfo, MarketplaceIndex, MarketplacePluginDetails } from '@/types/marketplace'; +import type { PluginSummary } from '@/types/types'; +import { getLogger } from '@/utils/logger'; + +import { + ErrorBox, + Input, + Label, + MarketplaceGrid, + NoticeBox, + Row, + Section, + SectionTitle, + Select, +} from '../PluginsView.styles'; +import { computeJobProgress } from './marketplaceFormatters'; +import { + useMarketplaceDetails, + useMarketplaceIndex, + useMarketplaceJob, + useMarketplaceRegistries, +} from './marketplaceHooks'; 
+import { MarketplaceJobPanel } from './MarketplaceJobPanel'; +import { MarketplaceDetailsPanel, MarketplaceListPanel } from './MarketplacePanels'; + +const logger = getLogger('MarketplaceTab'); + +type MarketplaceTabProps = { + active: boolean; +}; + +const syncSelectedPluginId = ( + index: MarketplaceIndex | null, + selectedPluginId: string | null, + setSelectedPluginId: React.Dispatch> +) => { + if (!index) { + setSelectedPluginId(null); + return; + } + if (selectedPluginId && index.plugins.some((plugin) => plugin.id === selectedPluginId)) { + return; + } + setSelectedPluginId(index.plugins[0]?.id ?? null); +}; + +const syncInstallModels = ( + details: MarketplacePluginDetails | null, + setInstallModels: React.Dispatch> +) => { + if (!details) { + setInstallModels(false); + return; + } + setInstallModels(details.manifest.models.length > 0); +}; + +const defaultModelSelection = (details: MarketplacePluginDetails | null) => { + const models = details?.manifest.models ?? []; + if (models.length === 0) return []; + if (!models.every((model) => model.id)) { + return []; + } + const defaultIds = models.filter((model) => model.default).map((model) => model.id as string); + if (defaultIds.length > 0) { + return defaultIds; + } + return models.map((model) => model.id as string); +}; + +const deriveModelFlags = (details: MarketplacePluginDetails | null, selectedModelIds: string[]) => { + const models = details?.manifest.models ?? []; + const hasModels = models.length > 0; + const hasModelSelection = hasModels && models.every((model) => model.id); + const selectedModels = + hasModelSelection && selectedModelIds.length > 0 + ? 
models.filter((model) => selectedModelIds.includes(model.id as string)) + : models; + const hasGatedModels = selectedModels.some((model) => model.gated); + const hasModelLicenses = selectedModels.some((model) => model.license || model.license_url); + const requiresLicenseAcceptance = Boolean( + details?.manifest.license || details?.manifest.license_url || hasModelLicenses + ); + return { hasModels, hasModelSelection, hasGatedModels, requiresLicenseAcceptance }; +}; + +const isJobActive = (jobInfo: JobInfo | null) => + jobInfo?.status === 'queued' || jobInfo?.status === 'running'; + +const isLicenseSatisfied = (requiresLicenseAcceptance: boolean, licenseAccepted: boolean) => + !requiresLicenseAcceptance || licenseAccepted; + +const isModelSelectionSatisfied = (installModels: boolean, modelSelectionRequired: boolean) => + !installModels || !modelSelectionRequired; + +const isNativeMarketplaceAllowed = (details: MarketplacePluginDetails) => + details.manifest.kind !== 'native' || details.allow_native_marketplace; + +const isInstallAllowedForInstalled = ({ + isInstalled, + installModels, + hasModels, + installedVersion, + selectedVersion, +}: { + isInstalled: boolean; + installModels: boolean; + hasModels: boolean; + installedVersion: string | null; + selectedVersion: string | null; +}) => { + if (!isInstalled) return true; + if (!installModels || !hasModels) return false; + if (installedVersion && selectedVersion && installedVersion !== selectedVersion) return false; + return true; +}; + +const computeCanInstall = ({ + details, + canLoadPlugin, + requiresLicenseAcceptance, + licenseAccepted, + jobInfo, + installModels, + modelSelectionRequired, + hasModels, + isInstalled, + installedVersion, + selectedVersion, +}: { + details: MarketplacePluginDetails | null; + canLoadPlugin: boolean; + requiresLicenseAcceptance: boolean; + licenseAccepted: boolean; + jobInfo: JobInfo | null; + installModels: boolean; + modelSelectionRequired: boolean; + hasModels: boolean; + 
isInstalled: boolean; + installedVersion: string | null; + selectedVersion: string | null; +}) => { + if (!details) return false; + if (!canLoadPlugin) return false; + if (details.signature.verified !== true) return false; + if (!isLicenseSatisfied(requiresLicenseAcceptance, licenseAccepted)) return false; + if (!isModelSelectionSatisfied(installModels, modelSelectionRequired)) return false; + if (isJobActive(jobInfo)) return false; + if (!isNativeMarketplaceAllowed(details)) return false; + if ( + !isInstallAllowedForInstalled({ + isInstalled, + installModels, + hasModels, + installedVersion, + selectedVersion, + }) + ) { + return false; + } + return true; +}; + +const useInstalledPlugin = ( + details: MarketplacePluginDetails | null, + active: boolean +): PluginSummary | null => { + const installedPlugins = usePluginStore((state) => state.plugins); + + useEffect(() => { + if (!active) return; + ensurePluginsLoaded().catch((err) => { + logger.error('Failed to load installed plugins', err); + }); + }, [active]); + + return useMemo(() => { + if (!details) return null; + return ( + installedPlugins.find((plugin) => plugin.original_kind === details.manifest.node_kind) ?? 
null + ); + }, [details, installedPlugins]); +}; + +const useModelFlags = ( + details: MarketplacePluginDetails | null, + selectedModelIds: string[], + installModels: boolean +) => { + const flags = useMemo( + () => deriveModelFlags(details, selectedModelIds), + [details, selectedModelIds] + ); + const missingModelSelection = + installModels && flags.hasModelSelection && selectedModelIds.length === 0; + return { ...flags, missingModelSelection }; +}; + +const useDebouncedSearch = (initialValue: string, delayMs: number) => { + const [searchInput, setSearchInput] = useState(initialValue); + const [debouncedSearch, setDebouncedSearch] = useState(initialValue); + + useEffect(() => { + const timeout = window.setTimeout(() => { + setDebouncedSearch(searchInput.trim()); + }, delayMs); + return () => window.clearTimeout(timeout); + }, [searchInput, delayMs]); + + return { searchInput, setSearchInput, debouncedSearch }; +}; + +const useJobStatus = (jobInfo: JobInfo | null) => { + const jobProgress = useMemo(() => computeJobProgress(jobInfo), [jobInfo]); + const jobIsActive = jobInfo?.status === 'queued' || jobInfo?.status === 'running'; + return { jobProgress, jobIsActive }; +}; + +const startInstall = async ({ + details, + installModels, + selectedModelIds, + resetJob, + setInstalling, + setJobId, + toast, +}: { + details: MarketplacePluginDetails | null; + installModels: boolean; + selectedModelIds: string[]; + resetJob: () => void; + setInstalling: React.Dispatch>; + setJobId: React.Dispatch>; + toast: ReturnType; +}) => { + if (!details) return; + const hasModelSelection = details.manifest.models.every((model) => model.id); + const modelIds = + installModels && hasModelSelection && selectedModelIds.length > 0 + ? 
selectedModelIds + : undefined; + setInstalling(true); + try { + const response = await installMarketplacePlugin({ + registry: details.registry, + plugin_id: details.manifest.id, + version: details.version.version, + install_models: installModels, + model_ids: modelIds, + }); + setJobId(response.job_id); + resetJob(); + } catch (err) { + const message = err instanceof Error ? err.message : 'Failed to start install.'; + toast.error(message); + } finally { + setInstalling(false); + } +}; + +const MarketplaceTab: React.FC = ({ active }) => { + const { can } = usePermissions(); + const toast = useToast(); + + const { + registries, + selectedRegistry, + setSelectedRegistry, + loading: registriesLoading, + loaded: registriesLoaded, + error: registriesError, + } = useMarketplaceRegistries(active); + + const { searchInput, setSearchInput, debouncedSearch } = useDebouncedSearch('', 300); + const [selectedPluginId, setSelectedPluginId] = useState(null); + const [licenseAccepted, setLicenseAccepted] = useState(false); + const [installModels, setInstallModels] = useState(false); + const [selectedModelIds, setSelectedModelIds] = useState([]); + const [jobId, setJobId] = useState(null); + const [installing, setInstalling] = useState(false); + + const { + index, + loading: indexLoading, + error: indexError, + } = useMarketplaceIndex(active, selectedRegistry, debouncedSearch); + + const { + details, + selectedVersion, + setSelectedVersion, + loading: detailsLoading, + error: detailsError, + } = useMarketplaceDetails(active, selectedRegistry, selectedPluginId); + + useEffect(() => { + syncSelectedPluginId(index, selectedPluginId, setSelectedPluginId); + }, [index, selectedPluginId]); + + useEffect(() => { + setLicenseAccepted(false); + }, [selectedPluginId, selectedVersion]); + + useEffect(() => { + syncInstallModels(details, setInstallModels); + setSelectedModelIds(defaultModelSelection(details)); + }, [details, selectedVersion]); + + const jobCallbacks = useMemo( + () => ({ + 
onSuccess: (info: JobInfo) => { + toast.success('Plugin installed successfully.'); + reloadPlugins().catch((err) => logger.error('Failed to refresh plugins', err)); + logger.info(info.summary); + }, + onFailure: (info: JobInfo) => { + toast.error(info.summary || 'Plugin install failed.'); + }, + onCancelled: () => { + toast.info('Plugin install cancelled.'); + }, + }), + [toast] + ); + + const { jobInfo, jobError, cancelJob, resetJob } = useMarketplaceJob(jobId, jobCallbacks); + + const handleInstall = useCallback(() => { + void startInstall({ + details, + installModels, + selectedModelIds, + resetJob, + setInstalling, + setJobId, + toast, + }); + }, [details, installModels, selectedModelIds, resetJob, setInstalling, setJobId, toast]); + + const handleClearJob = useCallback(() => { + setJobId(null); + resetJob(); + }, [resetJob]); + + const handleSelectRegistry = useCallback( + (value: string) => { + setSelectedRegistry(value); + setSelectedPluginId(null); + }, + [setSelectedRegistry] + ); + + const handleSelectPlugin = useCallback((pluginId: string) => { + setSelectedPluginId(pluginId); + }, []); + const handleToggleModel = useCallback((modelId: string, checked: boolean) => { + setSelectedModelIds((current) => { + if (checked) { + return current.includes(modelId) ? current : [...current, modelId]; + } + return current.filter((id) => id !== modelId); + }); + }, []); + + const installedPlugin = useInstalledPlugin(details, active); + const { + hasModels, + hasModelSelection, + hasGatedModels, + requiresLicenseAcceptance, + missingModelSelection, + } = useModelFlags(details, selectedModelIds, installModels); + const installedVersion = installedPlugin?.version ?? 
null; + + const canInstall = useMemo( + () => + computeCanInstall({ + details, + canLoadPlugin: can.loadPlugin, + requiresLicenseAcceptance, + licenseAccepted, + jobInfo, + installModels, + modelSelectionRequired: missingModelSelection, + hasModels, + isInstalled: Boolean(installedPlugin), + installedVersion, + selectedVersion, + }), + [ + details, + can.loadPlugin, + requiresLicenseAcceptance, + licenseAccepted, + jobInfo, + installModels, + missingModelSelection, + hasModels, + installedPlugin, + installedVersion, + selectedVersion, + ] + ); + + const { jobProgress, jobIsActive } = useJobStatus(jobInfo); + const loadingMarketplace = registriesLoading || indexLoading; + const marketplaceError = registriesError ?? indexError ?? detailsError; + + return ( + <> + {marketplaceError && {marketplaceError}} + {!marketplaceError && registriesLoaded && registries.length === 0 && ( + No registries configured for this server. + )} +
+ Registry + + + + +
+ + + + + + + + + ); +}; + +export default MarketplaceTab; diff --git a/ui/src/views/plugins/marketplaceFormatters.ts b/ui/src/views/plugins/marketplaceFormatters.ts new file mode 100644 index 00000000..f384df30 --- /dev/null +++ b/ui/src/views/plugins/marketplaceFormatters.ts @@ -0,0 +1,61 @@ +// SPDX-FileCopyrightText: © 2025 StreamKit Contributors +// +// SPDX-License-Identifier: MPL-2.0 + +import type { JobInfo, JobStep } from '@/types/marketplace'; + +export const formatBytes = (bytes?: number | null): string => { + if (bytes === undefined || bytes === null) return ''; + if (bytes <= 0) return '0 B'; + const units = ['B', 'KB', 'MB', 'GB', 'TB']; + let value = bytes; + let unitIndex = 0; + while (value >= 1024 && unitIndex < units.length - 1) { + value /= 1024; + unitIndex += 1; + } + return `${value.toFixed(value < 10 && unitIndex > 0 ? 1 : 0)} ${units[unitIndex]}`; +}; + +export const formatStepName = (name: string): string => name.replace(/_/g, ' '); + +export const formatStepProgress = (step: JobStep): string | null => { + if (!step.progress) return null; + const parts: string[] = []; + + if (step.progress.current_item) { + parts.push(step.progress.current_item); + } + + if (step.progress.bytes_done !== undefined) { + const total = step.progress.bytes_total ? ` / ${formatBytes(step.progress.bytes_total)}` : ''; + parts.push(`${formatBytes(step.progress.bytes_done)}${total}`); + } + + if (step.progress.items_done !== undefined) { + const total = step.progress.items_total ? ` / ${step.progress.items_total}` : ''; + parts.push(`${step.progress.items_done}${total} items`); + } + + if (step.progress.rate_bytes_per_sec) { + parts.push(`${formatBytes(step.progress.rate_bytes_per_sec)}/s`); + } + + return parts.length > 0 ? 
parts.join(' • ') : null;
+};
+
+export const computeJobProgress = (jobInfo?: JobInfo | null): number | null => {
+  if (!jobInfo) return null;
+  const totalSteps = jobInfo.steps.length || 1;
+  const completed = jobInfo.steps.filter((step) => step.status === 'succeeded').length;
+  let progress = completed / totalSteps;
+  const running = jobInfo.steps.find((step) => step.status === 'running');
+  if (
+    running?.progress?.bytes_done !== undefined &&
+    running.progress.bytes_total &&
+    running.progress.bytes_total > 0
+  ) {
+    progress += running.progress.bytes_done / running.progress.bytes_total / totalSteps;
+  }
+  return Math.min(1, progress);
+};
diff --git a/ui/src/views/plugins/marketplaceHooks.ts b/ui/src/views/plugins/marketplaceHooks.ts
new file mode 100644
index 00000000..daaef493
--- /dev/null
+++ b/ui/src/views/plugins/marketplaceHooks.ts
@@ -0,0 +1,236 @@
+// SPDX-FileCopyrightText: © 2025 StreamKit Contributors
+//
+// SPDX-License-Identifier: MPL-2.0
+
+import { useCallback, useEffect, useState } from 'react';
+
+import {
+  cancelMarketplaceJob,
+  getMarketplaceJob,
+  getMarketplacePlugin,
+  listMarketplacePlugins,
+  listMarketplaceRegistries,
+} from '@/services/marketplace';
+import type {
+  JobInfo,
+  MarketplaceIndex,
+  MarketplacePluginDetails,
+  MarketplaceRegistry,
+} from '@/types/marketplace';
+
+export const useMarketplaceRegistries = (active: boolean) => {
+  const [registries, setRegistries] = useState<MarketplaceRegistry[]>([]);
+  const [selectedRegistry, setSelectedRegistry] = useState('');
+  const [loading, setLoading] = useState(false);
+  const [loaded, setLoaded] = useState(false);
+  const [error, setError] = useState<string | null>(null);
+
+  useEffect(() => {
+    if (!active || loaded) return;
+    let cancelled = false;
+    setLoading(true);
+    setError(null);
+
+    (async () => {
+      try {
+        const registryList = await listMarketplaceRegistries();
+        if (cancelled) return;
+        setRegistries(registryList);
+        setSelectedRegistry((prev) => prev || registryList[0]?.id || '');
+      } catch (err) {
+        if (!cancelled) {
+          const message = err instanceof Error ? err.message : 'Failed to load registries.';
+          setError(message);
+        }
+      } finally {
+        if (!cancelled) {
+          setLoading(false);
+          setLoaded(true);
+        }
+      }
+    })();
+
+    return () => {
+      cancelled = true;
+    };
+  }, [active, loaded]);
+
+  return {
+    registries,
+    selectedRegistry,
+    setSelectedRegistry,
+    loading,
+    loaded,
+    error,
+  };
+};
+
+export const useMarketplaceIndex = (active: boolean, registry: string, query: string) => {
+  const [index, setIndex] = useState<MarketplaceIndex | null>(null);
+  const [loading, setLoading] = useState(false);
+  const [error, setError] = useState<string | null>(null);
+
+  useEffect(() => {
+    if (!active) return;
+    if (!registry) {
+      setIndex(null);
+      return;
+    }
+
+    let cancelled = false;
+    setLoading(true);
+    setError(null);
+
+    (async () => {
+      try {
+        const data = await listMarketplacePlugins(registry, query);
+        if (cancelled) return;
+        setIndex(data);
+      } catch (err) {
+        if (!cancelled) {
+          const message = err instanceof Error ? err.message : 'Failed to load plugins.';
+          setError(message);
+          setIndex(null);
+        }
+      } finally {
+        if (!cancelled) setLoading(false);
+      }
+    })();
+
+    return () => {
+      cancelled = true;
+    };
+  }, [active, registry, query]);
+
+  return { index, loading, error };
+};
+
+export const useMarketplaceDetails = (
+  active: boolean,
+  registry: string,
+  pluginId: string | null
+) => {
+  const [details, setDetails] = useState<MarketplacePluginDetails | null>(null);
+  const [selectedVersion, setSelectedVersion] = useState<string | null>(null);
+  const [loading, setLoading] = useState(false);
+  const [error, setError] = useState<string | null>(null);
+
+  useEffect(() => {
+    setSelectedVersion(null);
+  }, [pluginId, registry]);
+
+  useEffect(() => {
+    if (!active) return;
+    if (!registry || !pluginId) {
+      setDetails(null);
+      return;
+    }
+
+    let cancelled = false;
+    setLoading(true);
+    setError(null);
+
+    (async () => {
+      try {
+        const pluginDetails = await getMarketplacePlugin(
+          registry,
+          pluginId,
+          selectedVersion ??
undefined
+        );
+        if (cancelled) return;
+        setDetails(pluginDetails);
+        setSelectedVersion((prev) => prev ?? pluginDetails.version.version);
+      } catch (err) {
+        if (!cancelled) {
+          const message = err instanceof Error ? err.message : 'Failed to load plugin details.';
+          setError(message);
+          setDetails(null);
+        }
+      } finally {
+        if (!cancelled) setLoading(false);
+      }
+    })();
+
+    return () => {
+      cancelled = true;
+    };
+  }, [active, registry, pluginId, selectedVersion]);
+
+  return { details, selectedVersion, setSelectedVersion, loading, error };
+};
+
+type JobCallbacks = {
+  onSuccess: (job: JobInfo) => void;
+  onFailure: (job: JobInfo) => void;
+  onCancelled: (job: JobInfo) => void;
+};
+
+export const useMarketplaceJob = (jobId: string | null, callbacks: JobCallbacks) => {
+  const [jobInfo, setJobInfo] = useState<JobInfo | null>(null);
+  const [jobError, setJobError] = useState<string | null>(null);
+
+  useEffect(() => {
+    if (!jobId) {
+      setJobInfo(null);
+      setJobError(null);
+      return;
+    }
+
+    let cancelled = false;
+    let timeoutId: number | null = null;
+
+    const poll = async () => {
+      try {
+        const info = await getMarketplaceJob(jobId);
+        if (cancelled) return;
+        setJobInfo(info);
+        setJobError(null);
+
+        if (info.status === 'succeeded') {
+          callbacks.onSuccess(info);
+          return;
+        }
+        if (info.status === 'failed') {
+          callbacks.onFailure(info);
+          return;
+        }
+        if (info.status === 'cancelled') {
+          callbacks.onCancelled(info);
+          return;
+        }
+
+        timeoutId = window.setTimeout(poll, 2000);
+      } catch (err) {
+        if (!cancelled) {
+          const message = err instanceof Error ? err.message : 'Failed to fetch job status.';
+          setJobError(message);
+        }
+      }
+    };
+
+    poll();
+
+    return () => {
+      cancelled = true;
+      if (timeoutId) window.clearTimeout(timeoutId);
+    };
+  }, [jobId, callbacks]);
+
+  const cancelJob = useCallback(async () => {
+    if (!jobId) return;
+    try {
+      const info = await cancelMarketplaceJob(jobId);
+      setJobInfo(info);
+    } catch (err) {
+      const message = err instanceof Error ?
err.message : 'Failed to cancel job.'; + setJobError(message); + } + }, [jobId]); + + const resetJob = useCallback(() => { + setJobInfo(null); + setJobError(null); + }, []); + + return { jobInfo, jobError, cancelJob, resetJob }; +}; From 6c3fbae0386d7a329b2c976f6a68ad15e5b60980 Mon Sep 17 00:00:00 2001 From: streamer45 Date: Wed, 28 Jan 2026 15:23:55 +0100 Subject: [PATCH 2/6] chore: update sample config --- samples/skit.toml | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/samples/skit.toml b/samples/skit.toml index be9b34a0..5bdf96c2 100644 --- a/samples/skit.toml +++ b/samples/skit.toml @@ -204,15 +204,17 @@ directory = ".plugins" # Allow installing native plugins from the marketplace (default: false) # Native plugins run in-process; only enable for fully trusted registries. -# allow_native_marketplace = false +# The official StreamKit marketplace uses signed manifests and curated plugins. +allow_native_marketplace = true # Minisign public keys (contents of `.pub` files) trusted for marketplace manifests -# trusted_pubkeys = [ -# "untrusted comment: minisign public key 81C485A94492F33F\nRWQ/85JEqYXEgX+2kl7Rwd8AcpVjYciSLzvLggzivbGyIrDPjfmcqjYP\n", -# ] +# This is the official StreamKit marketplace signing key. 
+trusted_pubkeys = [ + "untrusted comment: minisign public key 81C485A94492F33F\nRWQ/85JEqYXEgX+2kl7Rwd8AcpVjYciSLzvLggzivbGyIrDPjfmcqjYP\n", +] # Marketplace registry index URLs (e.g., https://example.com/index.json) -# registries = ["https://streamkit.dev/marketplace/index.json"] +registries = ["https://streamkit.dev/marketplace/index.json"] # Marketplace security controls # allow_model_urls = false From e08d0ea1fe1ddf46da678d96f5e4bf560909fcf4 Mon Sep 17 00:00:00 2001 From: streamer45 Date: Wed, 28 Jan 2026 17:45:49 +0100 Subject: [PATCH 3/6] chore: use official repo for model downloads --- Dockerfile.demo | 23 ++-- Dockerfile.full | 25 ++-- Dockerfile.full-gpu | 30 +++-- justfile | 117 +++++++++++++----- plugins/native/matcha/download-models.sh | 12 +- plugins/native/piper/download-models.sh | 8 +- .../native/piper/download-piper-spanish.sh | 8 +- 7 files changed, 144 insertions(+), 79 deletions(-) diff --git a/Dockerfile.demo b/Dockerfile.demo index ccd4f9d9..8e2fbca7 100644 --- a/Dockerfile.demo +++ b/Dockerfile.demo @@ -155,12 +155,13 @@ RUN --mount=type=cache,id=cargo-registry-whisper,target=/usr/local/cargo/registr # Download Whisper models (demo image uses a single tiny multilingual model). 
# - ggml-tiny-q5_1.bin: Tiny multilingual STT (quantized) +# NOTE: This model is not yet uploaded to streamkit/whisper-models, using original source # - silero_vad.onnx: VAD model for Whisper RUN mkdir -p /build/models && \ curl -L -o /build/models/ggml-tiny-q5_1.bin \ https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-tiny-q5_1.bin && \ curl -L -o /build/models/silero_vad.onnx \ - https://raw.githubusercontent.com/snakers4/silero-vad/master/src/silero_vad/data/silero_vad.onnx + https://huggingface.co/streamkit/whisper-models/resolve/main/silero_vad.onnx # Stage 5: Build Kokoro TTS plugin FROM rust:1.92-slim-bookworm AS kokoro-builder @@ -205,7 +206,8 @@ RUN --mount=type=cache,id=cargo-registry-kokoro,target=/usr/local/cargo/registry # Download Kokoro TTS models RUN mkdir -p /build/models && \ cd /build/models && \ - wget https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/kokoro-multi-lang-v1_1.tar.bz2 && \ + curl -L -o kokoro-multi-lang-v1_1.tar.bz2 \ + https://huggingface.co/streamkit/kokoro-models/resolve/main/kokoro-multi-lang-v1_1.tar.bz2 && \ tar xf kokoro-multi-lang-v1_1.tar.bz2 && \ rm kokoro-multi-lang-v1_1.tar.bz2 @@ -248,7 +250,8 @@ RUN --mount=type=cache,id=cargo-registry-piper,target=/usr/local/cargo/registry # Download Piper TTS models (English + Spanish for translation output) RUN mkdir -p /build/models && \ cd /build/models && \ - wget https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/vits-piper-en_US-libritts_r-medium.tar.bz2 && \ + curl -L -o vits-piper-en_US-libritts_r-medium.tar.bz2 \ + https://huggingface.co/streamkit/piper-models/resolve/main/vits-piper-en_US-libritts_r-medium.tar.bz2 && \ tar xf vits-piper-en_US-libritts_r-medium.tar.bz2 && \ rm vits-piper-en_US-libritts_r-medium.tar.bz2 && \ cd vits-piper-en_US-libritts_r-medium && \ @@ -256,7 +259,8 @@ RUN mkdir -p /build/models && \ ln -sf en_US-libritts_r-medium.onnx model.onnx; \ fi && \ cd /build/models && \ - wget 
https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/vits-piper-es_MX-claude-high.tar.bz2 && \ + curl -L -o vits-piper-es_MX-claude-high.tar.bz2 \ + https://huggingface.co/streamkit/piper-models/resolve/main/vits-piper-es_MX-claude-high.tar.bz2 && \ tar xf vits-piper-es_MX-claude-high.tar.bz2 && \ rm vits-piper-es_MX-claude-high.tar.bz2 && \ cd vits-piper-es_MX-claude-high && \ @@ -303,7 +307,8 @@ RUN --mount=type=cache,id=cargo-registry-sensevoice,target=/usr/local/cargo/regi # Download SenseVoice models RUN mkdir -p /build/models && \ cd /build/models && \ - wget https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2 && \ + curl -L -o sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2 \ + https://huggingface.co/streamkit/sensevoice-models/resolve/main/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2 && \ tar xf sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2 && \ rm sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2 @@ -346,7 +351,7 @@ RUN --mount=type=cache,id=cargo-registry-vad,target=/usr/local/cargo/registry \ # Download ten-vad model RUN mkdir -p /build/models && \ curl -L -o /build/models/ten-vad.onnx \ - https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/ten-vad.onnx + https://huggingface.co/streamkit/vad-models/resolve/main/ten-vad.onnx # Stage 9: Build Matcha TTS plugin FROM rust:1.92-slim-bookworm AS matcha-builder @@ -387,11 +392,13 @@ RUN --mount=type=cache,id=cargo-registry-matcha,target=/usr/local/cargo/registry # Download Matcha TTS models RUN mkdir -p /build/models && \ cd /build/models && \ - wget https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/matcha-icefall-en_US-ljspeech.tar.bz2 && \ + curl -L -o matcha-icefall-en_US-ljspeech.tar.bz2 \ + https://huggingface.co/streamkit/matcha-models/resolve/main/matcha-icefall-en_US-ljspeech.tar.bz2 && \ tar xf 
matcha-icefall-en_US-ljspeech.tar.bz2 && \ rm matcha-icefall-en_US-ljspeech.tar.bz2 && \ cd matcha-icefall-en_US-ljspeech && \ - wget -O vocos-22khz-univ.onnx https://github.com/k2-fsa/sherpa-onnx/releases/download/vocoder-models/vocos-22khz-univ.onnx + curl -L -o vocos-22khz-univ.onnx \ + https://huggingface.co/streamkit/matcha-models/resolve/main/matcha-icefall-en_US-ljspeech/vocos-22khz-univ.onnx # Stage 10: Build Helsinki Translation plugin (CPU-only) FROM rust:1.92-slim-bookworm AS helsinki-builder diff --git a/Dockerfile.full b/Dockerfile.full index 4321a5fb..43cb2c3d 100644 --- a/Dockerfile.full +++ b/Dockerfile.full @@ -157,14 +157,12 @@ RUN --mount=type=cache,id=cargo-registry-whisper,target=/usr/local/cargo/registr # Download Whisper models (base.en and tiny.en) and Silero VAD model RUN mkdir -p /build/models && \ - curl -L -o /build/models/ggml-base.en.bin \ - https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.en.bin && \ curl -L -o /build/models/ggml-base.en-q5_1.bin \ - https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.en-q5_1.bin && \ + https://huggingface.co/streamkit/whisper-models/resolve/main/ggml-base.en-q5_1.bin && \ curl -L -o /build/models/ggml-tiny.en-q5_1.bin \ - https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-tiny.en-q5_1.bin && \ + https://huggingface.co/streamkit/whisper-models/resolve/main/ggml-tiny.en-q5_1.bin && \ curl -L -o /build/models/silero_vad.onnx \ - https://raw.githubusercontent.com/snakers4/silero-vad/master/src/silero_vad/data/silero_vad.onnx + https://huggingface.co/streamkit/whisper-models/resolve/main/silero_vad.onnx # Stage 5: Build Kokoro TTS plugin FROM rust:1.92-slim-bookworm AS kokoro-builder @@ -209,7 +207,8 @@ RUN --mount=type=cache,id=cargo-registry-kokoro,target=/usr/local/cargo/registry # Download Kokoro TTS models RUN mkdir -p /build/models && \ cd /build/models && \ - wget 
https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/kokoro-multi-lang-v1_1.tar.bz2 && \ + curl -L -o kokoro-multi-lang-v1_1.tar.bz2 \ + https://huggingface.co/streamkit/kokoro-models/resolve/main/kokoro-multi-lang-v1_1.tar.bz2 && \ tar xf kokoro-multi-lang-v1_1.tar.bz2 && \ rm kokoro-multi-lang-v1_1.tar.bz2 @@ -252,7 +251,8 @@ RUN --mount=type=cache,id=cargo-registry-piper,target=/usr/local/cargo/registry # Download Piper TTS models RUN mkdir -p /build/models && \ cd /build/models && \ - wget https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/vits-piper-en_US-libritts_r-medium.tar.bz2 && \ + curl -L -o vits-piper-en_US-libritts_r-medium.tar.bz2 \ + https://huggingface.co/streamkit/piper-models/resolve/main/vits-piper-en_US-libritts_r-medium.tar.bz2 && \ tar xf vits-piper-en_US-libritts_r-medium.tar.bz2 && \ rm vits-piper-en_US-libritts_r-medium.tar.bz2 && \ cd vits-piper-en_US-libritts_r-medium && \ @@ -299,7 +299,8 @@ RUN --mount=type=cache,id=cargo-registry-sensevoice,target=/usr/local/cargo/regi # Download SenseVoice models RUN mkdir -p /build/models && \ cd /build/models && \ - wget https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2 && \ + curl -L -o sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2 \ + https://huggingface.co/streamkit/sensevoice-models/resolve/main/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2 && \ tar xf sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2 && \ rm sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2 @@ -342,7 +343,7 @@ RUN --mount=type=cache,id=cargo-registry-vad,target=/usr/local/cargo/registry \ # Download ten-vad model RUN mkdir -p /build/models && \ curl -L -o /build/models/ten-vad.onnx \ - https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/ten-vad.onnx + https://huggingface.co/streamkit/vad-models/resolve/main/ten-vad.onnx # Stage 9: Build 
Matcha TTS plugin FROM rust:1.92-slim-bookworm AS matcha-builder @@ -383,11 +384,13 @@ RUN --mount=type=cache,id=cargo-registry-matcha,target=/usr/local/cargo/registry # Download Matcha TTS models RUN mkdir -p /build/models && \ cd /build/models && \ - wget https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/matcha-icefall-en_US-ljspeech.tar.bz2 && \ + curl -L -o matcha-icefall-en_US-ljspeech.tar.bz2 \ + https://huggingface.co/streamkit/matcha-models/resolve/main/matcha-icefall-en_US-ljspeech.tar.bz2 && \ tar xf matcha-icefall-en_US-ljspeech.tar.bz2 && \ rm matcha-icefall-en_US-ljspeech.tar.bz2 && \ cd matcha-icefall-en_US-ljspeech && \ - wget -O vocos-22khz-univ.onnx https://github.com/k2-fsa/sherpa-onnx/releases/download/vocoder-models/vocos-22khz-univ.onnx + curl -L -o vocos-22khz-univ.onnx \ + https://huggingface.co/streamkit/matcha-models/resolve/main/matcha-icefall-en_US-ljspeech/vocos-22khz-univ.onnx # Stage 10: Build NLLB Translation plugin FROM rust:1.92-slim-bookworm AS nllb-builder diff --git a/Dockerfile.full-gpu b/Dockerfile.full-gpu index 80c3ab4f..97a238c5 100644 --- a/Dockerfile.full-gpu +++ b/Dockerfile.full-gpu @@ -168,16 +168,14 @@ RUN --mount=type=cache,id=whisper-cargo-registry,target=/root/.cargo/registry \ # Download Whisper models (base.en, base multilingual, and tiny.en) and Silero VAD model RUN mkdir -p /build/models && \ - curl -L -o /build/models/ggml-base.en.bin \ - https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.en.bin && \ curl -L -o /build/models/ggml-base.en-q5_1.bin \ - https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.en-q5_1.bin && \ + https://huggingface.co/streamkit/whisper-models/resolve/main/ggml-base.en-q5_1.bin && \ curl -L -o /build/models/ggml-base-q5_1.bin \ - https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base-q5_1.bin && \ + https://huggingface.co/streamkit/whisper-models/resolve/main/ggml-base-q5_1.bin && \ curl -L -o 
/build/models/ggml-tiny.en-q5_1.bin \ - https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-tiny.en-q5_1.bin && \ + https://huggingface.co/streamkit/whisper-models/resolve/main/ggml-tiny.en-q5_1.bin && \ curl -L -o /build/models/silero_vad.onnx \ - https://raw.githubusercontent.com/snakers4/silero-vad/master/src/silero_vad/data/silero_vad.onnx + https://huggingface.co/streamkit/whisper-models/resolve/main/silero_vad.onnx # Stage 5: Build Kokoro TTS plugin with GPU support ARG CUDA_VERSION @@ -230,7 +228,8 @@ RUN --mount=type=cache,id=kokoro-cargo-registry,target=/root/.cargo/registry \ # Download Kokoro TTS models RUN mkdir -p /build/models && \ cd /build/models && \ - wget https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/kokoro-multi-lang-v1_1.tar.bz2 && \ + curl -L -o kokoro-multi-lang-v1_1.tar.bz2 \ + https://huggingface.co/streamkit/kokoro-models/resolve/main/kokoro-multi-lang-v1_1.tar.bz2 && \ tar xf kokoro-multi-lang-v1_1.tar.bz2 && \ rm kokoro-multi-lang-v1_1.tar.bz2 @@ -279,7 +278,8 @@ RUN --mount=type=cache,id=piper-cargo-registry,target=/root/.cargo/registry \ RUN mkdir -p /build/models && \ cd /build/models && \ # English model (sherpa-onnx format) - wget https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/vits-piper-en_US-libritts_r-medium.tar.bz2 && \ + curl -L -o vits-piper-en_US-libritts_r-medium.tar.bz2 \ + https://huggingface.co/streamkit/piper-models/resolve/main/vits-piper-en_US-libritts_r-medium.tar.bz2 && \ tar xf vits-piper-en_US-libritts_r-medium.tar.bz2 && \ rm vits-piper-en_US-libritts_r-medium.tar.bz2 && \ cd vits-piper-en_US-libritts_r-medium && \ @@ -288,7 +288,8 @@ RUN mkdir -p /build/models && \ fi && \ cd /build/models && \ # Mexican Spanish model (sherpa-onnx format with espeak-ng-data) - wget https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/vits-piper-es_MX-claude-high.tar.bz2 && \ + curl -L -o vits-piper-es_MX-claude-high.tar.bz2 \ + 
https://huggingface.co/streamkit/piper-models/resolve/main/vits-piper-es_MX-claude-high.tar.bz2 && \ tar xf vits-piper-es_MX-claude-high.tar.bz2 && \ rm vits-piper-es_MX-claude-high.tar.bz2 && \ cd vits-piper-es_MX-claude-high && \ @@ -340,7 +341,8 @@ RUN --mount=type=cache,id=sensevoice-cargo-registry,target=/root/.cargo/registry # Download SenseVoice models RUN mkdir -p /build/models && \ cd /build/models && \ - wget https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2 && \ + curl -L -o sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2 \ + https://huggingface.co/streamkit/sensevoice-models/resolve/main/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2 && \ tar xf sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2 && \ rm sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2 @@ -388,7 +390,7 @@ RUN --mount=type=cache,id=vad-cargo-registry,target=/root/.cargo/registry \ # Download ten-vad model RUN mkdir -p /build/models && \ curl -L -o /build/models/ten-vad.onnx \ - https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/ten-vad.onnx + https://huggingface.co/streamkit/vad-models/resolve/main/ten-vad.onnx # Stage 9: Build Matcha TTS plugin with GPU support ARG CUDA_VERSION @@ -434,11 +436,13 @@ RUN --mount=type=cache,id=matcha-cargo-registry,target=/root/.cargo/registry \ # Download Matcha TTS models RUN mkdir -p /build/models && \ cd /build/models && \ - wget https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/matcha-icefall-en_US-ljspeech.tar.bz2 && \ + curl -L -o matcha-icefall-en_US-ljspeech.tar.bz2 \ + https://huggingface.co/streamkit/matcha-models/resolve/main/matcha-icefall-en_US-ljspeech.tar.bz2 && \ tar xf matcha-icefall-en_US-ljspeech.tar.bz2 && \ rm matcha-icefall-en_US-ljspeech.tar.bz2 && \ cd matcha-icefall-en_US-ljspeech && \ - wget -O vocos-22khz-univ.onnx 
https://github.com/k2-fsa/sherpa-onnx/releases/download/vocoder-models/vocos-22khz-univ.onnx + curl -L -o vocos-22khz-univ.onnx \ + https://huggingface.co/streamkit/matcha-models/resolve/main/matcha-icefall-en_US-ljspeech/vocos-22khz-univ.onnx # Stage 10: Build NLLB Translation plugin with GPU support # NOTE: This stage explicitly waits for whisper-builder to complete first diff --git a/justfile b/justfile index bc854bf9..e1b57e65 100644 --- a/justfile +++ b/justfile @@ -446,7 +446,7 @@ download-silero-vad: echo "✓ Silero VAD model downloaded to models/silero_vad.onnx ($(du -h models/silero_vad.onnx | cut -f1))"; \ fi -# Download Whisper models (tiny + base) +# Download Whisper models (tiny.en, base.en, base multilingual) download-whisper-models: @echo "Downloading Whisper models..." @mkdir -p models @@ -466,6 +466,14 @@ download-whisper-models: https://huggingface.co/streamkit/whisper-models/resolve/main/ggml-base.en-q5_1.bin && \ echo "✓ Whisper base.en model downloaded to models/ggml-base.en-q5_1.bin ($(du -h models/ggml-base.en-q5_1.bin | cut -f1))"; \ fi + @if [ -f models/ggml-base-q5_1.bin ]; then \ + echo "✓ Whisper base (multilingual) model already exists at models/ggml-base-q5_1.bin"; \ + else \ + echo "Downloading ggml-base-q5_1.bin (~57MB)..." && \ + curl -L -o models/ggml-base-q5_1.bin \ + https://huggingface.co/streamkit/whisper-models/resolve/main/ggml-base-q5_1.bin && \ + echo "✓ Whisper base (multilingual) model downloaded to models/ggml-base-q5_1.bin ($(du -h models/ggml-base-q5_1.bin | cut -f1))"; \ + fi # Setup Whisper (download models + VAD) setup-whisper: download-whisper-models download-silero-vad @@ -498,19 +506,21 @@ install-sherpa-onnx: download-kokoro-models: @echo "Downloading Kokoro TTS models..." 
@mkdir -p models - @cd models && \ - if [ -f kokoro-multi-lang-v1_1.tar.bz2 ]; then \ - echo "Archive already exists, skipping download."; \ + @if [ -f models/kokoro-multi-lang-v1_1.tar.bz2 ]; then \ + echo "✓ Kokoro archive already exists at models/kokoro-multi-lang-v1_1.tar.bz2"; \ else \ - wget https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/kokoro-multi-lang-v1_1.tar.bz2; \ - fi && \ - if [ -d kokoro-multi-lang-v1_1 ]; then \ - echo "Models already extracted, skipping."; \ + echo "Downloading kokoro-multi-lang-v1_1.tar.bz2 (~348MB)..." && \ + curl -L -o models/kokoro-multi-lang-v1_1.tar.bz2 \ + https://huggingface.co/streamkit/kokoro-models/resolve/main/kokoro-multi-lang-v1_1.tar.bz2 && \ + echo "✓ Kokoro archive downloaded"; \ + fi + @if [ -d models/kokoro-multi-lang-v1_1 ]; then \ + echo "✓ Kokoro models already extracted at models/kokoro-multi-lang-v1_1"; \ else \ echo "Extracting models..." && \ - tar xf kokoro-multi-lang-v1_1.tar.bz2; \ - fi && \ - echo "✓ Kokoro v1.1 models ready at models/kokoro-multi-lang-v1_1 (103 speakers, 24kHz)" + cd models && tar xf kokoro-multi-lang-v1_1.tar.bz2 && \ + echo "✓ Kokoro v1.1 models ready at models/kokoro-multi-lang-v1_1 (103 speakers, 24kHz)"; \ + fi # Setup Kokoro TTS (install dependencies + download models) setup-kokoro: install-sherpa-onnx download-kokoro-models @@ -577,19 +587,21 @@ upload-matcha-plugin: build-plugin-native-matcha download-sensevoice-models: @echo "Downloading SenseVoice models..." 
@mkdir -p models - @cd models && \ - if [ -f sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2 ]; then \ - echo "Archive already exists, skipping download."; \ + @if [ -f models/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2 ]; then \ + echo "✓ SenseVoice archive already exists"; \ else \ - wget https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2; \ - fi && \ - if [ -d sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09 ]; then \ - echo "Models already extracted, skipping."; \ + echo "Downloading sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2 (~158MB)..." && \ + curl -L -o models/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2 \ + https://huggingface.co/streamkit/sensevoice-models/resolve/main/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2 && \ + echo "✓ SenseVoice archive downloaded"; \ + fi + @if [ -d models/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09 ]; then \ + echo "✓ SenseVoice models already extracted"; \ else \ echo "Extracting models..." && \ - tar xf sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2; \ - fi && \ - echo "✓ SenseVoice models ready at models/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09 (multilingual: zh, en, ja, ko, yue)" + cd models && tar xf sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09.tar.bz2 && \ + echo "✓ SenseVoice models ready at models/sherpa-onnx-sense-voice-zh-en-ja-ko-yue-int8-2025-09-09 (multilingual: zh, en, ja, ko, yue)"; \ + fi # Setup SenseVoice (install dependencies + download models) setup-sensevoice: install-sherpa-onnx download-sensevoice-models download-silero-vad @@ -611,17 +623,20 @@ upload-sensevoice-plugin: build-plugin-native-sensevoice # Download pre-converted NLLB models from Hugging Face download-nllb-models: @echo "Downloading pre-converted NLLB-200 models from Hugging Face..." 
- @echo "⚠️ This requires Python with huggingface-hub installed." - @echo "⚠️ Install with: pip3 install --user huggingface-hub" - @echo "" @mkdir -p models - @cd models && \ - if [ -d nllb-200-distilled-600M-ct2-int8 ]; then \ - echo "NLLB model already downloaded, skipping."; \ + @if [ -f models/nllb-200-distilled-600M-ct2-int8.tar.bz2 ]; then \ + echo "✓ NLLB archive already exists"; \ else \ - echo "Downloading pre-converted NLLB-200-distilled-600M (CTranslate2 format)..."; \ - echo "This will download ~1.2 GB from Hugging Face."; \ - python3 -c "from huggingface_hub import snapshot_download; snapshot_download('entai2965/nllb-200-distilled-600M-ctranslate2', local_dir='nllb-200-distilled-600M-ct2-int8', local_dir_use_symlinks=False)" && \ + echo "Downloading nllb-200-distilled-600M-ct2-int8.tar.bz2 (~1.1GB)..." && \ + curl -L -o models/nllb-200-distilled-600M-ct2-int8.tar.bz2 \ + https://huggingface.co/streamkit/nllb-models/resolve/main/nllb-200-distilled-600M-ct2-int8.tar.bz2 && \ + echo "✓ NLLB archive downloaded"; \ + fi + @if [ -d models/nllb-200-distilled-600M-ct2-int8 ]; then \ + echo "✓ NLLB model already extracted"; \ + else \ + echo "Extracting models..." && \ + cd models && tar xf nllb-200-distilled-600M-ct2-int8.tar.bz2 && \ echo "✓ NLLB model ready at models/nllb-200-distilled-600M-ct2-int8 (supports 200 languages)"; \ fi @@ -656,9 +671,9 @@ download-tenvad-models: @if [ -f models/ten-vad.onnx ]; then \ echo "✓ ten-vad model already exists at models/ten-vad.onnx"; \ else \ - echo "Downloading ten-vad.onnx from GitHub releases..."; \ + echo "Downloading ten-vad.onnx (~324KB)..." 
&& \ curl -L -o models/ten-vad.onnx \ - https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/ten-vad.onnx && \ + https://huggingface.co/streamkit/vad-models/resolve/main/ten-vad.onnx && \ echo "✓ ten-vad model downloaded to models/ten-vad.onnx ($(du -h models/ten-vad.onnx | cut -f1))"; \ fi @@ -690,8 +705,44 @@ upload-vad-plugin: build-plugin-native-vad @curl -X POST -F plugin=@target/release/libvad.so \ http://127.0.0.1:4545/api/v1/plugins -# Download Helsinki-NLP OPUS-MT models for translation +# Download Helsinki-NLP OPUS-MT models for translation (pre-packaged from StreamKit HuggingFace) download-helsinki-models: + @echo "Downloading Helsinki OPUS-MT models..." + @mkdir -p models + @if [ -f models/opus-mt-en-es.tar.bz2 ]; then \ + echo "✓ opus-mt-en-es archive already exists"; \ + else \ + echo "Downloading opus-mt-en-es.tar.bz2 (~272MB)..." && \ + curl -L -o models/opus-mt-en-es.tar.bz2 \ + https://huggingface.co/streamkit/helsinki-models/resolve/main/opus-mt-en-es.tar.bz2 && \ + echo "✓ opus-mt-en-es archive downloaded"; \ + fi + @if [ -f models/opus-mt-es-en.tar.bz2 ]; then \ + echo "✓ opus-mt-es-en archive already exists"; \ + else \ + echo "Downloading opus-mt-es-en.tar.bz2 (~272MB)..." && \ + curl -L -o models/opus-mt-es-en.tar.bz2 \ + https://huggingface.co/streamkit/helsinki-models/resolve/main/opus-mt-es-en.tar.bz2 && \ + echo "✓ opus-mt-es-en archive downloaded"; \ + fi + @if [ -d models/opus-mt-en-es ]; then \ + echo "✓ opus-mt-en-es model already extracted"; \ + else \ + echo "Extracting opus-mt-en-es..." && \ + cd models && tar xf opus-mt-en-es.tar.bz2 && \ + echo "✓ opus-mt-en-es extracted"; \ + fi + @if [ -d models/opus-mt-es-en ]; then \ + echo "✓ opus-mt-es-en model already extracted"; \ + else \ + echo "Extracting opus-mt-es-en..." 
&& \ + cd models && tar xf opus-mt-es-en.tar.bz2 && \ + echo "✓ opus-mt-es-en extracted"; \ + fi + @echo "✓ Helsinki OPUS-MT models ready (Apache 2.0 license)" + +# Download Helsinki models from original source (requires Python dependencies) +download-helsinki-models-source: @echo "⚠️ This requires Python with transformers and tokenizers installed." @echo "⚠️ Install with: pip3 install --user transformers sentencepiece safetensors torch tokenizers" @echo "" diff --git a/plugins/native/matcha/download-models.sh b/plugins/native/matcha/download-models.sh index 223cdd73..3a6f39d3 100755 --- a/plugins/native/matcha/download-models.sh +++ b/plugins/native/matcha/download-models.sh @@ -10,7 +10,7 @@ cd "$(dirname "$0")/../.." MODEL_NAME="matcha-icefall-en_US-ljspeech" MODEL_DIR="models/${MODEL_NAME}" -BASE_URL="https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models" +BASE_URL="https://huggingface.co/streamkit/matcha-models/resolve/main" echo "Downloading Matcha TTS model: ${MODEL_NAME}" echo "Model directory: ${MODEL_DIR}" @@ -18,13 +18,13 @@ echo mkdir -p models -# Download pre-converted model from sherpa-onnx releases -echo "Downloading ${MODEL_NAME}.tar.bz2..." +# Download pre-converted model from StreamKit Hugging Face +echo "Downloading ${MODEL_NAME}.tar.bz2 (~115MB)..." cd models if [ -f "${MODEL_NAME}.tar.bz2" ]; then echo "Archive already exists, skipping download." else - wget "${BASE_URL}/${MODEL_NAME}.tar.bz2" + curl -L -o "${MODEL_NAME}.tar.bz2" "${BASE_URL}/${MODEL_NAME}.tar.bz2" fi if [ -d "${MODEL_NAME}" ]; then @@ -39,13 +39,13 @@ cd .. # Download Vocos vocoder (required, ~10 MB) echo echo "Downloading Vocos vocoder model..." -VOCODER_URL="https://github.com/k2-fsa/sherpa-onnx/releases/download/vocoder-models/vocos-22khz-univ.onnx" +VOCODER_URL="${BASE_URL}/${MODEL_NAME}/vocos-22khz-univ.onnx" VOCODER_PATH="${MODEL_DIR}/vocos-22khz-univ.onnx" if [ -f "${VOCODER_PATH}" ]; then echo "Vocoder already exists, skipping download." 
else - wget -O "${VOCODER_PATH}" "${VOCODER_URL}" + curl -L -o "${VOCODER_PATH}" "${VOCODER_URL}" echo "✓ Vocoder downloaded to ${VOCODER_PATH}" fi diff --git a/plugins/native/piper/download-models.sh b/plugins/native/piper/download-models.sh index 737028cd..3bda1be2 100755 --- a/plugins/native/piper/download-models.sh +++ b/plugins/native/piper/download-models.sh @@ -10,7 +10,7 @@ cd "$(dirname "$0")/../.." MODEL_NAME="vits-piper-en_US-libritts_r-medium" MODEL_DIR="models/${MODEL_NAME}" -BASE_URL="https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models" +BASE_URL="https://huggingface.co/streamkit/piper-models/resolve/main" echo "Downloading Piper TTS model: ${MODEL_NAME}" echo "Model directory: ${MODEL_DIR}" @@ -18,13 +18,13 @@ echo mkdir -p models -# Download pre-converted model from sherpa-onnx releases -echo "Downloading ${MODEL_NAME}.tar.bz2..." +# Download pre-converted model from StreamKit Hugging Face +echo "Downloading ${MODEL_NAME}.tar.bz2 (~78MB)..." cd models if [ -f "${MODEL_NAME}.tar.bz2" ]; then echo "Archive already exists, skipping download." else - wget "${BASE_URL}/${MODEL_NAME}.tar.bz2" + curl -L -o "${MODEL_NAME}.tar.bz2" "${BASE_URL}/${MODEL_NAME}.tar.bz2" fi if [ -d "${MODEL_NAME}" ]; then diff --git a/plugins/native/piper/download-piper-spanish.sh b/plugins/native/piper/download-piper-spanish.sh index 97cc294b..050e7907 100755 --- a/plugins/native/piper/download-piper-spanish.sh +++ b/plugins/native/piper/download-piper-spanish.sh @@ -7,7 +7,7 @@ set -euo pipefail MODEL_NAME="vits-piper-es_MX-claude-high" MODEL_DIR="models/${MODEL_NAME}" -BASE_URL="https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models" +BASE_URL="https://huggingface.co/streamkit/piper-models/resolve/main" echo "Downloading Mexican Spanish Piper TTS model: ${MODEL_NAME}" echo "Model directory: ${MODEL_DIR}" @@ -15,13 +15,13 @@ echo mkdir -p models -# Download pre-converted model from sherpa-onnx releases -echo "Downloading ${MODEL_NAME}.tar.bz2..." 
+# Download pre-converted model from StreamKit Hugging Face +echo "Downloading ${MODEL_NAME}.tar.bz2 (~64MB)..." cd models if [ -f "${MODEL_NAME}.tar.bz2" ]; then echo "Archive already exists, skipping download." else - wget "${BASE_URL}/${MODEL_NAME}.tar.bz2" + curl -L -o "${MODEL_NAME}.tar.bz2" "${BASE_URL}/${MODEL_NAME}.tar.bz2" fi if [ -d "${MODEL_NAME}" ]; then From 8c48e86cedf6177807e4dc60c269786a5b6ba688 Mon Sep 17 00:00:00 2001 From: streamer45 Date: Thu, 29 Jan 2026 14:18:01 +0100 Subject: [PATCH 4/6] feat(marketplace): per-plugin release tags and embed manifest in bundles --- .github/workflows/marketplace-build.yml | 30 +++---- .github/workflows/marketplace-release.yml | 102 ++++++++++++++++------ .github/workflows/release.yml | 3 - scripts/marketplace/build_registry.py | 42 +++++++-- 4 files changed, 123 insertions(+), 54 deletions(-) diff --git a/.github/workflows/marketplace-build.yml b/.github/workflows/marketplace-build.yml index 7d03d805..3c2a2270 100644 --- a/.github/workflows/marketplace-build.yml +++ b/.github/workflows/marketplace-build.yml @@ -7,14 +7,6 @@ name: Marketplace Build (Reusable) on: workflow_call: inputs: - snapshot_version: - description: "Marketplace snapshot version (not plugin version; used for release naming)" - required: true - type: string - release_tag: - description: "Release tag for bundle URLs and registry PR" - required: true - type: string registry_base_url: description: "Registry base URL override" required: false @@ -36,8 +28,6 @@ env: RUST_BACKTRACE: 1 SHERPA_ONNX_VERSION: "1.12.17" MINISIGN_DEB_URL: "http://launchpadlibrarian.net/780165111/minisign_0.12-1_amd64.deb" - SNAPSHOT_VERSION: ${{ inputs.snapshot_version }} - RELEASE_TAG: ${{ inputs.release_tag }} REGISTRY_BASE_URL: ${{ inputs.registry_base_url || 'https://streamkit.dev/registry' }} jobs: @@ -149,11 +139,12 @@ jobs: python3 scripts/marketplace/build_registry.py \ --plugins marketplace/official-plugins.json \ --existing-registry docs/public/registry \ - 
--bundle-base-url "https://github.com/${{ github.repository }}/releases/download/${RELEASE_TAG}" \ + --bundle-url-template "https://github.com/${{ github.repository }}/releases/download/plugin-{plugin_id}-v{version}" \ --registry-base-url "${REGISTRY_BASE_URL}" \ --bundles-out dist/bundles \ --registry-out dist/registry \ - --signing-key /tmp/streamkit.key + --signing-key /tmp/streamkit.key \ + --new-plugins-out dist/new-plugins.json - name: Verify marketplace bundle portability run: | @@ -166,6 +157,13 @@ jobs: with: name: marketplace-bundles path: dist/bundles/*.tar.zst + if-no-files-found: ignore + + - name: Upload new plugins manifest + uses: actions/upload-artifact@v4 + with: + name: new-plugins + path: dist/new-plugins.json - name: Upload registry metadata uses: actions/upload-artifact@v4 @@ -225,10 +223,10 @@ jobs: token: ${{ secrets.REGISTRY_PR_TOKEN || github.token }} author: "StreamKit Registry Bot " committer: "StreamKit Registry Bot " - branch: "registry/${{ env.RELEASE_TAG }}-${{ github.run_id }}" - title: "chore(registry): publish marketplace registry for ${{ env.RELEASE_TAG }}" - commit-message: "chore(registry): publish marketplace registry for ${{ env.RELEASE_TAG }}" + branch: "registry/update-${{ github.run_id }}" + title: "chore(registry): publish marketplace registry update" + commit-message: "chore(registry): publish marketplace registry update" body: | - Automated registry metadata update for `${{ env.RELEASE_TAG }}`. + Automated registry metadata update from run `${{ github.run_id }}`. 
delete-branch: true base: main diff --git a/.github/workflows/marketplace-release.yml b/.github/workflows/marketplace-release.yml index 1a595d8d..362e7013 100644 --- a/.github/workflows/marketplace-release.yml +++ b/.github/workflows/marketplace-release.yml @@ -7,21 +7,12 @@ name: Marketplace Release on: workflow_dispatch: inputs: - snapshot_version: - description: "Marketplace snapshot version (e.g., 1.2.3, 0.2.0-dev)" - required: true - release_tag: - description: "Release tag (defaults to marketplace-v)" - required: false prerelease: - description: "Mark release as prerelease" + description: "Mark releases as prerelease" required: false type: boolean default: false -env: - RELEASE_TAG: ${{ inputs.release_tag || format('marketplace-v{0}', inputs.snapshot_version) }} - jobs: marketplace: uses: ./.github/workflows/marketplace-build.yml @@ -29,12 +20,9 @@ jobs: contents: write pull-requests: write secrets: inherit - with: - snapshot_version: ${{ inputs.snapshot_version }} - release_tag: ${{ inputs.release_tag || format('marketplace-v{0}', inputs.snapshot_version) }} - create-release: - name: Create Marketplace Release + create-releases: + name: Create Per-Plugin Releases needs: [marketplace] runs-on: ubuntu-22.04 permissions: @@ -42,22 +30,78 @@ jobs: steps: - uses: actions/checkout@v5 - - name: Download all artifacts + - name: Download bundles uses: actions/download-artifact@v4 with: - path: artifacts + name: marketplace-bundles + path: artifacts/bundles - - name: Create Release - uses: softprops/action-gh-release@v2 + - name: Download new plugins manifest + uses: actions/download-artifact@v4 with: - tag_name: ${{ env.RELEASE_TAG }} - target_commitish: ${{ github.sha }} - name: "Marketplace ${{ inputs.snapshot_version }}" - files: | - artifacts/**/marketplace-bundles/*.tar.zst - body: | - Marketplace bundles for snapshot `${{ inputs.snapshot_version }}`. 
- draft: false - prerelease: ${{ inputs.prerelease }} + name: new-plugins + path: artifacts + + - name: Load plugin metadata + id: plugins + run: | + if [ ! -f artifacts/new-plugins.json ]; then + echo "No new-plugins.json found" + echo "matrix=[]" >> "$GITHUB_OUTPUT" + exit 0 + fi + matrix=$(python3 -c " + import json, sys + data = json.load(open('artifacts/new-plugins.json')) + plugins = data.get('plugins', []) + print(json.dumps(plugins)) + ") + echo "matrix=${matrix}" >> "$GITHUB_OUTPUT" + + - name: Load plugin names + id: names + run: | + names=$(python3 -c " + import json + data = json.load(open('marketplace/official-plugins.json')) + mapping = {p['id']: p.get('name', p['id']) for p in data.get('plugins', [])} + print(json.dumps(mapping)) + ") + echo "names=${names}" >> "$GITHUB_OUTPUT" + + - name: Create per-plugin releases + if: steps.plugins.outputs.matrix != '[]' env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + PLUGIN_MATRIX: ${{ steps.plugins.outputs.matrix }} + PLUGIN_NAMES: ${{ steps.names.outputs.names }} + IS_PRERELEASE: ${{ inputs.prerelease }} + run: | + echo "${PLUGIN_MATRIX}" | python3 -c " + import json, os, subprocess, sys + + plugins = json.loads(sys.stdin.read()) + names = json.loads(os.environ['PLUGIN_NAMES']) + is_prerelease = os.environ.get('IS_PRERELEASE', 'false') == 'true' + + for plugin in plugins: + pid = plugin['id'] + version = plugin['version'] + tag = f'plugin-{pid}-v{version}' + name = names.get(pid, pid) + release_name = f'{name} v{version}' + bundle = f'artifacts/bundles/{pid}-{version}-bundle.tar.zst' + + cmd = [ + 'gh', 'release', 'create', tag, + '--target', os.environ.get('GITHUB_SHA', 'HEAD'), + '--title', release_name, + '--notes', f'Plugin bundle for {name} v{version}.', + bundle, + ] + if is_prerelease: + cmd.append('--prerelease') + + print(f'Creating release: {tag}') + subprocess.run(cmd, check=True) + " diff --git a/.github/workflows/release.yml 
b/.github/workflows/release.yml index ea18e0ae..b9514c3a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -86,9 +86,6 @@ jobs: contents: write pull-requests: write secrets: inherit - with: - snapshot_version: ${{ github.ref_name }} - release_tag: ${{ github.ref_name }} create-release: name: Create GitHub Release diff --git a/scripts/marketplace/build_registry.py b/scripts/marketplace/build_registry.py index 6c8d74bc..26f55e6f 100644 --- a/scripts/marketplace/build_registry.py +++ b/scripts/marketplace/build_registry.py @@ -76,7 +76,11 @@ def set_runpath_origin(target: pathlib.Path) -> None: def build_bundle( - plugin: dict, version: str, bundles_out: pathlib.Path, work_root: pathlib.Path + plugin: dict, + version: str, + bundles_out: pathlib.Path, + work_root: pathlib.Path, + embedded_manifest: dict | None = None, ) -> dict: plugin_id = plugin["id"] artifact = pathlib.Path(plugin["artifact"]) @@ -107,6 +111,9 @@ def build_bundle( dest = pathlib.Path(extra.get("dest", src.name)) copy_file(src, work_dir / dest) + if embedded_manifest is not None: + write_json(work_dir / "manifest.json", embedded_manifest) + bundle_name = f"{plugin_id}-{version}-bundle.tar.zst" bundle_path = bundles_out / bundle_name ensure_dir(bundles_out) @@ -149,7 +156,7 @@ def dump_manifest_bytes(manifest: dict) -> bytes: def build_manifest( plugin: dict, plugin_version: str, - bundle_block: dict, + bundle_block: dict | None, ) -> dict: """Build manifest dict from plugin metadata and bundle info.""" manifest = { @@ -299,7 +306,9 @@ def main() -> int: parser = argparse.ArgumentParser() parser.add_argument("--plugins", required=True, help="Path to plugin metadata JSON") parser.add_argument( - "--bundle-base-url", required=True, help="Base URL for bundle downloads" + "--bundle-url-template", + required=True, + help="URL template for bundle downloads with {plugin_id} and {version} placeholders", ) parser.add_argument( "--registry-base-url", required=True, help="Base 
URL for registry metadata" @@ -315,6 +324,10 @@ def main() -> int: "--public-key", help="Path to minisign public key to include in registry (default: docs/public/registry/streamkit.pub if exists)", ) + parser.add_argument( + "--new-plugins-out", + help="Path to write JSON file listing newly built plugins (id + version)", + ) args = parser.parse_args() plugins_path = pathlib.Path(args.plugins) @@ -344,9 +357,10 @@ def main() -> int: print("No plugins found in metadata", file=sys.stderr) return 1 - bundle_base_url = normalize_base_url(args.bundle_base_url) + bundle_url_template = args.bundle_url_template.rstrip("/") registry_base_url = normalize_base_url(args.registry_base_url) published_at = datetime.date.today().isoformat() + new_plugins = [] # Track all versions per plugin for index.json plugin_versions_map = {} # plugin_id -> list of version entries @@ -410,9 +424,18 @@ def main() -> int: shutil.copy2(existing["signature_path"], signature_path) else: # Build new version - bundle_info = build_bundle(plugin, plugin_version, bundles_out, work_root) + # Build the embedded manifest first (without bundle block) so it + # gets included inside the archive for offline inspection. 
+ embedded_manifest = build_manifest(plugin, plugin_version, bundle_block=None) + bundle_info = build_bundle( + plugin, plugin_version, bundles_out, work_root, + embedded_manifest=embedded_manifest, + ) + bundle_base = bundle_url_template.format( + plugin_id=plugin_id, version=plugin_version, + ) bundle_block = { - "url": f"{bundle_base_url}/{bundle_info['bundle_name']}", + "url": f"{bundle_base}/{bundle_info['bundle_name']}", "sha256": bundle_info["sha256"], "size_bytes": bundle_info["size_bytes"], } @@ -423,6 +446,8 @@ def main() -> int: write_json(manifest_path, manifest) sign_manifest(manifest_path, signing_key) + new_plugins.append({"id": plugin_id, "version": plugin_version}) + print( f"Built {plugin_id} v{plugin_version} -> {bundle_info['bundle_name']} ({bundle_info['sha256']})" ) @@ -530,6 +555,11 @@ def main() -> int: elif args.public_key: print(f"WARNING: Specified public key not found: {args.public_key}", file=sys.stderr) + if args.new_plugins_out: + new_plugins_path = pathlib.Path(args.new_plugins_out) + write_json(new_plugins_path, {"plugins": new_plugins}) + print(f"Wrote {len(new_plugins)} new plugin(s) to {new_plugins_path}") + if work_root.exists(): shutil.rmtree(work_root) From 46c0ce95d29d1acf63bd95d236a8341990dbfcc8 Mon Sep 17 00:00:00 2001 From: streamer45 Date: Thu, 29 Jan 2026 16:41:13 +0100 Subject: [PATCH 5/6] fix(ui): tweaks --- ui/src/App.tsx | 6 ++++- ui/src/views/PluginsView.styles.ts | 28 ++++++++++++++++++++++ ui/src/views/PluginsView.tsx | 14 ++++++++--- ui/src/views/plugins/MarketplacePanels.tsx | 17 +++++++------ 4 files changed, 54 insertions(+), 11 deletions(-) diff --git a/ui/src/App.tsx b/ui/src/App.tsx index 46c42bbb..ba59e3d3 100644 --- a/ui/src/App.tsx +++ b/ui/src/App.tsx @@ -118,7 +118,11 @@ const App: React.FC = () => { } /> } /> } /> - } /> + } + /> + } /> } /> diff --git a/ui/src/views/PluginsView.styles.ts b/ui/src/views/PluginsView.styles.ts index bba0759b..ab6e7686 100644 --- a/ui/src/views/PluginsView.styles.ts +++ 
b/ui/src/views/PluginsView.styles.ts @@ -201,12 +201,40 @@ export const MarketplaceGrid = styled.div` display: grid; grid-template-columns: minmax(240px, 1fr) minmax(320px, 2fr); gap: 16px; + align-items: start; @media (max-width: 1000px) { grid-template-columns: 1fr; } `; +export const DetailsSection = styled.section` + box-sizing: border-box; + border: 1px solid var(--sk-border); + border-radius: 12px; + padding: 16px; + background: var(--sk-bg); + display: flex; + flex-direction: column; + gap: 12px; + min-width: 0; + min-height: 420px; + position: relative; +`; + +export const DetailsLoadingOverlay = styled.div` + position: absolute; + inset: 0; + border-radius: 12px; + background: color-mix(in srgb, var(--sk-bg) 70%, transparent); + display: flex; + align-items: center; + justify-content: center; + z-index: 1; + color: var(--sk-text-muted); + font-size: 13px; +`; + export const MarketplaceList = styled.div` display: flex; flex-direction: column; diff --git a/ui/src/views/PluginsView.tsx b/ui/src/views/PluginsView.tsx index 24861d89..bdecdc45 100644 --- a/ui/src/views/PluginsView.tsx +++ b/ui/src/views/PluginsView.tsx @@ -2,7 +2,8 @@ // // SPDX-License-Identifier: MPL-2.0 -import React, { useState } from 'react'; +import React from 'react'; +import { useNavigate, useParams } from 'react-router-dom'; import { TabsContent, TabsList, TabsRoot, TabsTrigger } from '@/components/ui/Tabs'; import { usePermissions } from '@/hooks/usePermissions'; @@ -23,9 +24,16 @@ import { TitleRow, } from './PluginsView.styles'; +type PluginsTab = 'installed' | 'marketplace'; + +const isValidTab = (tab: string | undefined): tab is PluginsTab => + tab === 'installed' || tab === 'marketplace'; + const PluginsView: React.FC = () => { const { role, isAdmin } = usePermissions(); - const [activeTab, setActiveTab] = useState<'installed' | 'marketplace'>('installed'); + const { tab } = useParams<{ tab: string }>(); + const navigate = useNavigate(); + const activeTab: PluginsTab = 
isValidTab(tab) ? tab : 'installed'; return ( @@ -50,7 +58,7 @@ const PluginsView: React.FC = () => { setActiveTab(value as typeof activeTab)} + onValueChange={(value) => navigate(`/admin/plugins/${value}`, { replace: true })} > Installed diff --git a/ui/src/views/plugins/MarketplacePanels.tsx b/ui/src/views/plugins/MarketplacePanels.tsx index 60f497d7..aa65dd6a 100644 --- a/ui/src/views/plugins/MarketplacePanels.tsx +++ b/ui/src/views/plugins/MarketplacePanels.tsx @@ -12,6 +12,8 @@ import type { PluginSummary } from '@/types/types'; import { DetailsDescription, DetailsHeader, + DetailsLoadingOverlay, + DetailsSection, DetailsTitle, EmptyState, KeyLabel, @@ -222,7 +224,7 @@ export const MarketplaceDetailsPanel: React.FC = ( onInstallModelsChange, onInstall, }) => { - if (loading) return ; + if (!details && loading) return ; if (!details) return ; const installedVersion = installedPlugin?.version ?? null; @@ -245,7 +247,8 @@ export const MarketplaceDetailsPanel: React.FC = ( const installLabel = getInstallLabel({ installing, installedPlugin, installModels, hasModels }); return ( -
+ + {loading && Loading plugin details...} Details = ( {installLabel} -
+ ); }; const MarketplaceDetailsLoading: React.FC = () => ( -
+ Details Loading plugin details... -
+ ); const MarketplaceDetailsEmpty: React.FC = () => ( -
+ Details Select a plugin to view. -
+ ); const MarketplaceDetailsHeader: React.FC<{ details: MarketplacePluginDetails }> = ({ details }) => ( From 237651ca40fcb5d117d6ffb2c6c09f60f48d6813 Mon Sep 17 00:00:00 2001 From: streamer45 Date: Thu, 29 Jan 2026 16:51:43 +0100 Subject: [PATCH 6/6] fix(ui): more tweaks --- ui/src/context/ToastContext.tsx | 2 +- ui/src/stores/toastStore.ts | 8 +++++--- ui/src/views/plugins/MarketplacePanels.tsx | 18 +++++++++++------- 3 files changed, 17 insertions(+), 11 deletions(-) diff --git a/ui/src/context/ToastContext.tsx b/ui/src/context/ToastContext.tsx index db6d781f..0933cf7c 100644 --- a/ui/src/context/ToastContext.tsx +++ b/ui/src/context/ToastContext.tsx @@ -116,7 +116,7 @@ export const Toaster: React.FC<{
{t.type} -
{t.message}
+
{t.message}
{effectiveOnClose && ( effectiveOnClose(t.id)}> ✕ diff --git a/ui/src/stores/toastStore.ts b/ui/src/stores/toastStore.ts index 4383dc76..e9f0eb83 100644 --- a/ui/src/stores/toastStore.ts +++ b/ui/src/stores/toastStore.ts @@ -43,8 +43,10 @@ export const useToastStore = create((set, get) => ({ export function showToast(message: string, type: ToastType = 'info'): number { const id = useToastStore.getState().addToast(message, type); - window.setTimeout(() => { - useToastStore.getState().removeToast(id); - }, 3000); + if (type !== 'error') { + window.setTimeout(() => { + useToastStore.getState().removeToast(id); + }, 3000); + } return id; } diff --git a/ui/src/views/plugins/MarketplacePanels.tsx b/ui/src/views/plugins/MarketplacePanels.tsx index aa65dd6a..d0481922 100644 --- a/ui/src/views/plugins/MarketplacePanels.tsx +++ b/ui/src/views/plugins/MarketplacePanels.tsx @@ -31,6 +31,7 @@ import { SectionTitle, Select, SignatureValue, + SubSectionLabel, Subtle, WarningBox, } from '../PluginsView.styles'; @@ -265,7 +266,6 @@ export const MarketplaceDetailsPanel: React.FC = ( kind={details.manifest.kind} allowNativeMarketplace={details.allow_native_marketplace} /> - {requiresLicenseAcceptance && } = ({ enabled, checked, onChange }) => { if (!enabled) return null; return ( - onChange(Boolean(value))} - /> + <> + + License + onChange(Boolean(value))} + /> + ); };