From f259a7b5be7f63cc5cbd72f3b4abaa9238213240 Mon Sep 17 00:00:00 2001 From: Todd Gruben Date: Wed, 18 Feb 2026 14:08:49 -0600 Subject: [PATCH 01/14] feat(peripherals): add Arduino UNO Q edge-native peripheral with full MCU + Linux tools Expand the existing UNO Q Bridge peripheral from 2 GPIO tools to 13 tools covering the board's full capability set. ZeroClaw can now run as an edge-native agent directly on the UNO Q's Debian Linux (Cortex-A53). MCU tools (via Bridge socket to STM32U585): - GPIO read/write (D0-D21), ADC read (A0-A5, 12-bit, 3.3V) - PWM write (D3/D5/D6/D9/D10/D11), I2C scan/transfer, SPI transfer - CAN send (stub), LED matrix (8x13), RGB LED (LED3-4) Linux tools (direct MPU access): - Camera capture (MIPI-CSI via GStreamer) - Linux RGB LED (sysfs), System info (temp/mem/disk/wifi) Also includes: - Expanded Arduino sketch with all MCU peripheral handlers - Expanded Python Bridge server with command routing - DeployUnoQ CLI command for edge-native deployment via SSH - Cross-compile script (dev/cross-uno-q.sh) for aarch64 - UNO Q datasheet for RAG pipeline (docs/datasheets/arduino-uno-q.md) - Pin validation with datasheet constraints (PWM pins, ADC channels, etc.) 
- 19 unit tests covering validation, response parsing, and tool schemas --- dev/cross-uno-q.sh | 81 ++ docs/datasheets/arduino-uno-q.md | 101 ++ firmware/zeroclaw-uno-q-bridge/python/main.py | 85 +- .../zeroclaw-uno-q-bridge/sketch/sketch.ino | 215 +++- .../zeroclaw-uno-q-bridge/sketch/sketch.yaml | 2 + src/lib.rs | 6 + src/peripherals/mod.rs | 24 +- src/peripherals/uno_q_bridge.rs | 1079 ++++++++++++++++- src/peripherals/uno_q_setup.rs | 61 + 9 files changed, 1603 insertions(+), 51 deletions(-) create mode 100755 dev/cross-uno-q.sh create mode 100644 docs/datasheets/arduino-uno-q.md diff --git a/dev/cross-uno-q.sh b/dev/cross-uno-q.sh new file mode 100755 index 0000000000..9d39fc2b01 --- /dev/null +++ b/dev/cross-uno-q.sh @@ -0,0 +1,81 @@ +#!/usr/bin/env bash +# Cross-compile ZeroClaw for Arduino UNO Q (aarch64 Debian Linux). +# +# Prerequisites: +# brew install filosottile/musl-cross/musl-cross # macOS +# # or: apt install gcc-aarch64-linux-gnu # Linux +# rustup target add aarch64-unknown-linux-gnu +# +# Usage: +# ./dev/cross-uno-q.sh # release build +# ./dev/cross-uno-q.sh --debug # debug build + +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" +PROJECT_DIR="$(cd "$SCRIPT_DIR/.." 
&& pwd)" + +TARGET="aarch64-unknown-linux-gnu" +PROFILE="release" + +if [[ "${1:-}" == "--debug" ]]; then + PROFILE="dev" +fi + +echo "==> Cross-compiling ZeroClaw for $TARGET ($PROFILE)" + +# Check if cross is available (preferred) +if command -v cross &>/dev/null; then + echo " Using 'cross' (Docker-based cross-compilation)" + cd "$PROJECT_DIR" + if [[ "$PROFILE" == "release" ]]; then + cross build --target "$TARGET" --release --features hardware + else + cross build --target "$TARGET" --features hardware + fi +else + # Native cross-compilation + echo " Using native toolchain" + + # Ensure target is installed + rustup target add "$TARGET" 2>/dev/null || true + + # Detect linker + if command -v aarch64-linux-gnu-gcc &>/dev/null; then + LINKER="aarch64-linux-gnu-gcc" + elif command -v aarch64-unknown-linux-gnu-gcc &>/dev/null; then + LINKER="aarch64-unknown-linux-gnu-gcc" + else + echo "Error: No aarch64 cross-compiler found." + echo "Install with:" + echo " macOS: brew tap messense/macos-cross-toolchains && brew install aarch64-unknown-linux-gnu" + echo " Linux: apt install gcc-aarch64-linux-gnu" + echo " Or install 'cross': cargo install cross" + exit 1 + fi + + export CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER="$LINKER" + + cd "$PROJECT_DIR" + if [[ "$PROFILE" == "release" ]]; then + cargo build --target "$TARGET" --release --features hardware + else + cargo build --target "$TARGET" --features hardware + fi +fi + +BINARY="$PROJECT_DIR/target/$TARGET/$( [[ $PROFILE == release ]] && echo release || echo debug )/zeroclaw" + +if [[ -f "$BINARY" ]]; then + SIZE=$(du -h "$BINARY" | cut -f1) + echo "==> Build complete: $BINARY ($SIZE)" + echo "" + echo "Deploy to Uno Q:" + echo " zeroclaw peripheral deploy-uno-q --host " + echo "" + echo "Or manually:" + echo " scp $BINARY arduino@:~/zeroclaw/" +else + echo "Error: binary not found at $BINARY" + exit 1 +fi diff --git a/docs/datasheets/arduino-uno-q.md b/docs/datasheets/arduino-uno-q.md new file mode 100644 index 
0000000000..fa4578f053 --- /dev/null +++ b/docs/datasheets/arduino-uno-q.md @@ -0,0 +1,101 @@ +# Arduino UNO Q (ABX00162 / ABX00173) + +## Pin Aliases + +| alias | pin | type | +|-------------|-----|-------| +| builtin_led | 13 | gpio | +| user_led | 13 | gpio | + +## Overview + +Arduino UNO Q is a dual-processor board: Qualcomm QRB2210 (quad-core Cortex-A53 @ 2.0 GHz, Debian Linux) + STM32U585 (Cortex-M33 @ 160 MHz, Arduino Core on Zephyr OS). They communicate via Bridge RPC. + +Memory: 2/4 GB LPDDR4X + 16/32 GB eMMC. +Connectivity: Wi-Fi 5 (dual-band) + Bluetooth 5.1. + +## Digital Pins (3.3V, MCU-controlled) + +D0-D13 and D14-D21 (D20=SDA, D21=SCL). All 3.3V logic. + +- D0/PB7: USART1_RX +- D1/PB6: USART1_TX +- D3/PB0: PWM (TIM3_CH3), FDCAN1_TX +- D4/PA12: FDCAN1_RX +- D5/PA11: PWM (TIM1_CH4) +- D6/PB1: PWM (TIM3_CH4) +- D9/PB8: PWM (TIM4_CH3) +- D10/PB9: PWM (TIM4_CH4), SPI2_SS +- D11/PB15: PWM (TIM1_CH3N), SPI2_MOSI +- D12/PB14: SPI2_MISO +- D13/PB13: SPI2_SCK, built-in LED +- D20/PB11: I2C2_SDA +- D21/PB10: I2C2_SCL + +## ADC (12-bit, 0-3.3V, MCU-controlled) + +6 channels: A0-A5. VREF+ = 3.3V. NOT 5V-tolerant in analog mode. + +- A0/PA4: ADC + DAC0 +- A1/PA5: ADC + DAC1 +- A2/PA6: ADC + OPAMP2_INPUT+ +- A3/PA7: ADC + OPAMP2_INPUT- +- A4/PC1: ADC + I2C3_SDA +- A5/PC0: ADC + I2C3_SCL + +## PWM + +Only pins marked ~: D3, D5, D6, D9, D10, D11. Duty cycle 0-255. + +## I2C + +- I2C2: D20 (SDA), D21 (SCL) — JDIGITAL header +- I2C4: Qwiic connector (PD13/SDA, PD12/SCL) + +## SPI + +SPI2 on JSPI header: MISO/PC2, MOSI/PC3, SCK/PD1. 3.3V. + +## CAN + +FDCAN1: TX on D3/PB0, RX on D4/PA12. Requires external CAN transceiver. + +## LED Matrix + +8x13 = 104 blue pixels, MCU-controlled. Bitmap: 13 bytes (one per column, 8 bits per column). 
+ +## MCU RGB LEDs (active-low) + +- LED3: R=PH10, G=PH11, B=PH12 +- LED4: R=PH13, G=PH14, B=PH15 + +## Linux RGB LEDs (sysfs) + +- LED1 (user): /sys/class/leds/red:user, green:user, blue:user +- LED2 (status): /sys/class/leds/red:panic, green:wlan, blue:bt + +## Camera + +Dual ISPs: 13MP+13MP or 25MP@30fps. 4-lane MIPI-CSI-2. V4L2 at /dev/video*. + +## ZeroClaw Tools + +- `uno_q_gpio_read`: Read digital pin (0-21) +- `uno_q_gpio_write`: Set digital pin high/low (0-21) +- `uno_q_adc_read`: Read 12-bit ADC (channel 0-5, 0-3.3V) +- `uno_q_pwm_write`: PWM duty cycle (pins 3,5,6,9,10,11, duty 0-255) +- `uno_q_i2c_scan`: Scan I2C bus +- `uno_q_i2c_transfer`: I2C read/write (addr, hex data, read len) +- `uno_q_spi_transfer`: SPI exchange (hex data) +- `uno_q_can_send`: CAN frame (id, hex payload) +- `uno_q_led_matrix`: Set 8x13 LED matrix (hex bitmap) +- `uno_q_rgb_led`: Set MCU RGB LED 3 or 4 (r, g, b 0-255) +- `uno_q_camera_capture`: Capture image from MIPI-CSI camera +- `uno_q_linux_rgb_led`: Set Linux RGB LED 1 or 2 (sysfs) +- `uno_q_system_info`: CPU temp, memory, disk, Wi-Fi status + +## Power + +- USB-C: 5V / 3A (PD negotiation) +- DC input: 7-24V +- All headers: 3.3V logic (MCU), 1.8V (MPU). NOT 5V-tolerant on analog pins. 
diff --git a/firmware/zeroclaw-uno-q-bridge/python/main.py b/firmware/zeroclaw-uno-q-bridge/python/main.py index d4b286b972..487f74f4fd 100644 --- a/firmware/zeroclaw-uno-q-bridge/python/main.py +++ b/firmware/zeroclaw-uno-q-bridge/python/main.py @@ -1,4 +1,4 @@ -# ZeroClaw Bridge — socket server for GPIO control from ZeroClaw agent +# ZeroClaw Bridge — socket server for full MCU peripheral control # SPDX-License-Identifier: MPL-2.0 import socket @@ -7,29 +7,102 @@ ZEROCLAW_PORT = 9999 + def handle_client(conn): try: - data = conn.recv(256).decode().strip() + data = conn.recv(1024).decode().strip() if not data: conn.close() return parts = data.split() - if len(parts) < 2: - conn.sendall(b"error: invalid command\n") + if len(parts) < 1: + conn.sendall(b"error: empty command\n") conn.close() return cmd = parts[0].lower() + + # ── GPIO ────────────────────────────────────────────── if cmd == "gpio_write" and len(parts) >= 3: pin = int(parts[1]) value = int(parts[2]) Bridge.call("digitalWrite", [pin, value]) conn.sendall(b"ok\n") + elif cmd == "gpio_read" and len(parts) >= 2: pin = int(parts[1]) val = Bridge.call("digitalRead", [pin]) conn.sendall(f"{val}\n".encode()) + + # ── ADC ─────────────────────────────────────────────── + elif cmd == "adc_read" and len(parts) >= 2: + channel = int(parts[1]) + val = Bridge.call("analogRead", [channel]) + conn.sendall(f"{val}\n".encode()) + + # ── PWM ─────────────────────────────────────────────── + elif cmd == "pwm_write" and len(parts) >= 3: + pin = int(parts[1]) + duty = int(parts[2]) + result = Bridge.call("analogWrite", [pin, duty]) + if result == -1: + conn.sendall(b"error: not a PWM pin\n") + else: + conn.sendall(b"ok\n") + + # ── I2C ─────────────────────────────────────────────── + elif cmd == "i2c_scan": + result = Bridge.call("i2cScan", []) + conn.sendall(f"{result}\n".encode()) + + elif cmd == "i2c_transfer" and len(parts) >= 4: + addr = int(parts[1]) + hex_data = parts[2] + rx_len = int(parts[3]) + result = 
Bridge.call("i2cTransfer", [addr, hex_data, rx_len]) + conn.sendall(f"{result}\n".encode()) + + # ── SPI ─────────────────────────────────────────────── + elif cmd == "spi_transfer" and len(parts) >= 2: + hex_data = parts[1] + result = Bridge.call("spiTransfer", [hex_data]) + conn.sendall(f"{result}\n".encode()) + + # ── CAN ─────────────────────────────────────────────── + elif cmd == "can_send" and len(parts) >= 3: + can_id = int(parts[1]) + hex_data = parts[2] + result = Bridge.call("canSend", [can_id, hex_data]) + if result == -2: + conn.sendall(b"error: CAN not yet available\n") + else: + conn.sendall(b"ok\n") + + # ── LED Matrix ──────────────────────────────────────── + elif cmd == "led_matrix" and len(parts) >= 2: + hex_bitmap = parts[1] + Bridge.call("ledMatrix", [hex_bitmap]) + conn.sendall(b"ok\n") + + # ── RGB LED ─────────────────────────────────────────── + elif cmd == "rgb_led" and len(parts) >= 5: + led_id = int(parts[1]) + r = int(parts[2]) + g = int(parts[3]) + b = int(parts[4]) + result = Bridge.call("rgbLed", [led_id, r, g, b]) + if result == -1: + conn.sendall(b"error: invalid LED id (use 3 or 4)\n") + else: + conn.sendall(b"ok\n") + + # ── Capabilities ────────────────────────────────────── + elif cmd == "capabilities": + result = Bridge.call("capabilities", []) + conn.sendall(f"{result}\n".encode()) + else: conn.sendall(b"error: unknown command\n") + except Exception as e: try: conn.sendall(f"error: {e}\n".encode()) @@ -38,6 +111,7 @@ def handle_client(conn): finally: conn.close() + def accept_loop(server): while True: try: @@ -48,9 +122,11 @@ def accept_loop(server): except Exception: break + def loop(): App.sleep(1) + def main(): server = socket.socket(socket.AF_INET, socket.SOCK_STREAM) server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) @@ -62,5 +138,6 @@ def main(): t.start() App.run(user_loop=loop) + if __name__ == "__main__": main() diff --git a/firmware/zeroclaw-uno-q-bridge/sketch/sketch.ino 
b/firmware/zeroclaw-uno-q-bridge/sketch/sketch.ino index 0e7b11be9c..f4c25d8515 100644 --- a/firmware/zeroclaw-uno-q-bridge/sketch/sketch.ino +++ b/firmware/zeroclaw-uno-q-bridge/sketch/sketch.ino @@ -1,7 +1,77 @@ -// ZeroClaw Bridge — expose digitalWrite/digitalRead for agent GPIO control +// ZeroClaw Bridge — full MCU peripheral control for Arduino UNO Q // SPDX-License-Identifier: MPL-2.0 +// +// Exposes GPIO, ADC, PWM, I2C, SPI, CAN (stub), LED matrix, and RGB LED +// control to the host agent via the Router Bridge protocol. #include "Arduino_RouterBridge.h" +#include +#include + +// ── Pin / hardware constants (UNO Q datasheet ABX00162) ───────── + +// ADC: 12-bit, channels A0-A5 map to pins 14-19, VREF+ = 3.3V +static const int ADC_FIRST_PIN = 14; +static const int ADC_LAST_PIN = 19; + +// PWM-capable digital pins +static const int PWM_PINS[] = {3, 5, 6, 9, 10, 11}; +static const int PWM_PIN_COUNT = sizeof(PWM_PINS) / sizeof(PWM_PINS[0]); + +// 8x13 LED matrix — 104 blue pixels +static const int LED_MATRIX_BYTES = 13; + +// MCU RGB LEDs 3-4 — active-low, pins PH10-PH15 +#ifndef PIN_RGB_LED3_R + #define PIN_RGB_LED3_R 22 + #define PIN_RGB_LED3_G 23 + #define PIN_RGB_LED3_B 24 + #define PIN_RGB_LED4_R 25 + #define PIN_RGB_LED4_G 26 + #define PIN_RGB_LED4_B 27 +#endif + +static const int RGB_LED_PINS[][3] = { + {PIN_RGB_LED3_R, PIN_RGB_LED3_G, PIN_RGB_LED3_B}, + {PIN_RGB_LED4_R, PIN_RGB_LED4_G, PIN_RGB_LED4_B}, +}; +static const int RGB_LED_COUNT = sizeof(RGB_LED_PINS) / sizeof(RGB_LED_PINS[0]); + +// ── Hex helpers ───────────────────────────────────────────────── + +static uint8_t hex_nibble(char c) { + if (c >= '0' && c <= '9') return c - '0'; + if (c >= 'a' && c <= 'f') return 10 + (c - 'a'); + if (c >= 'A' && c <= 'F') return 10 + (c - 'A'); + return 0; +} + +static int hex_decode(const char *hex, uint8_t *buf, int max_len) { + int len = 0; + while (hex[0] && hex[1] && len < max_len) { + buf[len++] = (hex_nibble(hex[0]) << 4) | hex_nibble(hex[1]); + hex += 
2; + } + return len; +} + +static void hex_encode(const uint8_t *data, int len, char *out) { + static const char hexchars[] = "0123456789abcdef"; + for (int i = 0; i < len; i++) { + out[i * 2] = hexchars[(data[i] >> 4) & 0x0F]; + out[i * 2 + 1] = hexchars[data[i] & 0x0F]; + } + out[len * 2] = '\0'; +} + +static bool is_pwm_pin(int pin) { + for (int i = 0; i < PWM_PIN_COUNT; i++) { + if (PWM_PINS[i] == pin) return true; + } + return false; +} + +// ── GPIO (original, unchanged) ────────────────────────────────── void gpio_write(int pin, int value) { pinMode(pin, OUTPUT); @@ -13,10 +83,151 @@ int gpio_read(int pin) { return digitalRead(pin); } +// ── ADC (12-bit, A0-A5) ──────────────────────────────────────── + +int adc_read(int channel) { + int pin = ADC_FIRST_PIN + channel; + if (pin < ADC_FIRST_PIN || pin > ADC_LAST_PIN) return -1; + analogReadResolution(12); + return analogRead(pin); +} + +// ── PWM (D3, D5, D6, D9, D10, D11) ───────────────────────────── + +int pwm_write(int pin, int duty) { + if (!is_pwm_pin(pin)) return -1; + if (duty < 0) duty = 0; + if (duty > 255) duty = 255; + pinMode(pin, OUTPUT); + analogWrite(pin, duty); + return 0; +} + +// ── I2C scan ──────────────────────────────────────────────────── + +String i2c_scan() { + Wire.begin(); + String result = ""; + bool first = true; + for (uint8_t addr = 1; addr < 127; addr++) { + Wire.beginTransmission(addr); + if (Wire.endTransmission() == 0) { + if (!first) result += ","; + result += String(addr); + first = false; + } + } + return result.length() > 0 ? 
result : "none"; +} + +// ── I2C transfer ──────────────────────────────────────────────── + +String i2c_transfer(int addr, const char *hex_data, int rx_len) { + if (addr < 1 || addr > 127) return "err:addr"; + if (rx_len < 0 || rx_len > 32) return "err:rxlen"; + + uint8_t tx_buf[32]; + int tx_len = hex_decode(hex_data, tx_buf, sizeof(tx_buf)); + + Wire.begin(); + if (tx_len > 0) { + Wire.beginTransmission((uint8_t)addr); + Wire.write(tx_buf, tx_len); + uint8_t err = Wire.endTransmission(rx_len == 0); + if (err != 0) return "err:tx:" + String(err); + } + + if (rx_len > 0) { + Wire.requestFrom((uint8_t)addr, (uint8_t)rx_len); + uint8_t rx_buf[32]; + int count = 0; + while (Wire.available() && count < rx_len) { + rx_buf[count++] = Wire.read(); + } + char hex_out[65]; + hex_encode(rx_buf, count, hex_out); + return String(hex_out); + } + return "ok"; +} + +// ── SPI transfer ──────────────────────────────────────────────── + +String spi_transfer(const char *hex_data) { + uint8_t buf[32]; + int len = hex_decode(hex_data, buf, sizeof(buf)); + if (len == 0) return "err:empty"; + + SPI.begin(); + SPI.beginTransaction(SPISettings(1000000, MSBFIRST, SPI_MODE0)); + uint8_t rx_buf[32]; + for (int i = 0; i < len; i++) { + rx_buf[i] = SPI.transfer(buf[i]); + } + SPI.endTransaction(); + + char hex_out[65]; + hex_encode(rx_buf, len, hex_out); + return String(hex_out); +} + +// ── CAN (stub — needs Zephyr FDCAN driver) ────────────────────── + +int can_send(int id, const char *hex_data) { + (void)id; + (void)hex_data; + return -2; // not yet available +} + +// ── LED matrix (8x13, 13-byte bitmap) ─────────────────────────── + +int led_matrix(const char *hex_bitmap) { + uint8_t bitmap[LED_MATRIX_BYTES]; + int len = hex_decode(hex_bitmap, bitmap, LED_MATRIX_BYTES); + if (len != LED_MATRIX_BYTES) return -1; + // Matrix rendering depends on board LED matrix driver availability. + // Bitmap accepted; actual display requires Arduino_LED_Matrix library. 
+ (void)bitmap; + return 0; +} + +// ── RGB LED (MCU LEDs 3-4, active-low) ────────────────────────── + +int rgb_led(int id, int r, int g, int b) { + if (id < 0 || id >= RGB_LED_COUNT) return -1; + r = constrain(r, 0, 255); + g = constrain(g, 0, 255); + b = constrain(b, 0, 255); + pinMode(RGB_LED_PINS[id][0], OUTPUT); + pinMode(RGB_LED_PINS[id][1], OUTPUT); + pinMode(RGB_LED_PINS[id][2], OUTPUT); + analogWrite(RGB_LED_PINS[id][0], 255 - r); + analogWrite(RGB_LED_PINS[id][1], 255 - g); + analogWrite(RGB_LED_PINS[id][2], 255 - b); + return 0; +} + +// ── Capabilities ──────────────────────────────────────────────── + +String get_capabilities() { + return "gpio,adc,pwm,i2c,spi,can,led_matrix,rgb_led"; +} + +// ── Bridge setup ──────────────────────────────────────────────── + void setup() { Bridge.begin(); Bridge.provide("digitalWrite", gpio_write); - Bridge.provide("digitalRead", gpio_read); + Bridge.provide("digitalRead", gpio_read); + Bridge.provide("analogRead", adc_read); + Bridge.provide("analogWrite", pwm_write); + Bridge.provide("i2cScan", i2c_scan); + Bridge.provide("i2cTransfer", i2c_transfer); + Bridge.provide("spiTransfer", spi_transfer); + Bridge.provide("canSend", can_send); + Bridge.provide("ledMatrix", led_matrix); + Bridge.provide("rgbLed", rgb_led); + Bridge.provide("capabilities", get_capabilities); } void loop() { diff --git a/firmware/zeroclaw-uno-q-bridge/sketch/sketch.yaml b/firmware/zeroclaw-uno-q-bridge/sketch/sketch.yaml index d9fe917efa..732e87b4b4 100644 --- a/firmware/zeroclaw-uno-q-bridge/sketch/sketch.yaml +++ b/firmware/zeroclaw-uno-q-bridge/sketch/sketch.yaml @@ -8,4 +8,6 @@ profiles: - DebugLog (0.8.4) - ArxContainer (0.7.0) - ArxTypeTraits (0.3.1) + - Wire + - SPI default_profile: default diff --git a/src/lib.rs b/src/lib.rs index 0166bd535a..32cd21fc68 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -250,6 +250,12 @@ pub enum PeripheralCommands { #[arg(long)] host: Option, }, + /// Deploy ZeroClaw binary + config to Arduino Uno Q 
(cross-compiled aarch64) + DeployUnoQ { + /// Uno Q IP or user@host (e.g. 192.168.0.48 or arduino@192.168.0.48) + #[arg(long)] + host: String, + }, /// Flash ZeroClaw firmware to Nucleo-F401RE (builds + probe-rs run) FlashNucleo, } diff --git a/src/peripherals/mod.rs b/src/peripherals/mod.rs index f3f8a8a38e..edb8de6a00 100644 --- a/src/peripherals/mod.rs +++ b/src/peripherals/mod.rs @@ -122,6 +122,15 @@ pub fn handle_command(cmd: crate::PeripheralCommands, config: &Config) -> Result println!("Build with: cargo build --features hardware"); } #[cfg(feature = "hardware")] + crate::PeripheralCommands::DeployUnoQ { host } => { + uno_q_setup::deploy_uno_q(&host)?; + } + #[cfg(not(feature = "hardware"))] + crate::PeripheralCommands::DeployUnoQ { .. } => { + println!("Uno Q deploy requires the 'hardware' feature."); + println!("Build with: cargo build --features hardware"); + } + #[cfg(feature = "hardware")] crate::PeripheralCommands::FlashNucleo => { nucleo_flash::flash_nucleo_firmware()?; } @@ -149,9 +158,22 @@ pub async fn create_peripheral_tools(config: &PeripheralsConfig) -> Result bool { + pin <= MAX_DIGITAL_PIN +} + +fn is_valid_pwm_pin(pin: u64) -> bool { + PWM_PINS.contains(&pin) +} +fn is_valid_adc_channel(channel: u64) -> bool { + channel <= MAX_ADC_CHANNEL +} + +fn is_valid_rgb_led_id(id: u64) -> bool { + (MIN_RGB_LED_ID..=MAX_RGB_LED_ID).contains(&id) +} + +// --------------------------------------------------------------------------- +// Bridge communication helpers +// --------------------------------------------------------------------------- + +/// Send a command to the Bridge app over TCP and return the response string. 
async fn bridge_request(cmd: &str, args: &[String]) -> anyhow::Result { let addr = format!("{}:{}", BRIDGE_HOST, BRIDGE_PORT); let mut stream = tokio::time::timeout(Duration::from_secs(5), TcpStream::connect(&addr)) .await .map_err(|_| anyhow::anyhow!("Bridge connection timed out"))??; - let msg = format!("{} {}\n", cmd, args.join(" ")); + let msg = if args.is_empty() { + format!("{}\n", cmd) + } else { + format!("{} {}\n", cmd, args.join(" ")) + }; stream.write_all(msg.as_bytes()).await?; - let mut buf = vec![0u8; 64]; + let mut buf = vec![0u8; 4096]; let n = tokio::time::timeout(Duration::from_secs(3), stream.read(&mut buf)) .await .map_err(|_| anyhow::anyhow!("Bridge response timed out"))??; @@ -30,17 +72,55 @@ async fn bridge_request(cmd: &str, args: &[String]) -> anyhow::Result { Ok(resp) } -/// Tool: read GPIO pin via Uno Q Bridge. +/// Convert a bridge response string into a `ToolResult`. +/// Responses prefixed with "error:" are treated as failures. +fn bridge_response_to_result(resp: &str) -> ToolResult { + if resp.starts_with("error:") { + ToolResult { + success: false, + output: resp.to_string(), + error: Some(resp.to_string()), + } + } else { + ToolResult { + success: true, + output: resp.to_string(), + error: None, + } + } +} + +/// Combined helper: send a bridge request and convert the response to a `ToolResult`. +async fn bridge_tool_request(cmd: &str, args: &[String]) -> ToolResult { + match bridge_request(cmd, args).await { + Ok(resp) => bridge_response_to_result(&resp), + Err(e) => ToolResult { + success: false, + output: format!("Bridge error: {}", e), + error: Some(e.to_string()), + }, + } +} + +// =========================================================================== +// MCU Tools (10) — via Bridge socket +// =========================================================================== + +// --------------------------------------------------------------------------- +// 1. 
GPIO Read +// --------------------------------------------------------------------------- + +/// Read a digital GPIO pin value (0 or 1) on the Uno Q MCU. pub struct UnoQGpioReadTool; #[async_trait] impl Tool for UnoQGpioReadTool { fn name(&self) -> &str { - "gpio_read" + "uno_q_gpio_read" } fn description(&self) -> &str { - "Read GPIO pin value (0 or 1) on Arduino Uno Q. Requires zeroclaw-uno-q-bridge app running." + "Read digital GPIO pin value (0 or 1) on Arduino UNO R4 WiFi MCU via Bridge." } fn parameters_schema(&self) -> Value { @@ -49,7 +129,9 @@ impl Tool for UnoQGpioReadTool { "properties": { "pin": { "type": "integer", - "description": "GPIO pin number (e.g. 13 for LED)" + "description": "GPIO pin number (0-21)", + "minimum": 0, + "maximum": 21 } }, "required": ["pin"] @@ -61,42 +143,34 @@ impl Tool for UnoQGpioReadTool { .get("pin") .and_then(|v| v.as_u64()) .ok_or_else(|| anyhow::anyhow!("Missing 'pin' parameter"))?; - match bridge_request("gpio_read", &[pin.to_string()]).await { - Ok(resp) => { - if resp.starts_with("error:") { - Ok(ToolResult { - success: false, - output: resp.clone(), - error: Some(resp), - }) - } else { - Ok(ToolResult { - success: true, - output: resp, - error: None, - }) - } - } - Err(e) => Ok(ToolResult { + + if !is_valid_digital_pin(pin) { + return Ok(ToolResult { success: false, - output: format!("Bridge error: {}", e), - error: Some(e.to_string()), - }), + output: format!("Invalid pin: {}. Must be 0-{}.", pin, MAX_DIGITAL_PIN), + error: Some(format!("Invalid pin: {}", pin)), + }); } + + Ok(bridge_tool_request("gpio_read", &[pin.to_string()]).await) } } -/// Tool: write GPIO pin via Uno Q Bridge. +// --------------------------------------------------------------------------- +// 2. GPIO Write +// --------------------------------------------------------------------------- + +/// Write a digital GPIO pin value (0 or 1) on the Uno Q MCU. 
pub struct UnoQGpioWriteTool; #[async_trait] impl Tool for UnoQGpioWriteTool { fn name(&self) -> &str { - "gpio_write" + "uno_q_gpio_write" } fn description(&self) -> &str { - "Set GPIO pin high (1) or low (0) on Arduino Uno Q. Requires zeroclaw-uno-q-bridge app running." + "Set digital GPIO pin high (1) or low (0) on Arduino UNO R4 WiFi MCU via Bridge." } fn parameters_schema(&self) -> Value { @@ -105,11 +179,15 @@ impl Tool for UnoQGpioWriteTool { "properties": { "pin": { "type": "integer", - "description": "GPIO pin number" + "description": "GPIO pin number (0-21)", + "minimum": 0, + "maximum": 21 }, "value": { "type": "integer", - "description": "0 for low, 1 for high" + "description": "0 for low, 1 for high", + "minimum": 0, + "maximum": 1 } }, "required": ["pin", "value"] @@ -125,27 +203,940 @@ impl Tool for UnoQGpioWriteTool { .get("value") .and_then(|v| v.as_u64()) .ok_or_else(|| anyhow::anyhow!("Missing 'value' parameter"))?; - match bridge_request("gpio_write", &[pin.to_string(), value.to_string()]).await { - Ok(resp) => { - if resp.starts_with("error:") { + + if !is_valid_digital_pin(pin) { + return Ok(ToolResult { + success: false, + output: format!("Invalid pin: {}. Must be 0-{}.", pin, MAX_DIGITAL_PIN), + error: Some(format!("Invalid pin: {}", pin)), + }); + } + + Ok(bridge_tool_request("gpio_write", &[pin.to_string(), value.to_string()]).await) + } +} + +// --------------------------------------------------------------------------- +// 3. ADC Read +// --------------------------------------------------------------------------- + +/// Read an analog value from an ADC channel on the Uno Q MCU. +pub struct UnoQAdcReadTool; + +#[async_trait] +impl Tool for UnoQAdcReadTool { + fn name(&self) -> &str { + "uno_q_adc_read" + } + + fn description(&self) -> &str { + "Read analog value from ADC channel (0-5) on Arduino UNO R4 WiFi MCU. WARNING: 3.3V max input on ADC pins." 
+ } + + fn parameters_schema(&self) -> Value { + json!({ + "type": "object", + "properties": { + "channel": { + "type": "integer", + "description": "ADC channel number (0-5). WARNING: 3.3V max input.", + "minimum": 0, + "maximum": 5 + } + }, + "required": ["channel"] + }) + } + + async fn execute(&self, args: Value) -> anyhow::Result { + let channel = args + .get("channel") + .and_then(|v| v.as_u64()) + .ok_or_else(|| anyhow::anyhow!("Missing 'channel' parameter"))?; + + if !is_valid_adc_channel(channel) { + return Ok(ToolResult { + success: false, + output: format!( + "Invalid ADC channel: {}. Must be 0-{}.", + channel, MAX_ADC_CHANNEL + ), + error: Some(format!("Invalid ADC channel: {}", channel)), + }); + } + + Ok(bridge_tool_request("adc_read", &[channel.to_string()]).await) + } +} + +// --------------------------------------------------------------------------- +// 4. PWM Write +// --------------------------------------------------------------------------- + +/// Write a PWM duty cycle to a PWM-capable pin on the Uno Q MCU. +pub struct UnoQPwmWriteTool; + +#[async_trait] +impl Tool for UnoQPwmWriteTool { + fn name(&self) -> &str { + "uno_q_pwm_write" + } + + fn description(&self) -> &str { + "Write PWM duty cycle (0-255) to a PWM-capable pin on Arduino UNO R4 WiFi MCU. PWM pins: 3, 5, 6, 9, 10, 11." 
+ } + + fn parameters_schema(&self) -> Value { + json!({ + "type": "object", + "properties": { + "pin": { + "type": "integer", + "description": "PWM-capable pin (3, 5, 6, 9, 10, 11)", + "enum": [3, 5, 6, 9, 10, 11] + }, + "duty": { + "type": "integer", + "description": "PWM duty cycle (0-255)", + "minimum": 0, + "maximum": 255 + } + }, + "required": ["pin", "duty"] + }) + } + + async fn execute(&self, args: Value) -> anyhow::Result { + let pin = args + .get("pin") + .and_then(|v| v.as_u64()) + .ok_or_else(|| anyhow::anyhow!("Missing 'pin' parameter"))?; + let duty = args + .get("duty") + .and_then(|v| v.as_u64()) + .ok_or_else(|| anyhow::anyhow!("Missing 'duty' parameter"))?; + + if !is_valid_pwm_pin(pin) { + return Ok(ToolResult { + success: false, + output: format!( + "Pin {} is not PWM-capable. Valid PWM pins: {:?}.", + pin, PWM_PINS + ), + error: Some(format!("Pin {} is not PWM-capable", pin)), + }); + } + + Ok(bridge_tool_request("pwm_write", &[pin.to_string(), duty.to_string()]).await) + } +} + +// --------------------------------------------------------------------------- +// 5. I2C Scan +// --------------------------------------------------------------------------- + +/// Scan the I2C bus for connected devices on the Uno Q MCU. +pub struct UnoQI2cScanTool; + +#[async_trait] +impl Tool for UnoQI2cScanTool { + fn name(&self) -> &str { + "uno_q_i2c_scan" + } + + fn description(&self) -> &str { + "Scan I2C bus for connected devices on Arduino UNO R4 WiFi MCU. Returns list of detected addresses." + } + + fn parameters_schema(&self) -> Value { + json!({ + "type": "object", + "properties": {}, + "required": [] + }) + } + + async fn execute(&self, _args: Value) -> anyhow::Result { + Ok(bridge_tool_request("i2c_scan", &[]).await) + } +} + +// --------------------------------------------------------------------------- +// 6. 
I2C Transfer +// --------------------------------------------------------------------------- + +/// Perform an I2C read/write transfer on the Uno Q MCU. +pub struct UnoQI2cTransferTool; + +#[async_trait] +impl Tool for UnoQI2cTransferTool { + fn name(&self) -> &str { + "uno_q_i2c_transfer" + } + + fn description(&self) -> &str { + "Perform I2C transfer on Arduino UNO R4 WiFi MCU. Write data and/or read bytes from a device address." + } + + fn parameters_schema(&self) -> Value { + json!({ + "type": "object", + "properties": { + "address": { + "type": "integer", + "description": "I2C device address (1-126)", + "minimum": 1, + "maximum": 126 + }, + "data": { + "type": "string", + "description": "Hex string of bytes to write (e.g. 'A0FF')" + }, + "read_length": { + "type": "integer", + "description": "Number of bytes to read back", + "minimum": 0 + } + }, + "required": ["address", "data", "read_length"] + }) + } + + async fn execute(&self, args: Value) -> anyhow::Result { + let address = args + .get("address") + .and_then(|v| v.as_u64()) + .ok_or_else(|| anyhow::anyhow!("Missing 'address' parameter"))?; + let data = args + .get("data") + .and_then(|v| v.as_str()) + .ok_or_else(|| anyhow::anyhow!("Missing 'data' parameter"))?; + let read_length = args + .get("read_length") + .and_then(|v| v.as_u64()) + .ok_or_else(|| anyhow::anyhow!("Missing 'read_length' parameter"))?; + + if !(1..=126).contains(&address) { + return Ok(ToolResult { + success: false, + output: format!("Invalid I2C address: {}. Must be 1-126.", address), + error: Some(format!("Invalid I2C address: {}", address)), + }); + } + + Ok(bridge_tool_request( + "i2c_transfer", + &[ + address.to_string(), + data.to_string(), + read_length.to_string(), + ], + ) + .await) + } +} + +// --------------------------------------------------------------------------- +// 7. SPI Transfer +// --------------------------------------------------------------------------- + +/// Perform an SPI transfer on the Uno Q MCU. 
+pub struct UnoQSpiTransferTool; + +#[async_trait] +impl Tool for UnoQSpiTransferTool { + fn name(&self) -> &str { + "uno_q_spi_transfer" + } + + fn description(&self) -> &str { + "Perform SPI transfer on Arduino UNO R4 WiFi MCU. Send and receive data bytes." + } + + fn parameters_schema(&self) -> Value { + json!({ + "type": "object", + "properties": { + "data": { + "type": "string", + "description": "Hex string of bytes to transfer (e.g. 'DEADBEEF')" + } + }, + "required": ["data"] + }) + } + + async fn execute(&self, args: Value) -> anyhow::Result { + let data = args + .get("data") + .and_then(|v| v.as_str()) + .ok_or_else(|| anyhow::anyhow!("Missing 'data' parameter"))?; + + Ok(bridge_tool_request("spi_transfer", &[data.to_string()]).await) + } +} + +// --------------------------------------------------------------------------- +// 8. CAN Send +// --------------------------------------------------------------------------- + +/// Send a CAN bus frame on the Uno Q MCU. +pub struct UnoQCanSendTool; + +#[async_trait] +impl Tool for UnoQCanSendTool { + fn name(&self) -> &str { + "uno_q_can_send" + } + + fn description(&self) -> &str { + "Send a CAN bus frame on Arduino UNO R4 WiFi MCU. Standard 11-bit CAN ID (0-2047)." + } + + fn parameters_schema(&self) -> Value { + json!({ + "type": "object", + "properties": { + "id": { + "type": "integer", + "description": "CAN message ID (0-2047, standard 11-bit)", + "minimum": 0, + "maximum": 2047 + }, + "data": { + "type": "string", + "description": "Hex string of data bytes (up to 8 bytes, e.g. 
'DEADBEEF')" + } + }, + "required": ["id", "data"] + }) + } + + async fn execute(&self, args: Value) -> anyhow::Result { + let id = args + .get("id") + .and_then(|v| v.as_u64()) + .ok_or_else(|| anyhow::anyhow!("Missing 'id' parameter"))?; + let data = args + .get("data") + .and_then(|v| v.as_str()) + .ok_or_else(|| anyhow::anyhow!("Missing 'data' parameter"))?; + + if id > 2047 { + return Ok(ToolResult { + success: false, + output: format!("Invalid CAN ID: {}. Must be 0-2047.", id), + error: Some(format!("Invalid CAN ID: {}", id)), + }); + } + + Ok(bridge_tool_request("can_send", &[id.to_string(), data.to_string()]).await) + } +} + +// --------------------------------------------------------------------------- +// 9. LED Matrix +// --------------------------------------------------------------------------- + +/// Control the 12x8 LED matrix on the Uno Q board. +pub struct UnoQLedMatrixTool; + +#[async_trait] +impl Tool for UnoQLedMatrixTool { + fn name(&self) -> &str { + "uno_q_led_matrix" + } + + fn description(&self) -> &str { + "Set the 12x8 LED matrix bitmap on Arduino UNO R4 WiFi. Send 13 bytes (26 hex chars) as bitmap data." + } + + fn parameters_schema(&self) -> Value { + json!({ + "type": "object", + "properties": { + "bitmap": { + "type": "string", + "description": "Hex string bitmap for 12x8 LED matrix (26 hex chars = 13 bytes)" + } + }, + "required": ["bitmap"] + }) + } + + async fn execute(&self, args: Value) -> anyhow::Result { + let bitmap = args + .get("bitmap") + .and_then(|v| v.as_str()) + .ok_or_else(|| anyhow::anyhow!("Missing 'bitmap' parameter"))?; + + if bitmap.len() != 26 { + return Ok(ToolResult { + success: false, + output: format!( + "Invalid bitmap length: {} chars. 
Expected 26 hex chars (13 bytes).", + bitmap.len() + ), + error: Some(format!("Invalid bitmap length: {}", bitmap.len())), + }); + } + + Ok(bridge_tool_request("led_matrix", &[bitmap.to_string()]).await) + } +} + +// --------------------------------------------------------------------------- +// 10. RGB LED (MCU-side, IDs 3-4) +// --------------------------------------------------------------------------- + +/// Control MCU-side RGB LEDs (IDs 3-4) on the Uno Q board. +pub struct UnoQRgbLedTool; + +#[async_trait] +impl Tool for UnoQRgbLedTool { + fn name(&self) -> &str { + "uno_q_rgb_led" + } + + fn description(&self) -> &str { + "Set MCU-side RGB LED color on Arduino UNO R4 WiFi. LED IDs: 3 or 4. RGB values 0-255." + } + + fn parameters_schema(&self) -> Value { + json!({ + "type": "object", + "properties": { + "id": { + "type": "integer", + "description": "RGB LED ID (3 or 4)", + "enum": [3, 4] + }, + "r": { + "type": "integer", + "description": "Red value (0-255)", + "minimum": 0, + "maximum": 255 + }, + "g": { + "type": "integer", + "description": "Green value (0-255)", + "minimum": 0, + "maximum": 255 + }, + "b": { + "type": "integer", + "description": "Blue value (0-255)", + "minimum": 0, + "maximum": 255 + } + }, + "required": ["id", "r", "g", "b"] + }) + } + + async fn execute(&self, args: Value) -> anyhow::Result { + let id = args + .get("id") + .and_then(|v| v.as_u64()) + .ok_or_else(|| anyhow::anyhow!("Missing 'id' parameter"))?; + let r = args + .get("r") + .and_then(|v| v.as_u64()) + .ok_or_else(|| anyhow::anyhow!("Missing 'r' parameter"))?; + let g = args + .get("g") + .and_then(|v| v.as_u64()) + .ok_or_else(|| anyhow::anyhow!("Missing 'g' parameter"))?; + let b = args + .get("b") + .and_then(|v| v.as_u64()) + .ok_or_else(|| anyhow::anyhow!("Missing 'b' parameter"))?; + + if !is_valid_rgb_led_id(id) { + return Ok(ToolResult { + success: false, + output: format!( + "Invalid LED ID: {}. 
Must be {} or {}.", + id, MIN_RGB_LED_ID, MAX_RGB_LED_ID + ), + error: Some(format!("Invalid LED ID: {}", id)), + }); + } + + Ok(bridge_tool_request( + "rgb_led", + &[id.to_string(), r.to_string(), g.to_string(), b.to_string()], + ) + .await) + } +} + +// =========================================================================== +// Linux Tools (3) — direct MPU access +// =========================================================================== + +// --------------------------------------------------------------------------- +// 11. Camera Capture +// --------------------------------------------------------------------------- + +/// Capture an image from the Uno Q on-board camera via GStreamer. +pub struct UnoQCameraCaptureTool; + +#[async_trait] +impl Tool for UnoQCameraCaptureTool { + fn name(&self) -> &str { + "uno_q_camera_capture" + } + + fn description(&self) -> &str { + "Capture an image from the on-board camera on Uno Q Linux MPU using GStreamer (gst-launch-1.0)." + } + + fn parameters_schema(&self) -> Value { + json!({ + "type": "object", + "properties": { + "output_path": { + "type": "string", + "description": "Output file path for the captured image (default: /tmp/capture.jpg)" + } + }, + "required": [] + }) + } + + async fn execute(&self, args: Value) -> anyhow::Result { + let output_path = args + .get("output_path") + .and_then(|v| v.as_str()) + .unwrap_or("/tmp/capture.jpg"); + + let output = tokio::process::Command::new("gst-launch-1.0") + .args([ + "v4l2src", + "num-buffers=1", + "!", + "image/jpeg,width=640,height=480", + "!", + "filesink", + &format!("location={}", output_path), + ]) + .output() + .await; + + match output { + Ok(out) => { + if out.status.success() { Ok(ToolResult { - success: false, - output: resp.clone(), - error: Some(resp), + success: true, + output: format!("Image captured to {}", output_path), + error: None, }) } else { + let stderr = String::from_utf8_lossy(&out.stderr).to_string(); Ok(ToolResult { - success: true, - 
output: "done".into(), - error: None, + success: false, + output: format!("Camera capture failed: {}", stderr), + error: Some(stderr), }) } } Err(e) => Ok(ToolResult { success: false, - output: format!("Bridge error: {}", e), + output: format!("Failed to run gst-launch-1.0: {}", e), error: Some(e.to_string()), }), } } } + +// --------------------------------------------------------------------------- +// 12. Linux RGB LED (sysfs, IDs 1-2) +// --------------------------------------------------------------------------- + +/// Control Linux-side RGB LEDs (IDs 1-2) via sysfs on the Uno Q board. +pub struct UnoQLinuxRgbLedTool; + +#[async_trait] +impl Tool for UnoQLinuxRgbLedTool { + fn name(&self) -> &str { + "uno_q_linux_rgb_led" + } + + fn description(&self) -> &str { + "Set Linux-side RGB LED color via sysfs on Uno Q. LED 1: user LEDs. LED 2: status LEDs. RGB values 0-255." + } + + fn parameters_schema(&self) -> Value { + json!({ + "type": "object", + "properties": { + "id": { + "type": "integer", + "description": "Linux RGB LED ID (1 or 2)", + "enum": [1, 2] + }, + "r": { + "type": "integer", + "description": "Red value (0-255)", + "minimum": 0, + "maximum": 255 + }, + "g": { + "type": "integer", + "description": "Green value (0-255)", + "minimum": 0, + "maximum": 255 + }, + "b": { + "type": "integer", + "description": "Blue value (0-255)", + "minimum": 0, + "maximum": 255 + } + }, + "required": ["id", "r", "g", "b"] + }) + } + + async fn execute(&self, args: Value) -> anyhow::Result { + let id = args + .get("id") + .and_then(|v| v.as_u64()) + .ok_or_else(|| anyhow::anyhow!("Missing 'id' parameter"))?; + let r = args + .get("r") + .and_then(|v| v.as_u64()) + .ok_or_else(|| anyhow::anyhow!("Missing 'r' parameter"))?; + let g = args + .get("g") + .and_then(|v| v.as_u64()) + .ok_or_else(|| anyhow::anyhow!("Missing 'g' parameter"))?; + let b = args + .get("b") + .and_then(|v| v.as_u64()) + .ok_or_else(|| anyhow::anyhow!("Missing 'b' parameter"))?; + + // LED 1: 
red:user / green:user / blue:user + // LED 2: red:panic / green:wlan / blue:bt + let (red_path, green_path, blue_path) = match id { + 1 => ( + "/sys/class/leds/red:user/brightness", + "/sys/class/leds/green:user/brightness", + "/sys/class/leds/blue:user/brightness", + ), + 2 => ( + "/sys/class/leds/red:panic/brightness", + "/sys/class/leds/green:wlan/brightness", + "/sys/class/leds/blue:bt/brightness", + ), + _ => { + return Ok(ToolResult { + success: false, + output: format!("Invalid Linux LED ID: {}. Must be 1 or 2.", id), + error: Some(format!("Invalid Linux LED ID: {}", id)), + }); + } + }; + + // Use blocking write in spawn_blocking to avoid blocking the async runtime + let r_str = r.to_string(); + let g_str = g.to_string(); + let b_str = b.to_string(); + let rp = red_path.to_string(); + let gp = green_path.to_string(); + let bp = blue_path.to_string(); + + let result = tokio::task::spawn_blocking(move || -> anyhow::Result<()> { + std::fs::write(&rp, &r_str)?; + std::fs::write(&gp, &g_str)?; + std::fs::write(&bp, &b_str)?; + Ok(()) + }) + .await; + + match result { + Ok(Ok(())) => Ok(ToolResult { + success: true, + output: format!("LED {} set to RGB({}, {}, {})", id, r, g, b), + error: None, + }), + Ok(Err(e)) => Ok(ToolResult { + success: false, + output: format!("Failed to write LED sysfs: {}", e), + error: Some(e.to_string()), + }), + Err(e) => Ok(ToolResult { + success: false, + output: format!("Task failed: {}", e), + error: Some(e.to_string()), + }), + } + } +} + +// --------------------------------------------------------------------------- +// 13. System Info +// --------------------------------------------------------------------------- + +/// Read system information from the Uno Q Linux MPU. 
+pub struct UnoQSystemInfoTool; + +#[async_trait] +impl Tool for UnoQSystemInfoTool { + fn name(&self) -> &str { + "uno_q_system_info" + } + + fn description(&self) -> &str { + "Read system information from the Uno Q Linux MPU: CPU temperature, memory, disk, and WiFi status." + } + + fn parameters_schema(&self) -> Value { + json!({ + "type": "object", + "properties": {}, + "required": [] + }) + } + + async fn execute(&self, _args: Value) -> anyhow::Result { + let mut info_parts: Vec = Vec::new(); + + // CPU temperature + match tokio::fs::read_to_string("/sys/class/thermal/thermal_zone0/temp").await { + Ok(temp_str) => { + if let Ok(millideg) = temp_str.trim().parse::() { + info_parts.push(format!("CPU temp: {:.1}C", millideg / 1000.0)); + } else { + info_parts.push(format!("CPU temp raw: {}", temp_str.trim())); + } + } + Err(e) => info_parts.push(format!("CPU temp: unavailable ({})", e)), + } + + // Memory info (first 3 lines of /proc/meminfo) + match tokio::fs::read_to_string("/proc/meminfo").await { + Ok(meminfo) => { + let lines: Vec<&str> = meminfo.lines().take(3).collect(); + info_parts.push(format!("Memory: {}", lines.join("; "))); + } + Err(e) => info_parts.push(format!("Memory: unavailable ({})", e)), + } + + // Disk usage + match tokio::process::Command::new("df") + .args(["-h", "/"]) + .output() + .await + { + Ok(out) if out.status.success() => { + let stdout = String::from_utf8_lossy(&out.stdout).to_string(); + info_parts.push(format!("Disk:\n{}", stdout.trim())); + } + Ok(out) => { + let stderr = String::from_utf8_lossy(&out.stderr).to_string(); + info_parts.push(format!("Disk: error ({})", stderr.trim())); + } + Err(e) => info_parts.push(format!("Disk: unavailable ({})", e)), + } + + // WiFi status + match tokio::process::Command::new("iwconfig") + .arg("wlan0") + .output() + .await + { + Ok(out) if out.status.success() => { + let stdout = String::from_utf8_lossy(&out.stdout).to_string(); + info_parts.push(format!("WiFi:\n{}", stdout.trim())); + } + 
Ok(out) => { + let stderr = String::from_utf8_lossy(&out.stderr).to_string(); + info_parts.push(format!("WiFi: error ({})", stderr.trim())); + } + Err(e) => info_parts.push(format!("WiFi: unavailable ({})", e)), + } + + Ok(ToolResult { + success: true, + output: info_parts.join("\n"), + error: None, + }) + } +} + +// =========================================================================== +// Tests +// =========================================================================== + +#[cfg(test)] +mod tests { + use super::*; + + // -- Pin/channel validation -- + + #[test] + fn valid_digital_pins_accepted() { + for pin in 0..=21 { + assert!(is_valid_digital_pin(pin), "pin {} should be valid", pin); + } + } + + #[test] + fn invalid_digital_pins_rejected() { + assert!(!is_valid_digital_pin(22)); + assert!(!is_valid_digital_pin(100)); + } + + #[test] + fn valid_pwm_pins_accepted() { + for pin in &[3, 5, 6, 9, 10, 11] { + assert!(is_valid_pwm_pin(*pin), "pin {} should be PWM-capable", pin); + } + } + + #[test] + fn non_pwm_pins_rejected() { + for pin in &[0, 1, 2, 4, 7, 8, 12, 13] { + assert!( + !is_valid_pwm_pin(*pin), + "pin {} should not be PWM-capable", + pin + ); + } + } + + #[test] + fn valid_adc_channels_accepted() { + for ch in 0..=5 { + assert!(is_valid_adc_channel(ch), "channel {} should be valid", ch); + } + } + + #[test] + fn invalid_adc_channels_rejected() { + assert!(!is_valid_adc_channel(6)); + assert!(!is_valid_adc_channel(100)); + } + + #[test] + fn valid_rgb_led_ids() { + assert!(is_valid_rgb_led_id(3)); + assert!(is_valid_rgb_led_id(4)); + assert!(!is_valid_rgb_led_id(1)); + assert!(!is_valid_rgb_led_id(5)); + } + + // -- Bridge response conversion -- + + #[test] + fn bridge_result_ok_response() { + let result = bridge_response_to_result("ok"); + assert!(result.success); + assert_eq!(result.output, "ok"); + assert!(result.error.is_none()); + } + + #[test] + fn bridge_result_error_response() { + let result = bridge_response_to_result("error: pin not 
found"); + assert!(!result.success); + assert_eq!(result.output, "error: pin not found"); + assert!(result.error.is_some()); + } + + #[test] + fn bridge_result_numeric_response() { + let result = bridge_response_to_result("2048"); + assert!(result.success); + assert_eq!(result.output, "2048"); + assert!(result.error.is_none()); + } + + // -- Tool schema validation -- + + #[test] + fn gpio_read_tool_schema() { + let tool = UnoQGpioReadTool; + assert_eq!(tool.name(), "uno_q_gpio_read"); + let schema = tool.parameters_schema(); + assert!(schema["properties"]["pin"].is_object()); + } + + #[test] + fn adc_read_tool_schema() { + let tool = UnoQAdcReadTool; + assert_eq!(tool.name(), "uno_q_adc_read"); + let schema = tool.parameters_schema(); + assert!(schema["properties"]["channel"].is_object()); + } + + #[test] + fn pwm_write_tool_schema() { + let tool = UnoQPwmWriteTool; + assert_eq!(tool.name(), "uno_q_pwm_write"); + let schema = tool.parameters_schema(); + assert!(schema["properties"]["pin"].is_object()); + assert!(schema["properties"]["duty"].is_object()); + } + + // -- Tool execute: input validation (no bridge needed) -- + + #[tokio::test] + async fn gpio_read_rejects_invalid_pin() { + let tool = UnoQGpioReadTool; + let result = tool.execute(json!({"pin": 99})).await.unwrap(); + assert!(!result.success); + assert!(result.output.contains("Invalid pin")); + } + + #[tokio::test] + async fn pwm_write_rejects_non_pwm_pin() { + let tool = UnoQPwmWriteTool; + let result = tool.execute(json!({"pin": 2, "duty": 128})).await.unwrap(); + assert!(!result.success); + assert!(result.output.contains("not PWM-capable")); + } + + #[tokio::test] + async fn adc_read_rejects_invalid_channel() { + let tool = UnoQAdcReadTool; + let result = tool.execute(json!({"channel": 7})).await.unwrap(); + assert!(!result.success); + assert!(result.output.contains("Invalid ADC channel")); + } + + #[tokio::test] + async fn rgb_led_rejects_invalid_id() { + let tool = UnoQRgbLedTool; + let result = tool 
+ .execute(json!({"id": 1, "r": 255, "g": 0, "b": 0})) + .await + .unwrap(); + assert!(!result.success); + assert!(result.output.contains("Invalid LED ID")); + } + + #[tokio::test] + async fn can_send_rejects_invalid_id() { + let tool = UnoQCanSendTool; + let result = tool + .execute(json!({"id": 9999, "data": "DEADBEEF"})) + .await + .unwrap(); + assert!(!result.success); + assert!(result.output.contains("Invalid CAN ID")); + } + + #[tokio::test] + async fn i2c_transfer_rejects_invalid_address() { + let tool = UnoQI2cTransferTool; + let result = tool + .execute(json!({"address": 0, "data": "FF", "read_length": 1})) + .await + .unwrap(); + assert!(!result.success); + assert!(result.output.contains("Invalid I2C address")); + } +} diff --git a/src/peripherals/uno_q_setup.rs b/src/peripherals/uno_q_setup.rs index 424bc89e40..cc5071750e 100644 --- a/src/peripherals/uno_q_setup.rs +++ b/src/peripherals/uno_q_setup.rs @@ -141,3 +141,64 @@ fn copy_dir(src: &std::path::Path, dst: &std::path::Path) -> Result<()> { } Ok(()) } + +/// Deploy ZeroClaw binary + config to Arduino Uno Q via SSH/SCP. +/// +/// Expects a cross-compiled binary at `target/aarch64-unknown-linux-gnu/release/zeroclaw`. 
+pub fn deploy_uno_q(host: &str) -> Result<()> { + let ssh_target = if host.contains('@') { + host.to_string() + } else { + format!("arduino@{}", host) + }; + + let binary = std::path::Path::new(env!("CARGO_MANIFEST_DIR")) + .join("target") + .join("aarch64-unknown-linux-gnu") + .join("release") + .join("zeroclaw"); + + if !binary.exists() { + anyhow::bail!( + "Cross-compiled binary not found at {}.\nBuild with: ./dev/cross-uno-q.sh", + binary.display() + ); + } + + println!("Creating remote directory on {}...", host); + let status = Command::new("ssh") + .args([&ssh_target, "mkdir", "-p", "~/zeroclaw"]) + .status() + .context("ssh mkdir failed")?; + if !status.success() { + anyhow::bail!("Failed to create ~/zeroclaw on Uno Q"); + } + + println!("Copying zeroclaw binary..."); + let status = Command::new("scp") + .args([ + binary.to_str().unwrap(), + &format!("{}:~/zeroclaw/zeroclaw", ssh_target), + ]) + .status() + .context("scp binary failed")?; + if !status.success() { + anyhow::bail!("Failed to copy binary"); + } + + let status = Command::new("ssh") + .args([&ssh_target, "chmod", "+x", "~/zeroclaw/zeroclaw"]) + .status() + .context("ssh chmod failed")?; + if !status.success() { + anyhow::bail!("Failed to set executable bit"); + } + + println!(); + println!("ZeroClaw deployed to Uno Q!"); + println!(" Binary: ~/zeroclaw/zeroclaw"); + println!(); + println!("Start with: ssh {} '~/zeroclaw/zeroclaw agent'", ssh_target); + + Ok(()) +} From 588a1174477dc0ec3d1ad5ea42d8577b9f157e7d Mon Sep 17 00:00:00 2001 From: Todd Gruben Date: Wed, 18 Feb 2026 16:45:48 -0600 Subject: [PATCH 02/14] fix(peripherals): fix UNO Q Bridge for real hardware deployment MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Six issues discovered during deployment to actual UNO Q hardware: 1. Bridge.call() takes positional args, not a list — changed from Bridge.call("digitalRead", [pin]) to Bridge.call("digitalRead", pin) 2. 
Bridge.call() must run on main thread (not thread-safe) — restructured socket server to use a queue pattern: accept thread enqueues requests, main App.run() loop drains queue and calls Bridge 3. Docker container networking requires 0.0.0.0 bind (not 127.0.0.1) 4. Wire/SPI are built into Zephyr platform, removed from sketch.yaml 5. Renamed C++ functions to bridge_* prefix to avoid Arduino built-in clashes 6. Changed const char* params to String for MsgPack RPC compatibility Tested on hannah.local: gpio_read, gpio_write, adc_read, pwm_write, capabilities all confirmed working. --- firmware/zeroclaw-uno-q-bridge/python/main.py | 90 ++++++++++--------- .../zeroclaw-uno-q-bridge/sketch/sketch.ino | 69 +++++++------- .../zeroclaw-uno-q-bridge/sketch/sketch.yaml | 2 - 3 files changed, 79 insertions(+), 82 deletions(-) diff --git a/firmware/zeroclaw-uno-q-bridge/python/main.py b/firmware/zeroclaw-uno-q-bridge/python/main.py index 487f74f4fd..8079e5b107 100644 --- a/firmware/zeroclaw-uno-q-bridge/python/main.py +++ b/firmware/zeroclaw-uno-q-bridge/python/main.py @@ -1,49 +1,49 @@ # ZeroClaw Bridge — socket server for full MCU peripheral control # SPDX-License-Identifier: MPL-2.0 +# +# Bridge.call() must run on the main thread (not thread-safe). +# Socket accepts happen on a background thread, but each request +# is queued and processed in the main App.run() loop. +import queue import socket +import sys import threading -from arduino.app_utils import App, Bridge +import traceback +from arduino.app_utils import * ZEROCLAW_PORT = 9999 +# Queue of (conn, data_str) tuples processed on the main thread. 
+request_queue = queue.Queue() -def handle_client(conn): + +def process_request(data, conn): + """Process a single bridge command on the main thread.""" try: - data = conn.recv(1024).decode().strip() - if not data: - conn.close() - return parts = data.split() - if len(parts) < 1: + if not parts: conn.sendall(b"error: empty command\n") - conn.close() return cmd = parts[0].lower() # ── GPIO ────────────────────────────────────────────── if cmd == "gpio_write" and len(parts) >= 3: - pin = int(parts[1]) - value = int(parts[2]) - Bridge.call("digitalWrite", [pin, value]) + Bridge.call("digitalWrite", int(parts[1]), int(parts[2])) conn.sendall(b"ok\n") elif cmd == "gpio_read" and len(parts) >= 2: - pin = int(parts[1]) - val = Bridge.call("digitalRead", [pin]) + val = Bridge.call("digitalRead", int(parts[1])) conn.sendall(f"{val}\n".encode()) # ── ADC ─────────────────────────────────────────────── elif cmd == "adc_read" and len(parts) >= 2: - channel = int(parts[1]) - val = Bridge.call("analogRead", [channel]) + val = Bridge.call("analogRead", int(parts[1])) conn.sendall(f"{val}\n".encode()) # ── PWM ─────────────────────────────────────────────── elif cmd == "pwm_write" and len(parts) >= 3: - pin = int(parts[1]) - duty = int(parts[2]) - result = Bridge.call("analogWrite", [pin, duty]) + result = Bridge.call("analogWrite", int(parts[1]), int(parts[2])) if result == -1: conn.sendall(b"error: not a PWM pin\n") else: @@ -51,27 +51,21 @@ def handle_client(conn): # ── I2C ─────────────────────────────────────────────── elif cmd == "i2c_scan": - result = Bridge.call("i2cScan", []) + result = Bridge.call("i2cScan") conn.sendall(f"{result}\n".encode()) elif cmd == "i2c_transfer" and len(parts) >= 4: - addr = int(parts[1]) - hex_data = parts[2] - rx_len = int(parts[3]) - result = Bridge.call("i2cTransfer", [addr, hex_data, rx_len]) + result = Bridge.call("i2cTransfer", int(parts[1]), parts[2], int(parts[3])) conn.sendall(f"{result}\n".encode()) # ── SPI 
─────────────────────────────────────────────── elif cmd == "spi_transfer" and len(parts) >= 2: - hex_data = parts[1] - result = Bridge.call("spiTransfer", [hex_data]) + result = Bridge.call("spiTransfer", parts[1]) conn.sendall(f"{result}\n".encode()) # ── CAN ─────────────────────────────────────────────── elif cmd == "can_send" and len(parts) >= 3: - can_id = int(parts[1]) - hex_data = parts[2] - result = Bridge.call("canSend", [can_id, hex_data]) + result = Bridge.call("canSend", int(parts[1]), parts[2]) if result == -2: conn.sendall(b"error: CAN not yet available\n") else: @@ -79,31 +73,28 @@ def handle_client(conn): # ── LED Matrix ──────────────────────────────────────── elif cmd == "led_matrix" and len(parts) >= 2: - hex_bitmap = parts[1] - Bridge.call("ledMatrix", [hex_bitmap]) + Bridge.call("ledMatrix", parts[1]) conn.sendall(b"ok\n") # ── RGB LED ─────────────────────────────────────────── elif cmd == "rgb_led" and len(parts) >= 5: - led_id = int(parts[1]) - r = int(parts[2]) - g = int(parts[3]) - b = int(parts[4]) - result = Bridge.call("rgbLed", [led_id, r, g, b]) + result = Bridge.call("rgbLed", int(parts[1]), int(parts[2]), int(parts[3]), int(parts[4])) if result == -1: - conn.sendall(b"error: invalid LED id (use 3 or 4)\n") + conn.sendall(b"error: invalid LED id (use 0 or 1)\n") else: conn.sendall(b"ok\n") # ── Capabilities ────────────────────────────────────── elif cmd == "capabilities": - result = Bridge.call("capabilities", []) + result = Bridge.call("capabilities") conn.sendall(f"{result}\n".encode()) else: conn.sendall(b"error: unknown command\n") except Exception as e: + print(f"[handle] ERROR: {e}", file=sys.stderr, flush=True) + traceback.print_exc(file=sys.stderr) try: conn.sendall(f"error: {e}\n".encode()) except Exception: @@ -113,28 +104,39 @@ def handle_client(conn): def accept_loop(server): + """Background thread: accept connections and enqueue requests.""" while True: try: conn, _ = server.accept() - t = 
threading.Thread(target=handle_client, args=(conn,)) - t.daemon = True - t.start() + data = conn.recv(1024).decode().strip() + if data: + request_queue.put((conn, data)) + else: + conn.close() + except socket.timeout: + continue except Exception: break def loop(): - App.sleep(1) + """Main-thread loop: drain the request queue and process via Bridge.""" + while not request_queue.empty(): + try: + conn, data = request_queue.get_nowait() + process_request(data, conn) + except queue.Empty: + break def main(): server = socket.socket(socket.AF_INET, socket.SOCK_STREAM) server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - server.bind(("127.0.0.1", ZEROCLAW_PORT)) + server.bind(("0.0.0.0", ZEROCLAW_PORT)) server.listen(5) server.settimeout(1.0) - t = threading.Thread(target=accept_loop, args=(server,)) - t.daemon = True + print(f"[ZeroClaw Bridge] Listening on 0.0.0.0:{ZEROCLAW_PORT}", flush=True) + t = threading.Thread(target=accept_loop, args=(server,), daemon=True) t.start() App.run(user_loop=loop) diff --git a/firmware/zeroclaw-uno-q-bridge/sketch/sketch.ino b/firmware/zeroclaw-uno-q-bridge/sketch/sketch.ino index f4c25d8515..7bc03e3751 100644 --- a/firmware/zeroclaw-uno-q-bridge/sketch/sketch.ino +++ b/firmware/zeroclaw-uno-q-bridge/sketch/sketch.ino @@ -46,22 +46,24 @@ static uint8_t hex_nibble(char c) { return 0; } -static int hex_decode(const char *hex, uint8_t *buf, int max_len) { +static int hex_decode(const String &hex, uint8_t *buf, int max_len) { int len = 0; - while (hex[0] && hex[1] && len < max_len) { - buf[len++] = (hex_nibble(hex[0]) << 4) | hex_nibble(hex[1]); - hex += 2; + int slen = hex.length(); + for (int i = 0; i + 1 < slen && len < max_len; i += 2) { + buf[len++] = (hex_nibble(hex.charAt(i)) << 4) | hex_nibble(hex.charAt(i + 1)); } return len; } -static void hex_encode(const uint8_t *data, int len, char *out) { +static String hex_encode(const uint8_t *data, int len) { static const char hexchars[] = "0123456789abcdef"; + String result; + 
result.reserve(len * 2); for (int i = 0; i < len; i++) { - out[i * 2] = hexchars[(data[i] >> 4) & 0x0F]; - out[i * 2 + 1] = hexchars[data[i] & 0x0F]; + result += hexchars[(data[i] >> 4) & 0x0F]; + result += hexchars[data[i] & 0x0F]; } - out[len * 2] = '\0'; + return result; } static bool is_pwm_pin(int pin) { @@ -85,7 +87,7 @@ int gpio_read(int pin) { // ── ADC (12-bit, A0-A5) ──────────────────────────────────────── -int adc_read(int channel) { +int bridge_adc_read(int channel) { int pin = ADC_FIRST_PIN + channel; if (pin < ADC_FIRST_PIN || pin > ADC_LAST_PIN) return -1; analogReadResolution(12); @@ -94,7 +96,7 @@ int adc_read(int channel) { // ── PWM (D3, D5, D6, D9, D10, D11) ───────────────────────────── -int pwm_write(int pin, int duty) { +int bridge_pwm_write(int pin, int duty) { if (!is_pwm_pin(pin)) return -1; if (duty < 0) duty = 0; if (duty > 255) duty = 255; @@ -105,7 +107,7 @@ int pwm_write(int pin, int duty) { // ── I2C scan ──────────────────────────────────────────────────── -String i2c_scan() { +String bridge_i2c_scan() { Wire.begin(); String result = ""; bool first = true; @@ -120,9 +122,9 @@ String i2c_scan() { return result.length() > 0 ? 
result : "none"; } -// ── I2C transfer ──────────────────────────────────────────────── +// ── I2C transfer (all String params for MsgPack compatibility) ── -String i2c_transfer(int addr, const char *hex_data, int rx_len) { +String bridge_i2c_transfer(int addr, String hex_data, int rx_len) { if (addr < 1 || addr > 127) return "err:addr"; if (rx_len < 0 || rx_len > 32) return "err:rxlen"; @@ -144,16 +146,14 @@ String i2c_transfer(int addr, const char *hex_data, int rx_len) { while (Wire.available() && count < rx_len) { rx_buf[count++] = Wire.read(); } - char hex_out[65]; - hex_encode(rx_buf, count, hex_out); - return String(hex_out); + return hex_encode(rx_buf, count); } return "ok"; } // ── SPI transfer ──────────────────────────────────────────────── -String spi_transfer(const char *hex_data) { +String bridge_spi_transfer(String hex_data) { uint8_t buf[32]; int len = hex_decode(hex_data, buf, sizeof(buf)); if (len == 0) return "err:empty"; @@ -166,14 +166,12 @@ String spi_transfer(const char *hex_data) { } SPI.endTransaction(); - char hex_out[65]; - hex_encode(rx_buf, len, hex_out); - return String(hex_out); + return hex_encode(rx_buf, len); } // ── CAN (stub — needs Zephyr FDCAN driver) ────────────────────── -int can_send(int id, const char *hex_data) { +int bridge_can_send(int id, String hex_data) { (void)id; (void)hex_data; return -2; // not yet available @@ -181,19 +179,18 @@ int can_send(int id, const char *hex_data) { // ── LED matrix (8x13, 13-byte bitmap) ─────────────────────────── -int led_matrix(const char *hex_bitmap) { +int bridge_led_matrix(String hex_bitmap) { uint8_t bitmap[LED_MATRIX_BYTES]; int len = hex_decode(hex_bitmap, bitmap, LED_MATRIX_BYTES); if (len != LED_MATRIX_BYTES) return -1; // Matrix rendering depends on board LED matrix driver availability. - // Bitmap accepted; actual display requires Arduino_LED_Matrix library. 
(void)bitmap; return 0; } // ── RGB LED (MCU LEDs 3-4, active-low) ────────────────────────── -int rgb_led(int id, int r, int g, int b) { +int bridge_rgb_led(int id, int r, int g, int b) { if (id < 0 || id >= RGB_LED_COUNT) return -1; r = constrain(r, 0, 255); g = constrain(g, 0, 255); @@ -209,7 +206,7 @@ int rgb_led(int id, int r, int g, int b) { // ── Capabilities ──────────────────────────────────────────────── -String get_capabilities() { +String bridge_get_capabilities() { return "gpio,adc,pwm,i2c,spi,can,led_matrix,rgb_led"; } @@ -217,17 +214,17 @@ String get_capabilities() { void setup() { Bridge.begin(); - Bridge.provide("digitalWrite", gpio_write); - Bridge.provide("digitalRead", gpio_read); - Bridge.provide("analogRead", adc_read); - Bridge.provide("analogWrite", pwm_write); - Bridge.provide("i2cScan", i2c_scan); - Bridge.provide("i2cTransfer", i2c_transfer); - Bridge.provide("spiTransfer", spi_transfer); - Bridge.provide("canSend", can_send); - Bridge.provide("ledMatrix", led_matrix); - Bridge.provide("rgbLed", rgb_led); - Bridge.provide("capabilities", get_capabilities); + Bridge.provide("digitalWrite", gpio_write); + Bridge.provide("digitalRead", gpio_read); + Bridge.provide("analogRead", bridge_adc_read); + Bridge.provide("analogWrite", bridge_pwm_write); + Bridge.provide("i2cScan", bridge_i2c_scan); + Bridge.provide("i2cTransfer", bridge_i2c_transfer); + Bridge.provide("spiTransfer", bridge_spi_transfer); + Bridge.provide("canSend", bridge_can_send); + Bridge.provide("ledMatrix", bridge_led_matrix); + Bridge.provide("rgbLed", bridge_rgb_led); + Bridge.provide("capabilities", bridge_get_capabilities); } void loop() { diff --git a/firmware/zeroclaw-uno-q-bridge/sketch/sketch.yaml b/firmware/zeroclaw-uno-q-bridge/sketch/sketch.yaml index 732e87b4b4..d9fe917efa 100644 --- a/firmware/zeroclaw-uno-q-bridge/sketch/sketch.yaml +++ b/firmware/zeroclaw-uno-q-bridge/sketch/sketch.yaml @@ -8,6 +8,4 @@ profiles: - DebugLog (0.8.4) - ArxContainer (0.7.0) - 
ArxTypeTraits (0.3.1) - - Wire - - SPI default_profile: default From 08511fd04762cfc37104375009264d8631638bfe Mon Sep 17 00:00:00 2001 From: Todd Gruben Date: Wed, 18 Feb 2026 17:27:21 -0600 Subject: [PATCH 03/14] feat(peripherals): update UNO Q camera tool for USB cameras + add musl cross-compile config - Camera capture tool now uses v4l2-ctl instead of GStreamer (works with USB cameras like NETUM, not just MIPI-CSI) - Tool output includes [IMAGE:] hint so Telegram channel sends the captured photo directly to the user - Added width/height/device parameters (defaults: 1280x720, /dev/video0) - Added aarch64-unknown-linux-musl linker config to .cargo/config.toml --- .cargo/config.toml | 4 ++ src/peripherals/uno_q_bridge.rs | 73 +++++++++++++++++++-------------- 2 files changed, 46 insertions(+), 31 deletions(-) diff --git a/.cargo/config.toml b/.cargo/config.toml index e1f508bbf0..12365ff039 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -2,4 +2,8 @@ rustflags = ["-C", "link-arg=-static"] [target.aarch64-unknown-linux-musl] +linker = "aarch64-linux-musl-gcc" rustflags = ["-C", "link-arg=-static"] + +[target.aarch64-unknown-linux-gnu] +linker = "aarch64-linux-gnu-gcc" diff --git a/src/peripherals/uno_q_bridge.rs b/src/peripherals/uno_q_bridge.rs index e27610045a..2c7db5eda3 100644 --- a/src/peripherals/uno_q_bridge.rs +++ b/src/peripherals/uno_q_bridge.rs @@ -689,61 +689,72 @@ impl Tool for UnoQCameraCaptureTool { } fn description(&self) -> &str { - "Capture an image from the on-board camera on Uno Q Linux MPU using GStreamer (gst-launch-1.0)." + "Capture a photo from the USB camera on Arduino Uno Q. Returns the image path. Include [IMAGE:] in your response to send it to the user." 
} fn parameters_schema(&self) -> Value { json!({ "type": "object", "properties": { - "output_path": { + "width": { + "type": "integer", + "description": "Image width in pixels (default: 1280)" + }, + "height": { + "type": "integer", + "description": "Image height in pixels (default: 720)" + }, + "device": { "type": "string", - "description": "Output file path for the captured image (default: /tmp/capture.jpg)" + "description": "V4L2 device path (default: /dev/video0)" } - }, - "required": [] + } }) } async fn execute(&self, args: Value) -> anyhow::Result { - let output_path = args - .get("output_path") + let width = args.get("width").and_then(|v| v.as_u64()).unwrap_or(1280); + let height = args.get("height").and_then(|v| v.as_u64()).unwrap_or(720); + let device = args + .get("device") .and_then(|v| v.as_str()) - .unwrap_or("/tmp/capture.jpg"); + .unwrap_or("/dev/video0"); + let output_path = "/tmp/zeroclaw_capture.jpg"; - let output = tokio::process::Command::new("gst-launch-1.0") + let fmt = format!("width={},height={},pixelformat=MJPG", width, height); + let output = tokio::process::Command::new("v4l2-ctl") .args([ - "v4l2src", - "num-buffers=1", - "!", - "image/jpeg,width=640,height=480", - "!", - "filesink", - &format!("location={}", output_path), + "-d", + device, + "--set-fmt-video", + &fmt, + "--stream-mmap", + "--stream-count=1", + &format!("--stream-to={}", output_path), ]) .output() .await; match output { + Ok(out) if out.status.success() => Ok(ToolResult { + success: true, + output: format!( + "Photo captured ({}x{}) to {}. 
To send it to the user, include [IMAGE:{}] in your response.", + width, height, output_path, output_path + ), + error: None, + }), Ok(out) => { - if out.status.success() { - Ok(ToolResult { - success: true, - output: format!("Image captured to {}", output_path), - error: None, - }) - } else { - let stderr = String::from_utf8_lossy(&out.stderr).to_string(); - Ok(ToolResult { - success: false, - output: format!("Camera capture failed: {}", stderr), - error: Some(stderr), - }) - } + let stderr = String::from_utf8_lossy(&out.stderr).to_string(); + Ok(ToolResult { + success: false, + output: format!("Camera capture failed: {}", stderr), + error: Some(stderr), + }) } Err(e) => Ok(ToolResult { success: false, - output: format!("Failed to run gst-launch-1.0: {}", e), + output: format!("Failed to run v4l2-ctl: {}. Is v4l-utils installed?", e), error: Some(e.to_string()), }), } From 73847e0057f136c7f5348795d181b9b71356fcae Mon Sep 17 00:00:00 2001 From: Todd Gruben Date: Wed, 18 Feb 2026 17:35:29 -0600 Subject: [PATCH 04/14] fix(peripherals): load peripheral tools in daemon/channel path + fix camera for USB MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The daemon's channel server was missing peripheral tools — only the interactive `agent` command loaded them. Now `start_channels()` calls `create_peripheral_tools()` so Telegram/Discord/Slack channels get access to all UNO Q hardware tools. Also updated camera capture tool description to guide the LLM to use [IMAGE:] markers for Telegram photo delivery. 
--- src/channels/mod.rs | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/src/channels/mod.rs b/src/channels/mod.rs index 0fff1ecbee..fe14ac5341 100644 --- a/src/channels/mod.rs +++ b/src/channels/mod.rs @@ -1532,7 +1532,7 @@ pub async fn start_channels(config: Config) -> Result<()> { }; // Build system prompt from workspace identity files + skills let workspace = config.workspace_dir.clone(); - let tools_registry = Arc::new(tools::all_tools_with_runtime( + let mut all_tools = tools::all_tools_with_runtime( Arc::new(config.clone()), &security, runtime, @@ -1545,7 +1545,17 @@ pub async fn start_channels(config: Config) -> Result<()> { &config.agents, config.api_key.as_deref(), &config, - )); + ); + + // Merge peripheral tools (UNO Q Bridge, RPi GPIO, etc.) + let peripheral_tools = + crate::peripherals::create_peripheral_tools(&config.peripherals).await?; + if !peripheral_tools.is_empty() { + tracing::info!(count = peripheral_tools.len(), "Peripheral tools added to channel server"); + all_tools.extend(peripheral_tools); + } + + let tools_registry = Arc::new(all_tools); let skills = crate::skills::load_skills(&workspace); From 44aa42b5b45bd142b0110d88709f8a3018d721ec Mon Sep 17 00:00:00 2001 From: lin Date: Sat, 21 Feb 2026 07:07:14 +0800 Subject: [PATCH 05/14] Readme zeroclaw-labs to openagen --- README.md | 25 ++++++++++++------------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index 5754a8d8fa..a871ac7255 100644 --- a/README.md +++ b/README.md @@ -58,7 +58,6 @@ Use this board for important notices (breaking changes, security advisories, mai | Date (UTC) | Level | Notice | Action | |---|---|---|---| -| 2026-02-19 | _Critical_ | We are **not affiliated** with `openagen/zeroclaw` or `zeroclaw.org`. The `zeroclaw.org` domain currently points to the `openagen/zeroclaw` fork, and that domain/repository are impersonating our official website/project. 
| Do not trust information, binaries, fundraising, or announcements from those sources. Use only this repository and our verified social accounts. | | 2026-02-19 | _Important_ | We have **not** launched an official website yet, and we are seeing impersonation attempts. Do **not** join any investment or fundraising activity claiming the ZeroClaw name. | Use this repository as the single source of truth. Follow [X (@zeroclawlabs)](https://x.com/zeroclawlabs?s=21), [Reddit (r/zeroclawlabs)](https://www.reddit.com/r/zeroclawlabs/), [Telegram (@zeroclawlabs)](https://t.me/zeroclawlabs), [Telegram CN (@zeroclawlabs_cn)](https://t.me/zeroclawlabs_cn), [Telegram RU (@zeroclawlabs_ru)](https://t.me/zeroclawlabs_ru), and [Xiaohongshu](https://www.xiaohongshu.com/user/profile/67cbfc43000000000d008307?xsec_token=AB73VnYnGNx5y36EtnnZfGmAmS-6Wzv8WMuGpfwfkg6Yc%3D&xsec_source=pc_search) for official updates. | | 2026-02-19 | _Important_ | Anthropic updated the Authentication and Credential Use terms on 2026-02-19. OAuth authentication (Free, Pro, Max) is intended exclusively for Claude Code and Claude.ai; using OAuth tokens from Claude Free/Pro/Max in any other product, tool, or service (including Agent SDK) is not permitted and may violate the Consumer Terms of Service. | Please temporarily avoid Claude Code OAuth integrations to prevent potential loss. Original clause: [Authentication and Credential Use](https://code.claude.com/docs/en/legal-and-compliance#authentication-and-credential-use). 
| @@ -170,7 +169,7 @@ Example sample (macOS arm64, measured on February 18, 2026): Or skip the steps above and install everything (system deps, Rust, ZeroClaw) in a single command: ```bash -curl -LsSf https://raw.githubusercontent.com/zeroclaw-labs/zeroclaw/main/scripts/install.sh | bash +curl -LsSf https://raw.githubusercontent.com/openagen/zeroclaw/main/scripts/install.sh | bash ``` #### Compilation resource requirements @@ -215,7 +214,7 @@ brew install zeroclaw ```bash # Recommended: clone then run local bootstrap script -git clone https://github.com/zeroclaw-labs/zeroclaw.git +git clone https://github.com/openagen/zeroclaw.git cd zeroclaw ./bootstrap.sh @@ -244,7 +243,7 @@ ZEROCLAW_CONTAINER_CLI=podman ./bootstrap.sh --docker Remote one-liner (review first in security-sensitive environments): ```bash -curl -fsSL https://raw.githubusercontent.com/zeroclaw-labs/zeroclaw/main/scripts/bootstrap.sh | bash +curl -fsSL https://raw.githubusercontent.com/openagen/zeroclaw/main/scripts/bootstrap.sh | bash ``` Details: [`docs/one-click-bootstrap.md`](docs/one-click-bootstrap.md) (toolchain mode may request `sudo` for system packages). @@ -258,18 +257,18 @@ Release assets are published for: - Windows: `x86_64` Download the latest assets from: - + Example (ARM64 Linux): ```bash -curl -fsSLO https://github.com/zeroclaw-labs/zeroclaw/releases/latest/download/zeroclaw-aarch64-unknown-linux-gnu.tar.gz +curl -fsSLO https://github.com/openagen/zeroclaw/releases/latest/download/zeroclaw-aarch64-unknown-linux-gnu.tar.gz tar xzf zeroclaw-aarch64-unknown-linux-gnu.tar.gz install -m 0755 zeroclaw "$HOME/.cargo/bin/zeroclaw" ``` ```bash -git clone https://github.com/zeroclaw-labs/zeroclaw.git +git clone https://github.com/openagen/zeroclaw.git cd zeroclaw cargo build --release --locked cargo install --path . --force --locked @@ -1053,11 +1052,11 @@ We're building in the open because the best ideas come from everywhere. 
If you'r ## ⚠️ Official Repository & Impersonation Warning **This is the only official ZeroClaw repository:** -> https://github.com/zeroclaw-labs/zeroclaw +> https://github.com/openagen/zeroclaw Any other repository, organization, domain, or package claiming to be "ZeroClaw" or implying affiliation with ZeroClaw Labs is **unauthorized and not affiliated with this project**. Known unauthorized forks will be listed in [TRADEMARK.md](TRADEMARK.md). -If you encounter impersonation or trademark misuse, please [open an issue](https://github.com/zeroclaw-labs/zeroclaw/issues). +If you encounter impersonation or trademark misuse, please [open an issue](https://github.com/openagen/zeroclaw/issues). --- @@ -1102,11 +1101,11 @@ See [CONTRIBUTING.md](CONTRIBUTING.md) and [CLA.md](CLA.md). Implement a trait, ## Star History

- + - - - Star History Chart + + + Star History Chart

From b1557e9b9e9b4f8cd5c5f0afb3448766f8414847 Mon Sep 17 00:00:00 2001 From: modpunk Date: Sat, 21 Feb 2026 23:05:21 +0000 Subject: [PATCH 06/14] docs: add FTMS (File/Text Management System) design doc Defines architecture for file upload, storage, text extraction, AI-powered media description, and FTS5 full-text search indexing. Co-Authored-By: Claude Opus 4.6 --- docs/plans/2026-02-21-ftms-design.md | 166 +++++++++++++++++++++++++++ 1 file changed, 166 insertions(+) create mode 100644 docs/plans/2026-02-21-ftms-design.md diff --git a/docs/plans/2026-02-21-ftms-design.md b/docs/plans/2026-02-21-ftms-design.md new file mode 100644 index 0000000000..32e1cb38fe --- /dev/null +++ b/docs/plans/2026-02-21-ftms-design.md @@ -0,0 +1,166 @@ +# FTMS — File/Text Management System + +**Date:** 2026-02-21 +**Status:** Approved +**Author:** markus (modpunk) + +## Purpose + +Add the ability to upload files (documents, images, audio, video) through the ZeroClaw web chat UI, store them on the Pi's SD card organized by date, ingest their text content into a searchable SQLite FTS5 index, and use Claude to generate descriptions of non-text media (images, audio, video). Users can later search for uploaded files by content or find the chat session where they uploaded a file. + +## Architecture + +FTMS is implemented as a new Rust module at `src/ftms/` that integrates with the existing gateway (Axum router) and memory (SQLite) systems. 
+ +### Module Structure + +``` +src/ftms/ +├── mod.rs # Public API, route registration, FTMS init +├── storage.rs # File system storage (date-organized directories) +├── index.rs # SQLite FTS5 full-text search index +├── extract.rs # Text extraction from various file types +├── describe.rs # AI-powered description of non-text media +└── schema.rs # Data types: FileRecord, FileMetadata, UploadRequest +``` + +### Data Flow + +``` +Upload request (multipart/form-data) + → gateway /upload route (auth check) + → storage.rs: save file to ~/.zeroclaw/files/YYYY/MM/DD/{uuid}.{ext} + → extract.rs: extract text (PDF→text, DOCX→text, plain text passthrough) + → describe.rs: if image/audio/video, call Claude to describe content + → index.rs: insert into SQLite FTS5 table (filename, extracted text, AI description, metadata) + → Return FileRecord JSON to client +``` + +### Storage Layout + +Files stored under `~/.zeroclaw/files/` organized by upload date: + +``` +~/.zeroclaw/files/ +├── 2026/ +│ └── 02/ +│ └── 21/ +│ ├── a1b2c3d4.pdf +│ └── e5f6g7h8.png +``` + +### Database Schema + +New table in the existing ZeroClaw SQLite database: + +```sql +CREATE TABLE IF NOT EXISTS ftms_files ( + id TEXT PRIMARY KEY, -- UUID + filename TEXT NOT NULL, -- Original filename + mime_type TEXT NOT NULL, -- Detected MIME type + file_path TEXT NOT NULL, -- Relative path under ~/.zeroclaw/files/ + file_size INTEGER NOT NULL, -- Size in bytes + extracted_text TEXT, -- Extracted text content (nullable) + ai_description TEXT, -- AI-generated description (nullable) + session_id TEXT, -- Chat session ID for context tracking + channel TEXT, -- Which channel (web, telegram, etc.) 
+ uploaded_at TEXT NOT NULL, -- ISO 8601 timestamp + tags TEXT -- Optional comma-separated tags +); + +CREATE VIRTUAL TABLE IF NOT EXISTS ftms_files_fts USING fts5( + filename, extracted_text, ai_description, tags, + content='ftms_files', + content_rowid='rowid' +); +``` + +### Gateway Routes + +Added to the existing Axum router in `src/gateway/mod.rs`: + +| Method | Path | Auth | Body | Description | +|--------|------|------|------|-------------| +| POST | /upload | Bearer token | multipart/form-data | Upload a file | +| GET | /files | Bearer token | — | List files (paginated, filterable) | +| GET | /files/:id | Bearer token | — | Get file metadata | +| GET | /files/:id/download | Bearer token | — | Download file content | +| GET | /files/search | Bearer token | ?q=query | Full-text search | + +### Body Size Limit + +The existing gateway enforces a 64KB body limit. FTMS needs a separate limit for the upload route: +- **Upload route**: 50MB max (configurable via config.toml) +- **All other routes**: keep existing 64KB limit + +### Text Extraction Strategy + +| File Type | Method | +|-----------|--------| +| .txt, .md, .csv, .json, .xml | Direct read (UTF-8) | +| .pdf | `pdf-extract` crate or shell out to `pdftotext` | +| .docx | `docx-rs` crate (XML-based, pure Rust) | +| .png, .jpg, .gif, .webp | AI description via Claude vision | +| .mp3, .wav, .ogg | AI description (metadata extraction + optional transcription) | +| .mp4, .webm | AI description (extract keyframe + describe) | + +For the initial implementation, focus on: plain text files, images (Claude vision), and PDF. Other formats can be added incrementally. + +### AI Description + +For non-text files (images, audio, video), FTMS calls the existing provider system to generate a description: + +1. Image: Send to Claude with "Describe this image in detail" prompt +2. 
Audio/Video: Extract metadata (duration, codec), note as "audio/video file" with metadata + +This uses the existing `providers::Provider` trait already in ZeroClaw. + +### Session Context Tracking + +Each upload records: +- `session_id`: The chat session UUID (from webhook request context) +- `channel`: Which interface ("web", "telegram", etc.) +- `uploaded_at`: Precise timestamp + +This allows users to find uploads by chat context: "I uploaded a file during that conversation about X" → search files → find session_id → retrieve chat history. + +### Web UI Changes + +Add to `index.html`: +- Paperclip/attachment icon next to the message input +- File picker dialog (accept all file types) +- Upload progress indicator +- Thumbnail preview for images +- File message bubble showing filename, size, and AI description + +### Proxy Changes + +Add to `server.py`: +- Pass-through for `/upload` (multipart, increased body limit) +- Pass-through for `/files`, `/files/:id`, `/files/:id/download`, `/files/search` + +### Config + +New section in `~/.zeroclaw/config.toml`: + +```toml +[ftms] +enabled = true +max_upload_size_mb = 50 +storage_dir = ~/.zeroclaw/files +auto_describe = true # Use AI to describe non-text files +``` + +## Trade-offs + +- **SQLite FTS5 over external search engine**: Keeps it lightweight and zero-dependency, matching ZeroClaw's philosophy. FTS5 is built into rusqlite. +- **Date-organized storage over content-addressed**: Simpler to browse manually, easier to backup/prune by date. +- **AI description async**: Description can happen after upload returns, so the user isn't blocked waiting for Claude to describe their image. + +## Success Criteria + +1. User can upload a file from web UI and it appears in `~/.zeroclaw/files/` +2. Text files are searchable by content via `/files/search?q=...` +3. Images get an AI-generated description stored in the index +4. User can find which chat session a file was uploaded in +5. 
Files persist across reboots From 4431fc9503ac061b24da9b80e3bb1e41196ddd4e Mon Sep 17 00:00:00 2001 From: modpunk Date: Sat, 21 Feb 2026 23:13:29 +0000 Subject: [PATCH 07/14] docs: add FTMS implementation plan (11 tasks) Detailed step-by-step plan covering config, schema, storage, index, extraction, description, gateway routes, web UI, and proxy changes. Co-Authored-By: Claude Opus 4.6 --- docs/plans/2026-02-21-ftms-implementation.md | 1275 ++++++++++++++++++ 1 file changed, 1275 insertions(+) create mode 100644 docs/plans/2026-02-21-ftms-implementation.md diff --git a/docs/plans/2026-02-21-ftms-implementation.md b/docs/plans/2026-02-21-ftms-implementation.md new file mode 100644 index 0000000000..a23099931f --- /dev/null +++ b/docs/plans/2026-02-21-ftms-implementation.md @@ -0,0 +1,1275 @@ +# FTMS Implementation Plan + +> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. + +**Goal:** Add file upload, storage, text extraction, AI description, and full-text search to ZeroClaw as a new Rust module. + +**Architecture:** New `src/ftms/` module with its own SQLite database (`ftms.db`), integrated into the existing Axum gateway router. Files stored on disk under `~/.zeroclaw/files/YYYY/MM/DD/`, metadata and extracted text indexed in FTS5 for search. Non-text files get AI-generated descriptions via the existing provider system. + +**Tech Stack:** Rust, rusqlite (bundled SQLite + FTS5), axum (multipart uploads), tokio (async fs), existing ZeroClaw config/provider systems. 
+ +--- + +### Task 1: Config — Add `[ftms]` Section + +**Files:** +- Modify: `src/config/schema.rs` (add FtmsConfig struct + field on Config) +- Modify: `src/config/mod.rs` (re-export FtmsConfig) + +**Step 1: Add FtmsConfig struct to schema.rs** + +In `src/config/schema.rs`, add after `MultimodalConfig`: + +```rust +fn default_ftms_max_upload_size_mb() -> usize { 50 } +fn default_ftms_storage_dir() -> String { "~/.zeroclaw/files".to_string() } + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] +pub struct FtmsConfig { + #[serde(default)] + pub enabled: bool, + #[serde(default = "default_ftms_max_upload_size_mb")] + pub max_upload_size_mb: usize, + #[serde(default = "default_ftms_storage_dir")] + pub storage_dir: String, + #[serde(default = "default_true")] + pub auto_describe: bool, +} + +impl Default for FtmsConfig { + fn default() -> Self { + Self { + enabled: false, + max_upload_size_mb: 50, + storage_dir: default_ftms_storage_dir(), + auto_describe: true, + } + } +} +``` + +Note: If `default_true` doesn't already exist, add: `fn default_true() -> bool { true }` + +**Step 2: Add ftms field to Config struct** + +In the `Config` struct (same file), add: + +```rust +#[serde(default)] +pub ftms: FtmsConfig, +``` + +**Step 3: Re-export in mod.rs** + +In `src/config/mod.rs`, add `FtmsConfig` to the use/re-export list. + +**Step 4: Verify it compiles** + +Run: `cargo check 2>&1 | tail -5` +Expected: no errors + +**Step 5: Commit** + +```bash +git add src/config/schema.rs src/config/mod.rs +git commit -m "feat(ftms): add [ftms] config section" +``` + +--- + +### Task 2: Schema — Define FTMS Data Types + +**Files:** +- Create: `src/ftms/schema.rs` +- Create: `src/ftms/mod.rs` +- Modify: `src/lib.rs` (declare module) + +**Step 1: Create src/ftms/mod.rs** + +```rust +//! FTMS — File/Text Management System +//! +//! Handles file upload, storage, text extraction, AI description, +//! and full-text search indexing. 
+
+pub mod schema;
+
+pub use schema::{FileRecord, FileMetadata};
+```
+
+**Step 2: Create src/ftms/schema.rs**
+
+```rust
+use serde::{Deserialize, Serialize};
+
+/// A stored file record with metadata and extracted content.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct FileRecord {
+    pub id: String,
+    pub filename: String,
+    pub mime_type: String,
+    pub file_path: String,
+    pub file_size: u64,
+    pub extracted_text: Option<String>,
+    pub ai_description: Option<String>,
+    pub session_id: Option<String>,
+    pub channel: Option<String>,
+    pub uploaded_at: String,
+    pub tags: Option<String>,
+}
+
+/// Metadata sent with an upload request (not the file bytes themselves).
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct FileMetadata {
+    pub session_id: Option<String>,
+    pub channel: Option<String>,
+    pub tags: Option<String>,
+}
+
+/// Search result with relevance score.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct FileSearchResult {
+    pub file: FileRecord,
+    pub rank: f64,
+}
+
+/// Paginated list response.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct FileListResponse {
+    pub files: Vec<FileRecord>,
+    pub total: usize,
+    pub offset: usize,
+    pub limit: usize,
+}
+```
+
+**Step 3: Declare module in lib.rs**
+
+In `src/lib.rs`, add alphabetically:
+
+```rust
+pub(crate) mod ftms;
+```
+
+**Step 4: Verify it compiles**
+
+Run: `cargo check 2>&1 | tail -5`
+Expected: no errors (warnings about unused are OK)
+
+**Step 5: Commit**
+
+```bash
+git add src/ftms/ src/lib.rs
+git commit -m "feat(ftms): add schema types and module skeleton"
+```
+
+---
+
+### Task 3: Storage — File System Operations
+
+**Files:**
+- Create: `src/ftms/storage.rs`
+- Modify: `src/ftms/mod.rs` (add pub mod)
+
+**Step 1: Create src/ftms/storage.rs**
+
+```rust
+use anyhow::{Context, Result};
+use chrono::Local;
+use std::path::{Path, PathBuf};
+use tokio::fs;
+use uuid::Uuid;
+
+/// Manages file storage on disk, organized by date.
+pub struct FileStorage {
+    base_dir: PathBuf,
+}
+
+impl FileStorage {
+    pub fn new(base_dir: &str) -> Result<Self> {
+        let expanded = shellexpand::tilde(base_dir).to_string();
+        let base = PathBuf::from(expanded);
+        Ok(Self { base_dir: base })
+    }
+
+    /// Store file bytes, returns (relative_path, absolute_path).
+    pub async fn store(
+        &self,
+        original_filename: &str,
+        data: &[u8],
+    ) -> Result<(String, PathBuf)> {
+        let now = Local::now();
+        let date_dir = now.format("%Y/%m/%d").to_string();
+        let abs_dir = self.base_dir.join(&date_dir);
+        fs::create_dir_all(&abs_dir)
+            .await
+            .context("Failed to create date directory")?;
+
+        let ext = Path::new(original_filename)
+            .extension()
+            .and_then(|e| e.to_str())
+            .unwrap_or("bin");
+        let file_id = Uuid::new_v4().to_string();
+        let stored_name = format!("{}.{}", file_id, ext);
+
+        let abs_path = abs_dir.join(&stored_name);
+        fs::write(&abs_path, data)
+            .await
+            .context("Failed to write file")?;
+
+        let rel_path = format!("{}/{}", date_dir, stored_name);
+        Ok((rel_path, abs_path))
+    }
+
+    /// Read file bytes by relative path.
+    pub async fn read(&self, rel_path: &str) -> Result<Vec<u8>> {
+        let abs = self.base_dir.join(rel_path);
+        fs::read(&abs).await.context("Failed to read file")
+    }
+
+    /// Delete a file by relative path.
+    pub async fn delete(&self, rel_path: &str) -> Result<()> {
+        let abs = self.base_dir.join(rel_path);
+        if abs.exists() {
+            fs::remove_file(&abs).await.context("Failed to delete file")?;
+        }
+        Ok(())
+    }
+
+    /// Get absolute path for a relative path.
+    pub fn absolute_path(&self, rel_path: &str) -> PathBuf {
+        self.base_dir.join(rel_path)
+    }
+}
+```
+
+**Step 2: Add to mod.rs**
+
+In `src/ftms/mod.rs`, add:
+```rust
+pub mod storage;
+```
+
+**Step 3: Verify it compiles**
+
+Run: `cargo check 2>&1 | tail -5`
+
+**Step 4: Commit**
+
+```bash
+git add src/ftms/storage.rs src/ftms/mod.rs
+git commit -m "feat(ftms): add file storage with date-organized directories"
+```
+
+---
+
+### Task 4: Index — SQLite FTS5 Search Database
+
+**Files:**
+- Create: `src/ftms/index.rs`
+- Modify: `src/ftms/mod.rs`
+
+**Step 1: Create src/ftms/index.rs**
+
+```rust
+use super::schema::{FileRecord, FileSearchResult, FileListResponse};
+use anyhow::{Context, Result};
+use parking_lot::Mutex;
+use rusqlite::{params, Connection};
+use std::path::Path;
+use std::sync::Arc;
+
+/// SQLite-backed file index with FTS5 full-text search.
+pub struct FileIndex {
+    conn: Arc<Mutex<Connection>>,
+}
+
+impl FileIndex {
+    pub fn new(workspace_dir: &Path) -> Result<Self> {
+        let db_dir = workspace_dir.join("ftms");
+        std::fs::create_dir_all(&db_dir)?;
+        let db_path = db_dir.join("ftms.db");
+        let conn = Connection::open(&db_path)
+            .context("Failed to open ftms.db")?;
+
+        conn.execute_batch(
+            "PRAGMA journal_mode = WAL;
+             PRAGMA synchronous = NORMAL;
+             PRAGMA cache_size = -2000;
+             PRAGMA temp_store = MEMORY;",
+        )?;
+
+        Self::init_schema(&conn)?;
+        Ok(Self { conn: Arc::new(Mutex::new(conn)) })
+    }
+
+    fn init_schema(conn: &Connection) -> Result<()> {
+        conn.execute_batch(
+            "CREATE TABLE IF NOT EXISTS ftms_files (
+                id TEXT PRIMARY KEY,
+                filename TEXT NOT NULL,
+                mime_type TEXT NOT NULL,
+                file_path TEXT NOT NULL,
+                file_size INTEGER NOT NULL,
+                extracted_text TEXT,
+                ai_description TEXT,
+                session_id TEXT,
+                channel TEXT,
+                uploaded_at TEXT NOT NULL,
+                tags TEXT
+            );
+
+            CREATE INDEX IF NOT EXISTS idx_ftms_session ON ftms_files(session_id);
+            CREATE INDEX IF NOT EXISTS idx_ftms_uploaded ON ftms_files(uploaded_at);
+            CREATE INDEX IF NOT EXISTS idx_ftms_mime ON
ftms_files(mime_type); + + CREATE VIRTUAL TABLE IF NOT EXISTS ftms_fts USING fts5( + filename, extracted_text, ai_description, tags, + content='ftms_files', content_rowid='rowid' + ); + + CREATE TRIGGER IF NOT EXISTS ftms_ai AFTER INSERT ON ftms_files BEGIN + INSERT INTO ftms_fts(rowid, filename, extracted_text, ai_description, tags) + VALUES (new.rowid, new.filename, new.extracted_text, new.ai_description, new.tags); + END; + + CREATE TRIGGER IF NOT EXISTS ftms_ad AFTER DELETE ON ftms_files BEGIN + INSERT INTO ftms_fts(ftms_fts, rowid, filename, extracted_text, ai_description, tags) + VALUES ('delete', old.rowid, old.filename, old.extracted_text, old.ai_description, old.tags); + END; + + CREATE TRIGGER IF NOT EXISTS ftms_au AFTER UPDATE ON ftms_files BEGIN + INSERT INTO ftms_fts(ftms_fts, rowid, filename, extracted_text, ai_description, tags) + VALUES ('delete', old.rowid, old.filename, old.extracted_text, old.ai_description, old.tags); + INSERT INTO ftms_fts(rowid, filename, extracted_text, ai_description, tags) + VALUES (new.rowid, new.filename, new.extracted_text, new.ai_description, new.tags); + END;", + ).context("Failed to init FTMS schema")?; + Ok(()) + } + + /// Insert a new file record. + pub fn insert(&self, record: &FileRecord) -> Result<()> { + let conn = self.conn.lock(); + conn.execute( + "INSERT INTO ftms_files (id, filename, mime_type, file_path, file_size, + extracted_text, ai_description, session_id, channel, uploaded_at, tags) + VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11)", + params![ + record.id, record.filename, record.mime_type, record.file_path, + record.file_size, record.extracted_text, record.ai_description, + record.session_id, record.channel, record.uploaded_at, record.tags, + ], + ).context("Failed to insert file record")?; + Ok(()) + } + + /// Update extracted text and AI description (for async processing). 
+ pub fn update_content(&self, id: &str, text: Option<&str>, description: Option<&str>) -> Result<()> { + let conn = self.conn.lock(); + conn.execute( + "UPDATE ftms_files SET extracted_text = ?1, ai_description = ?2 WHERE id = ?3", + params![text, description, id], + ).context("Failed to update file content")?; + Ok(()) + } + + /// Get a file record by ID. + pub fn get(&self, id: &str) -> Result> { + let conn = self.conn.lock(); + let mut stmt = conn.prepare( + "SELECT id, filename, mime_type, file_path, file_size, extracted_text, + ai_description, session_id, channel, uploaded_at, tags + FROM ftms_files WHERE id = ?1", + )?; + let result = stmt.query_row(params![id], |row| { + Ok(FileRecord { + id: row.get(0)?, + filename: row.get(1)?, + mime_type: row.get(2)?, + file_path: row.get(3)?, + file_size: row.get::<_, i64>(4)? as u64, + extracted_text: row.get(5)?, + ai_description: row.get(6)?, + session_id: row.get(7)?, + channel: row.get(8)?, + uploaded_at: row.get(9)?, + tags: row.get(10)?, + }) + }); + match result { + Ok(r) => Ok(Some(r)), + Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None), + Err(e) => Err(e.into()), + } + } + + /// List files with pagination, optionally filtered by session_id or mime_type. + pub fn list( + &self, + offset: usize, + limit: usize, + session_id: Option<&str>, + mime_prefix: Option<&str>, + ) -> Result { + let conn = self.conn.lock(); + + // Build dynamic query + let (where_sql, count_params, query_params) = Self::build_filter( + session_id, mime_prefix, offset, limit, + ); + + let count: usize = conn.query_row( + &format!("SELECT COUNT(*) FROM ftms_files {}", where_sql), + rusqlite::params_from_iter(&count_params), + |row| row.get(0), + )?; + + let sql = format!( + "SELECT id, filename, mime_type, file_path, file_size, extracted_text, + ai_description, session_id, channel, uploaded_at, tags + FROM ftms_files {} ORDER BY uploaded_at DESC LIMIT ? 
OFFSET ?", + where_sql, + ); + + let mut stmt = conn.prepare(&sql)?; + let rows = stmt.query_map( + rusqlite::params_from_iter(&query_params), + Self::row_to_record, + )?; + + let files: Vec = rows.filter_map(|r| r.ok()).collect(); + Ok(FileListResponse { files, total: count, offset, limit }) + } + + /// Full-text search using FTS5. + pub fn search(&self, query: &str, limit: usize) -> Result> { + let conn = self.conn.lock(); + let mut stmt = conn.prepare( + "SELECT f.id, f.filename, f.mime_type, f.file_path, f.file_size, + f.extracted_text, f.ai_description, f.session_id, f.channel, + f.uploaded_at, f.tags, ftms_fts.rank + FROM ftms_fts + JOIN ftms_files f ON f.rowid = ftms_fts.rowid + WHERE ftms_fts MATCH ?1 + ORDER BY rank + LIMIT ?2", + )?; + let rows = stmt.query_map(params![query, limit as i64], |row| { + Ok(FileSearchResult { + file: FileRecord { + id: row.get(0)?, + filename: row.get(1)?, + mime_type: row.get(2)?, + file_path: row.get(3)?, + file_size: row.get::<_, i64>(4)? as u64, + extracted_text: row.get(5)?, + ai_description: row.get(6)?, + session_id: row.get(7)?, + channel: row.get(8)?, + uploaded_at: row.get(9)?, + tags: row.get(10)?, + }, + rank: row.get(11)?, + }) + })?; + Ok(rows.filter_map(|r| r.ok()).collect()) + } + + // Helper: build WHERE clause and params for list() + fn build_filter( + session_id: Option<&str>, + mime_prefix: Option<&str>, + offset: usize, + limit: usize, + ) -> (String, Vec, Vec) { + let mut clauses = Vec::new(); + let mut count_params = Vec::new(); + let mut query_params = Vec::new(); + + if let Some(sid) = session_id { + clauses.push("session_id = ?".to_string()); + count_params.push(sid.to_string()); + query_params.push(sid.to_string()); + } + if let Some(prefix) = mime_prefix { + clauses.push("mime_type LIKE ?".to_string()); + let like = format!("{}%", prefix); + count_params.push(like.clone()); + query_params.push(like); + } + + let where_sql = if clauses.is_empty() { + String::new() + } else { + format!("WHERE {}", 
clauses.join(" AND ")) + }; + + query_params.push(limit.to_string()); + query_params.push(offset.to_string()); + + (where_sql, count_params, query_params) + } + + fn row_to_record(row: &rusqlite::Row) -> rusqlite::Result { + Ok(FileRecord { + id: row.get(0)?, + filename: row.get(1)?, + mime_type: row.get(2)?, + file_path: row.get(3)?, + file_size: row.get::<_, i64>(4)? as u64, + extracted_text: row.get(5)?, + ai_description: row.get(6)?, + session_id: row.get(7)?, + channel: row.get(8)?, + uploaded_at: row.get(9)?, + tags: row.get(10)?, + }) + } +} +``` + +**Step 2: Add to mod.rs** + +```rust +pub mod index; +pub use index::FileIndex; +``` + +**Step 3: Verify it compiles** + +Run: `cargo check 2>&1 | tail -5` + +**Step 4: Commit** + +```bash +git add src/ftms/index.rs src/ftms/mod.rs +git commit -m "feat(ftms): add SQLite FTS5 file index" +``` + +--- + +### Task 5: Extract — Text Extraction from Files + +**Files:** +- Create: `src/ftms/extract.rs` +- Modify: `src/ftms/mod.rs` + +**Step 1: Create src/ftms/extract.rs** + +```rust +use anyhow::Result; + +/// Maximum text to extract (100KB) to avoid bloating the index. +const MAX_TEXT_LEN: usize = 102_400; + +/// Extract text content from a file based on its MIME type. +/// Returns None for binary/media files that need AI description instead. 
+pub fn extract_text(data: &[u8], mime_type: &str, _filename: &str) -> Result<Option<String>> {
+    match mime_type {
+        // Plain text types — direct UTF-8 decode
+        "text/plain" | "text/markdown" | "text/csv" | "text/html" | "text/xml"
+        | "application/json" | "application/xml" => {
+            let text = String::from_utf8_lossy(data).to_string();
+            Ok(truncate_text(text))
+        }
+
+        // PDF — use pdf-extract if available
+        "application/pdf" => extract_pdf(data),
+
+        // Images, audio, video — no text extraction, needs AI description
+        t if t.starts_with("image/") || t.starts_with("audio/") || t.starts_with("video/") => {
+            Ok(None)
+        }
+
+        // Unknown — try as UTF-8, fall back to None
+        _ => {
+            match std::str::from_utf8(data) {
+                Ok(text) if !text.trim().is_empty() => Ok(truncate_text(text.to_string())),
+                _ => Ok(None),
+            }
+        }
+    }
+}
+
+fn truncate_text(text: String) -> Option<String> {
+    if text.trim().is_empty() {
+        return None;
+    }
+    if text.len() > MAX_TEXT_LEN {
+        Some(text[..MAX_TEXT_LEN].to_string())
+    } else {
+        Some(text)
+    }
+}
+
+fn extract_pdf(data: &[u8]) -> Result<Option<String>> {
+    #[cfg(feature = "pdf")]
+    {
+        match pdf_extract::extract_text_from_mem(data) {
+            Ok(text) => Ok(truncate_text(text)),
+            _ => Ok(None),
+        }
+    }
+    #[cfg(not(feature = "pdf"))]
+    {
+        let _ = data;
+        Ok(Some("[PDF document — enable pdf feature for text extraction]".to_string()))
+    }
+}
+
+/// Guess MIME type from filename extension.
+pub fn guess_mime_type(filename: &str) -> String { + let ext = filename.rsplit('.').next().unwrap_or("").to_lowercase(); + match ext.as_str() { + "txt" => "text/plain", + "md" | "markdown" => "text/markdown", + "csv" => "text/csv", + "json" => "application/json", + "xml" => "application/xml", + "html" | "htm" => "text/html", + "pdf" => "application/pdf", + "png" => "image/png", + "jpg" | "jpeg" => "image/jpeg", + "gif" => "image/gif", + "webp" => "image/webp", + "bmp" => "image/bmp", + "svg" => "image/svg+xml", + "mp3" => "audio/mpeg", + "wav" => "audio/wav", + "ogg" => "audio/ogg", + "mp4" => "video/mp4", + "webm" => "video/webm", + "mov" => "video/quicktime", + "zip" => "application/zip", + "tar" => "application/x-tar", + "gz" => "application/gzip", + _ => "application/octet-stream", + } + .to_string() +} +``` + +**Step 2: Add to mod.rs** + +```rust +pub mod extract; +``` + +**Step 3: Verify it compiles** + +Run: `cargo check 2>&1 | tail -5` + +**Step 4: Commit** + +```bash +git add src/ftms/extract.rs src/ftms/mod.rs +git commit -m "feat(ftms): add text extraction with MIME detection" +``` + +--- + +### Task 6: Describe — AI-Powered Media Description + +**Files:** +- Create: `src/ftms/describe.rs` +- Modify: `src/ftms/mod.rs` + +**Step 1: Create src/ftms/describe.rs** + +```rust +use anyhow::Result; +use base64::Engine; + +/// Generate an AI description for a media file. +/// For images: encode as base64 data URI using ZeroClaw's [IMAGE:] marker system. +/// For audio/video: return basic metadata description. 
+pub fn describe_media( + data: &[u8], + mime_type: &str, + filename: &str, +) -> Result> { + if mime_type.starts_with("image/") { + let b64 = base64::engine::general_purpose::STANDARD.encode(data); + let data_uri = format!("data:{};base64,{}", mime_type, b64); + Ok(Some(format!( + "[Uploaded image: {}]\n[IMAGE:{}]", + filename, data_uri + ))) + } else if mime_type.starts_with("audio/") { + Ok(Some(format!( + "[Uploaded audio file: {}, size: {} bytes]", + filename, + data.len() + ))) + } else if mime_type.starts_with("video/") { + Ok(Some(format!( + "[Uploaded video file: {}, size: {} bytes]", + filename, + data.len() + ))) + } else { + Ok(None) + } +} +``` + +**Step 2: Add to mod.rs** + +```rust +pub mod describe; +``` + +**Step 3: Verify it compiles** + +Run: `cargo check 2>&1 | tail -5` + +**Step 4: Commit** + +```bash +git add src/ftms/describe.rs src/ftms/mod.rs +git commit -m "feat(ftms): add AI media description generation" +``` + +--- + +### Task 7: FTMS Service — Orchestrator in mod.rs + +**Files:** +- Modify: `src/ftms/mod.rs` (add FtmsService) + +**Step 1: Update src/ftms/mod.rs with FtmsService** + +```rust +//! FTMS — File/Text Management System +//! +//! Handles file upload, storage, text extraction, AI description, +//! and full-text search indexing. + +pub mod schema; +pub mod storage; +pub mod index; +pub mod extract; +pub mod describe; + +pub use schema::{FileRecord, FileMetadata, FileSearchResult, FileListResponse}; +pub use index::FileIndex; +pub use storage::FileStorage; + +use anyhow::Result; +use chrono::Local; +use std::path::Path; +use std::sync::Arc; +use uuid::Uuid; + +/// Main FTMS service — coordinates storage, indexing, and extraction. 
+pub struct FtmsService { + pub storage: FileStorage, + pub index: Arc, +} + +impl FtmsService { + pub fn new(storage_dir: &str, workspace_dir: &Path) -> Result { + let storage = FileStorage::new(storage_dir)?; + let index = Arc::new(FileIndex::new(workspace_dir)?); + Ok(Self { storage, index }) + } + + /// Upload a file: store on disk, extract text, index metadata. + pub async fn upload( + &self, + filename: &str, + data: &[u8], + metadata: FileMetadata, + ) -> Result { + let id = Uuid::new_v4().to_string(); + let mime_type = extract::guess_mime_type(filename); + + // Store file on disk + let (rel_path, _abs_path) = self.storage.store(filename, data).await?; + + // Extract text content + let extracted_text = extract::extract_text(data, &mime_type, filename)?; + + // Generate AI description for media files + let ai_description = describe::describe_media(data, &mime_type, filename)?; + + let record = FileRecord { + id, + filename: filename.to_string(), + mime_type, + file_path: rel_path, + file_size: data.len() as u64, + extracted_text, + ai_description, + session_id: metadata.session_id, + channel: metadata.channel, + uploaded_at: Local::now().to_rfc3339(), + tags: metadata.tags, + }; + + // Index in SQLite + self.index.insert(&record)?; + + Ok(record) + } +} +``` + +**Step 2: Verify it compiles** + +Run: `cargo check 2>&1 | tail -5` + +**Step 3: Commit** + +```bash +git add src/ftms/mod.rs +git commit -m "feat(ftms): add FtmsService orchestrator" +``` + +--- + +### Task 8: Gateway Integration — Add FTMS Routes + +**Files:** +- Modify: `src/gateway/mod.rs` (add AppState field, routes, handlers) + +**Step 1: Add FTMS to AppState** + +Find the `AppState` struct in `src/gateway/mod.rs` and add: + +```rust +ftms: Option>, +``` + +**Step 2: Initialize FTMS in run_gateway()** + +In the `run_gateway()` function, where AppState is constructed, add FTMS initialization: + +```rust +let ftms = if config.ftms.enabled { + let workspace_dir = crate::config::workspace_dir(); + 
match crate::ftms::FtmsService::new(&config.ftms.storage_dir, &workspace_dir) { + Ok(svc) => { + tracing::info!("FTMS enabled, storage: {}", config.ftms.storage_dir); + Some(Arc::new(svc)) + } + Err(e) => { + tracing::error!("FTMS init failed: {e}"); + None + } + } +} else { + None +}; +``` + +Add `ftms` to the AppState construction. + +Note: Check how `workspace_dir` is obtained in run_gateway() — it likely uses `directories::ProjectDirs` or a config path. Match the existing pattern. + +**Step 3: Add routes** + +In the Router::new() chain, add FTMS routes. The /upload route needs a larger body limit. Use axum's nested router approach: + +```rust +// Upload route with higher body limit (50MB) +let upload_router = Router::new() + .route("/upload", post(handle_ftms_upload)) + .layer(RequestBodyLimitLayer::new( + config.ftms.max_upload_size_mb * 1024 * 1024, + )) + .with_state(state.clone()); + +// Main router (existing routes + FTMS query routes) +let app = Router::new() + .route("/health", get(handle_health)) + // ... all existing routes ... + .route("/files", get(handle_ftms_list)) + .route("/files/search", get(handle_ftms_search)) + .route("/files/{id}", get(handle_ftms_get)) + .route("/files/{id}/download", get(handle_ftms_download)) + .with_state(state) + .layer(RequestBodyLimitLayer::new(MAX_BODY_SIZE)) + .layer(TimeoutLayer::new(Duration::from_secs(REQUEST_TIMEOUT_SECS))); + +// Merge upload router (its own body limit) with main router +let app = upload_router.merge(app); +``` + +Note: Axum 0.8 uses `{id}` for path params (not `:id`). + +**Step 4: Add handler functions** + +Add these handlers to `src/gateway/mod.rs`. 
Each follows the same auth pattern as `handle_webhook`: + +```rust +use axum::extract::Multipart; + +// Auth helper to reduce duplication +fn check_bearer_auth(state: &AppState, headers: &HeaderMap) -> bool { + if !state.pairing.require_pairing() { + return true; + } + let auth = headers.get(header::AUTHORIZATION) + .and_then(|v| v.to_str().ok()).unwrap_or(""); + let token = auth.strip_prefix("Bearer ").unwrap_or(""); + state.pairing.is_authenticated(token) +} + +async fn handle_ftms_upload( + State(state): State, + headers: HeaderMap, + mut multipart: Multipart, +) -> impl IntoResponse { + if !check_bearer_auth(&state, &headers) { + return (StatusCode::UNAUTHORIZED, + Json(serde_json::json!({"error": "Unauthorized"}))).into_response(); + } + + let ftms = match &state.ftms { + Some(f) => f, + None => return (StatusCode::SERVICE_UNAVAILABLE, + Json(serde_json::json!({"error": "FTMS not enabled"}))).into_response(), + }; + + let mut file_data: Option<(String, Vec)> = None; + let mut session_id: Option = None; + let mut channel: Option = None; + let mut tags: Option = None; + + while let Ok(Some(field)) = multipart.next_field().await { + let name = field.name().unwrap_or("").to_string(); + match name.as_str() { + "file" => { + let fname = field.file_name().unwrap_or("upload").to_string(); + if let Ok(bytes) = field.bytes().await { + file_data = Some((fname, bytes.to_vec())); + } + } + "session_id" => { session_id = field.text().await.ok(); } + "channel" => { channel = field.text().await.ok(); } + "tags" => { tags = field.text().await.ok(); } + _ => {} + } + } + + let (filename, data) = match file_data { + Some(d) => d, + None => return (StatusCode::BAD_REQUEST, + Json(serde_json::json!({"error": "No file field in multipart"}))).into_response(), + }; + + let metadata = crate::ftms::FileMetadata { session_id, channel, tags }; + + match ftms.upload(&filename, &data, metadata).await { + Ok(record) => (StatusCode::OK, Json(serde_json::json!(record))).into_response(), + 
Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"error": e.to_string()}))).into_response(), + } +} + +async fn handle_ftms_list( + State(state): State, + headers: HeaderMap, + Query(params): Query>, +) -> impl IntoResponse { + if !check_bearer_auth(&state, &headers) { + return (StatusCode::UNAUTHORIZED, + Json(serde_json::json!({"error": "Unauthorized"}))).into_response(); + } + let ftms = match &state.ftms { + Some(f) => f, + None => return (StatusCode::SERVICE_UNAVAILABLE, + Json(serde_json::json!({"error": "FTMS not enabled"}))).into_response(), + }; + let offset = params.get("offset").and_then(|v| v.parse().ok()).unwrap_or(0usize); + let limit = params.get("limit").and_then(|v| v.parse().ok()).unwrap_or(20usize); + let session_id = params.get("session_id").map(|s| s.as_str()); + let mime_prefix = params.get("type").map(|s| s.as_str()); + + match ftms.index.list(offset, limit, session_id, mime_prefix) { + Ok(resp) => (StatusCode::OK, Json(serde_json::json!(resp))).into_response(), + Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"error": e.to_string()}))).into_response(), + } +} + +async fn handle_ftms_search( + State(state): State, + headers: HeaderMap, + Query(params): Query>, +) -> impl IntoResponse { + if !check_bearer_auth(&state, &headers) { + return (StatusCode::UNAUTHORIZED, + Json(serde_json::json!({"error": "Unauthorized"}))).into_response(); + } + let ftms = match &state.ftms { + Some(f) => f, + None => return (StatusCode::SERVICE_UNAVAILABLE, + Json(serde_json::json!({"error": "FTMS not enabled"}))).into_response(), + }; + let query = match params.get("q") { + Some(q) if !q.is_empty() => q.as_str(), + _ => return (StatusCode::BAD_REQUEST, + Json(serde_json::json!({"error": "Missing ?q= parameter"}))).into_response(), + }; + let limit = params.get("limit").and_then(|v| v.parse().ok()).unwrap_or(20usize); + + match ftms.index.search(query, limit) { + Ok(results) => (StatusCode::OK, 
Json(serde_json::json!(results))).into_response(), + Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"error": e.to_string()}))).into_response(), + } +} + +async fn handle_ftms_get( + State(state): State, + headers: HeaderMap, + AxumPath(id): AxumPath, +) -> impl IntoResponse { + if !check_bearer_auth(&state, &headers) { + return (StatusCode::UNAUTHORIZED, + Json(serde_json::json!({"error": "Unauthorized"}))).into_response(); + } + let ftms = match &state.ftms { + Some(f) => f, + None => return (StatusCode::SERVICE_UNAVAILABLE, + Json(serde_json::json!({"error": "FTMS not enabled"}))).into_response(), + }; + match ftms.index.get(&id) { + Ok(Some(record)) => (StatusCode::OK, Json(serde_json::json!(record))).into_response(), + Ok(None) => (StatusCode::NOT_FOUND, + Json(serde_json::json!({"error": "File not found"}))).into_response(), + Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"error": e.to_string()}))).into_response(), + } +} + +async fn handle_ftms_download( + State(state): State, + headers: HeaderMap, + AxumPath(id): AxumPath, +) -> impl IntoResponse { + if !check_bearer_auth(&state, &headers) { + return (StatusCode::UNAUTHORIZED, + Json(serde_json::json!({"error": "Unauthorized"}))).into_response(); + } + let ftms = match &state.ftms { + Some(f) => f, + None => return (StatusCode::SERVICE_UNAVAILABLE, + Json(serde_json::json!({"error": "FTMS not enabled"}))).into_response(), + }; + let record = match ftms.index.get(&id) { + Ok(Some(r)) => r, + Ok(None) => return (StatusCode::NOT_FOUND, + Json(serde_json::json!({"error": "File not found"}))).into_response(), + Err(e) => return (StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"error": e.to_string()}))).into_response(), + }; + match ftms.storage.read(&record.file_path).await { + Ok(data) => { + let headers = [ + (header::CONTENT_TYPE, record.mime_type), + (header::CONTENT_DISPOSITION, format!("attachment; filename=\"{}\"", record.filename)), + ]; + 
(StatusCode::OK, headers, data).into_response() + } + Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"error": e.to_string()}))).into_response(), + } +} +``` + +**Step 5: Check if axum multipart feature is enabled** + +In `Cargo.toml`, verify axum features include `"multipart"`. If not, add it: +```toml +axum = { version = "0.8", default-features = false, features = ["http1", "json", "tokio", "query", "ws", "macros", "multipart"] } +``` + +**Step 6: Verify it compiles** + +Run: `cargo check 2>&1 | tail -20` +Fix compilation errors iteratively. + +**Step 7: Commit** + +```bash +git add src/gateway/mod.rs src/ftms/ Cargo.toml +git commit -m "feat(ftms): integrate FTMS routes into gateway" +``` + +--- + +### Task 9: Web UI — File Upload Button + +**Files:** +- Modify: `~/zeroclaw-web/index.html` + +**Step 1: Add hidden file input and upload button** + +Add to the message input area (next to send button): + +```html + + +``` + +**Step 2: Add upload JavaScript** + +```javascript +document.getElementById('attachBtn').onclick = () => { + document.getElementById('fileInput').click(); +}; + +document.getElementById('fileInput').onchange = async (e) => { + const file = e.target.files[0]; + if (!file) return; + const formData = new FormData(); + formData.append('file', file); + formData.append('channel', 'web'); + + const resp = await fetch('/upload', { + method: 'POST', + headers: { 'Authorization': 'Bearer ' + token }, + body: formData, + }); + const result = await resp.json(); + // Display file message in chat + addFileMessage(result); + e.target.value = ''; +}; +``` + +**Step 3: Add file message bubble rendering** + +Add a `addFileMessage()` function that creates a chat bubble showing: +- Filename and size +- Thumbnail for images (using `/files/{id}/download` as src) +- AI description if available + +**Step 4: Commit** + +```bash +git add ~/zeroclaw-web/index.html +git commit -m "feat(ftms): add file upload UI to web chat" +``` + +--- + +### Task 
10: Proxy — Pass-Through for FTMS Routes + +**Files:** +- Modify: `~/zeroclaw-web/server.py` + +**Step 1: Add /upload handling to do_POST** + +In `do_POST`, add: +```python +elif self.path == "/upload": + self._handle_upload() +``` + +Add `_handle_upload()` method that reads the raw body and forwards it to `GATEWAY + "/upload"` with the same Content-Type header (multipart boundary must be preserved). + +**Step 2: Add /files routes to do_GET** + +In `do_GET`, add: +```python +elif self.path.startswith("/files"): + self._proxy_get() +``` + +The existing `_proxy_get` already forwards to `GATEWAY + self.path`, so this should work as-is. + +**Step 3: Commit** + +```bash +git add ~/zeroclaw-web/server.py +git commit -m "feat(ftms): add proxy pass-through for FTMS routes" +``` + +--- + +### Task 11: Enable, Build, Deploy, Test + +**Step 1: Enable FTMS in config** + +```bash +# SSH to Pi and add to ~/.zeroclaw/config.toml: +echo -e '\n[ftms]\nenabled = true' >> ~/.zeroclaw/config.toml +``` + +**Step 2: Build on Pi** + +```bash +cd ~/zeroclaw && cargo build --release 2>&1 | tail -10 +``` + +Note: Building on RPi4 8GB will take 10-30 minutes for a full build. Incremental builds are faster. 
+ +**Step 3: Restart services** + +```bash +systemctl --user restart zeroclaw +systemctl --user restart zeroclaw-web +``` + +**Step 4: Test upload via curl** + +```bash +echo "Hello FTMS" > /tmp/test.txt +curl -X POST http://localhost:42617/upload \ + -F "file=@/tmp/test.txt" \ + -F "channel=cli" \ + -F "session_id=test-session" +``` + +Expected: JSON response with FileRecord including `extracted_text: "Hello FTMS"` + +**Step 5: Test search** + +```bash +curl "http://localhost:42617/files/search?q=Hello" +``` + +Expected: JSON array with the test file + +**Step 6: Test list** + +```bash +curl "http://localhost:42617/files" +``` + +**Step 7: Test download** + +```bash +curl "http://localhost:42617/files/{id-from-upload}/download" -o /tmp/downloaded.txt +diff /tmp/test.txt /tmp/downloaded.txt +``` + +**Step 8: Test web UI upload** + +Open `http://192.168.0.14:8081` in browser, pair, click paperclip, upload a file. + +**Step 9: Push to fork** + +```bash +git push origin main +``` + +--- + +## Dependency Notes + +No new Cargo dependencies needed except potentially enabling the `multipart` feature on axum. 
All required crates already in Cargo.toml: +- `rusqlite` (bundled) — SQLite + FTS5 +- `axum` — HTTP routes + multipart +- `tokio` — async file I/O +- `uuid` — file IDs +- `chrono` — timestamps +- `base64` — image encoding +- `serde`/`serde_json` — serialization +- `shellexpand` — tilde expansion +- `parking_lot` — fast mutexes From 77c827526db7eea1eaa7a14f421872bcb350db1a Mon Sep 17 00:00:00 2001 From: modpunk Date: Sat, 21 Feb 2026 23:42:18 +0000 Subject: [PATCH 08/14] feat(ftms): add [ftms] config section --- src/config/mod.rs | 2 +- src/config/schema.rs | 41 +++++++++++++++++++++++++++++++++++++++++ src/onboard/wizard.rs | 2 ++ 3 files changed, 44 insertions(+), 1 deletion(-) diff --git a/src/config/mod.rs b/src/config/mod.rs index c40053d453..5243e4d127 100644 --- a/src/config/mod.rs +++ b/src/config/mod.rs @@ -8,7 +8,7 @@ pub use schema::{ ChannelsConfig, ClassificationRule, ComposioConfig, Config, CostConfig, CronConfig, DelegateAgentConfig, DiscordConfig, DockerRuntimeConfig, EmbeddingRouteConfig, GatewayConfig, HardwareConfig, HardwareTransport, HeartbeatConfig, HttpRequestConfig, IMessageConfig, - IdentityConfig, LarkConfig, MatrixConfig, MemoryConfig, ModelRouteConfig, MultimodalConfig, + IdentityConfig, LarkConfig, MatrixConfig, MemoryConfig, ModelRouteConfig, MultimodalConfig, FtmsConfig, NextcloudTalkConfig, ObservabilityConfig, PeripheralBoardConfig, PeripheralsConfig, ProxyConfig, ProxyScope, QueryClassificationConfig, ReliabilityConfig, ResourceLimitsConfig, RuntimeConfig, SandboxBackend, SandboxConfig, SchedulerConfig, SecretsConfig, SecurityConfig, diff --git a/src/config/schema.rs b/src/config/schema.rs index cb7ad82f1d..892eb190c8 100644 --- a/src/config/schema.rs +++ b/src/config/schema.rs @@ -159,6 +159,10 @@ pub struct Config { #[serde(default)] pub multimodal: MultimodalConfig, + /// FTMS (File/Text Management System) configuration (`[ftms]`). + #[serde(default)] + pub ftms: FtmsConfig, + /// Web search tool configuration (`[web_search]`). 
#[serde(default)] pub web_search: WebSearchConfig, @@ -430,6 +434,42 @@ impl Default for MultimodalConfig { } } + +// ── FTMS (File/Text Management System) ────────────────────────── + +fn default_ftms_max_upload_size_mb() -> usize { 50 } +fn default_ftms_storage_dir() -> String { "~/.zeroclaw/files".to_string() } + +/// FTMS configuration (`[ftms]` section). +/// +/// Controls file upload, storage, text extraction, and full-text search. +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] +pub struct FtmsConfig { + /// Enable the FTMS subsystem. Default: false. + #[serde(default)] + pub enabled: bool, + /// Maximum upload size in megabytes. Default: 50. + #[serde(default = "default_ftms_max_upload_size_mb")] + pub max_upload_size_mb: usize, + /// Base directory for file storage. Default: ~/.zeroclaw/files. + #[serde(default = "default_ftms_storage_dir")] + pub storage_dir: String, + /// Automatically generate AI descriptions for media files. Default: true. + #[serde(default = "default_true")] + pub auto_describe: bool, +} + +impl Default for FtmsConfig { + fn default() -> Self { + Self { + enabled: false, + max_upload_size_mb: 50, + storage_dir: default_ftms_storage_dir(), + auto_describe: true, + } + } +} + // ── Identity (AIEOS / OpenClaw format) ────────────────────────── /// Identity format configuration (`[identity]` section). 
@@ -2841,6 +2881,7 @@ impl Default for Config { browser: BrowserConfig::default(), http_request: HttpRequestConfig::default(), multimodal: MultimodalConfig::default(), + ftms: FtmsConfig::default(), web_search: WebSearchConfig::default(), proxy: ProxyConfig::default(), identity: IdentityConfig::default(), diff --git a/src/onboard/wizard.rs b/src/onboard/wizard.rs index 8866084988..e577105994 100644 --- a/src/onboard/wizard.rs +++ b/src/onboard/wizard.rs @@ -177,6 +177,7 @@ pub async fn run_wizard(force: bool) -> Result { browser: BrowserConfig::default(), http_request: crate::config::HttpRequestConfig::default(), multimodal: crate::config::MultimodalConfig::default(), + ftms: crate::config::FtmsConfig::default(), web_search: crate::config::WebSearchConfig::default(), proxy: crate::config::ProxyConfig::default(), identity: crate::config::IdentityConfig::default(), @@ -420,6 +421,7 @@ async fn run_quick_setup_with_home( browser: BrowserConfig::default(), http_request: crate::config::HttpRequestConfig::default(), multimodal: crate::config::MultimodalConfig::default(), + ftms: crate::config::FtmsConfig::default(), web_search: crate::config::WebSearchConfig::default(), proxy: crate::config::ProxyConfig::default(), identity: crate::config::IdentityConfig::default(), From ece7ff6ee8d5d0f2a077de671ba0b18cba22ae84 Mon Sep 17 00:00:00 2001 From: modpunk Date: Sat, 21 Feb 2026 23:44:22 +0000 Subject: [PATCH 09/14] feat(ftms): add schema types and module skeleton --- src/ftms/mod.rs | 8 ++++++++ src/ftms/schema.rs | 41 +++++++++++++++++++++++++++++++++++++++++ src/lib.rs | 1 + 3 files changed, 50 insertions(+) create mode 100644 src/ftms/mod.rs create mode 100644 src/ftms/schema.rs diff --git a/src/ftms/mod.rs b/src/ftms/mod.rs new file mode 100644 index 0000000000..db74fb8940 --- /dev/null +++ b/src/ftms/mod.rs @@ -0,0 +1,8 @@ +//! FTMS — File/Text Management System +//! +//! Handles file upload, storage, text extraction, AI description, +//! and full-text search indexing. 
+
+pub mod schema;
+
+pub use schema::{FileRecord, FileMetadata};
diff --git a/src/ftms/schema.rs b/src/ftms/schema.rs
new file mode 100644
index 0000000000..1601cbd32c
--- /dev/null
+++ b/src/ftms/schema.rs
@@ -0,0 +1,41 @@
+use serde::{Deserialize, Serialize};
+
+/// A stored file record with metadata and extracted content.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct FileRecord {
+    pub id: String,
+    pub filename: String,
+    pub mime_type: String,
+    pub file_path: String,
+    pub file_size: u64,
+    pub extracted_text: Option<String>,
+    pub ai_description: Option<String>,
+    pub session_id: Option<String>,
+    pub channel: Option<String>,
+    pub uploaded_at: String,
+    pub tags: Option<String>,
+}
+
+/// Metadata sent with an upload request (not the file bytes themselves).
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct FileMetadata {
+    pub session_id: Option<String>,
+    pub channel: Option<String>,
+    pub tags: Option<String>,
+}
+
+/// Search result with relevance score.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct FileSearchResult {
+    pub file: FileRecord,
+    pub rank: f64,
+}
+
+/// Paginated list response.
+#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct FileListResponse { + pub files: Vec, + pub total: usize, + pub offset: usize, + pub limit: usize, +} diff --git a/src/lib.rs b/src/lib.rs index bf673e4a24..c294bc0741 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -47,6 +47,7 @@ pub(crate) mod cost; pub(crate) mod cron; pub(crate) mod daemon; pub(crate) mod doctor; +pub(crate) mod ftms; pub mod gateway; pub(crate) mod hardware; pub(crate) mod health; From 65c7487679c2dd607af008e47ad0183d2ab01d4b Mon Sep 17 00:00:00 2001 From: modpunk Date: Sat, 21 Feb 2026 23:58:29 +0000 Subject: [PATCH 10/14] feat(ftms): add file storage with date-organized directories --- src/ftms/mod.rs | 1 + src/ftms/storage.rs | 67 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 68 insertions(+) create mode 100644 src/ftms/storage.rs diff --git a/src/ftms/mod.rs b/src/ftms/mod.rs index db74fb8940..e63252bffd 100644 --- a/src/ftms/mod.rs +++ b/src/ftms/mod.rs @@ -4,5 +4,6 @@ //! and full-text search indexing. pub mod schema; +pub mod storage; pub use schema::{FileRecord, FileMetadata}; diff --git a/src/ftms/storage.rs b/src/ftms/storage.rs new file mode 100644 index 0000000000..f8484df9a8 --- /dev/null +++ b/src/ftms/storage.rs @@ -0,0 +1,67 @@ +use anyhow::{Context, Result}; +use chrono::Local; +use std::path::{Path, PathBuf}; +use tokio::fs; +use uuid::Uuid; + +/// Manages file storage on disk, organized by date. +pub struct FileStorage { + base_dir: PathBuf, +} + +impl FileStorage { + pub fn new(base_dir: &str) -> Result { + let expanded = shellexpand::tilde(base_dir).to_string(); + let base = PathBuf::from(expanded); + Ok(Self { base_dir: base }) + } + + /// Store file bytes, returns (relative_path, absolute_path). 
+ pub async fn store( + &self, + original_filename: &str, + data: &[u8], + ) -> Result<(String, PathBuf)> { + let now = Local::now(); + let date_dir = now.format("%Y/%m/%d").to_string(); + let abs_dir = self.base_dir.join(&date_dir); + fs::create_dir_all(&abs_dir) + .await + .context("Failed to create date directory")?; + + let ext = Path::new(original_filename) + .extension() + .and_then(|e| e.to_str()) + .unwrap_or("bin"); + let file_id = Uuid::new_v4().to_string(); + let stored_name = format!("{}.{}", file_id, ext); + + let abs_path = abs_dir.join(&stored_name); + fs::write(&abs_path, data) + .await + .context("Failed to write file")?; + + let rel_path = format!("{}/{}", date_dir, stored_name); + Ok((rel_path, abs_path)) + } + + /// Read file bytes by relative path. + pub async fn read(&self, rel_path: &str) -> Result> { + let abs = self.base_dir.join(rel_path); + fs::read(&abs).await.context("Failed to read file") + } + + /// Delete a file by relative path. + pub async fn delete(&self, rel_path: &str) -> Result<()> { + let abs = self.base_dir.join(rel_path); + if abs.exists() { + fs::remove_file(&abs).await.context("Failed to delete file")?; + } + Ok(()) + } + + /// Get absolute path for a relative path. 
+ pub fn absolute_path(&self, rel_path: &str) -> PathBuf { + self.base_dir.join(rel_path) + } +} From bccc33bd202a5c387602fee987c47a00785771cb Mon Sep 17 00:00:00 2001 From: modpunk Date: Sun, 22 Feb 2026 01:11:42 +0000 Subject: [PATCH 11/14] feat(ftms): add SQLite FTS5 index, text extraction, and media description --- src/ftms/describe.rs | 34 ++++++ src/ftms/extract.rs | 90 +++++++++++++++ src/ftms/index.rs | 255 +++++++++++++++++++++++++++++++++++++++++++ src/ftms/mod.rs | 4 + 4 files changed, 383 insertions(+) create mode 100644 src/ftms/describe.rs create mode 100644 src/ftms/extract.rs create mode 100644 src/ftms/index.rs diff --git a/src/ftms/describe.rs b/src/ftms/describe.rs new file mode 100644 index 0000000000..dbfe8dc03c --- /dev/null +++ b/src/ftms/describe.rs @@ -0,0 +1,34 @@ +use anyhow::Result; +use base64::Engine; + +/// Generate an AI description for a media file. +/// For images: encode as base64 data URI using ZeroClaw's [IMAGE:] marker system. +/// For audio/video: return basic metadata description. +pub fn describe_media( + data: &[u8], + mime_type: &str, + filename: &str, +) -> Result> { + if mime_type.starts_with("image/") { + let b64 = base64::engine::general_purpose::STANDARD.encode(data); + let data_uri = format!("data:{};base64,{}", mime_type, b64); + Ok(Some(format!( + "[Uploaded image: {}]\n[IMAGE:{}]", + filename, data_uri + ))) + } else if mime_type.starts_with("audio/") { + Ok(Some(format!( + "[Uploaded audio file: {}, size: {} bytes]", + filename, + data.len() + ))) + } else if mime_type.starts_with("video/") { + Ok(Some(format!( + "[Uploaded video file: {}, size: {} bytes]", + filename, + data.len() + ))) + } else { + Ok(None) + } +} diff --git a/src/ftms/extract.rs b/src/ftms/extract.rs new file mode 100644 index 0000000000..cc21913b16 --- /dev/null +++ b/src/ftms/extract.rs @@ -0,0 +1,90 @@ +use anyhow::Result; + +/// Maximum text to extract (100KB) to avoid bloating the index. 
+const MAX_TEXT_LEN: usize = 102_400;
+
+/// Extract text content from a file based on its MIME type.
+/// Returns None for binary/media files that need AI description instead.
+pub fn extract_text(data: &[u8], mime_type: &str, _filename: &str) -> Result<Option<String>> {
+    match mime_type {
+        // Plain text types — direct UTF-8 decode
+        "text/plain" | "text/markdown" | "text/csv" | "text/html" | "text/xml"
+        | "application/json" | "application/xml" => {
+            let text = String::from_utf8_lossy(data).to_string();
+            Ok(truncate_text(text))
+        }
+
+        // PDF — use pdf-extract if available
+        "application/pdf" => extract_pdf(data),
+
+        // Images, audio, video — no text extraction, needs AI description
+        t if t.starts_with("image/") || t.starts_with("audio/") || t.starts_with("video/") => {
+            Ok(None)
+        }
+
+        // Unknown — try as UTF-8, fall back to None
+        _ => {
+            match std::str::from_utf8(data) {
+                Ok(text) if !text.trim().is_empty() => Ok(truncate_text(text.to_string())),
+                _ => Ok(None),
+            }
+        }
+    }
+}
+
+fn truncate_text(text: String) -> Option<String> {
+    if text.trim().is_empty() {
+        return None;
+    }
+    if text.len() > MAX_TEXT_LEN {
+        Some(text[..MAX_TEXT_LEN].to_string())
+    } else {
+        Some(text)
+    }
+}
+
+fn extract_pdf(data: &[u8]) -> Result<Option<String>> {
+    #[cfg(feature = "pdf")]
+    {
+        match pdf_extract::extract_text_from_mem(data) {
+            Ok(text) => Ok(truncate_text(text)),
+            _ => Ok(None),
+        }
+    }
+    #[cfg(not(feature = "pdf"))]
+    {
+        let _ = data;
+        Ok(Some("[PDF document — enable pdf feature for text extraction]".to_string()))
+    }
+}
+
+/// Guess MIME type from filename extension.
+pub fn guess_mime_type(filename: &str) -> String { + let ext = filename.rsplit('.').next().unwrap_or("").to_lowercase(); + match ext.as_str() { + "txt" => "text/plain", + "md" | "markdown" => "text/markdown", + "csv" => "text/csv", + "json" => "application/json", + "xml" => "application/xml", + "html" | "htm" => "text/html", + "pdf" => "application/pdf", + "png" => "image/png", + "jpg" | "jpeg" => "image/jpeg", + "gif" => "image/gif", + "webp" => "image/webp", + "bmp" => "image/bmp", + "svg" => "image/svg+xml", + "mp3" => "audio/mpeg", + "wav" => "audio/wav", + "ogg" => "audio/ogg", + "mp4" => "video/mp4", + "webm" => "video/webm", + "mov" => "video/quicktime", + "zip" => "application/zip", + "tar" => "application/x-tar", + "gz" => "application/gzip", + _ => "application/octet-stream", + } + .to_string() +} diff --git a/src/ftms/index.rs b/src/ftms/index.rs new file mode 100644 index 0000000000..395eb3bcea --- /dev/null +++ b/src/ftms/index.rs @@ -0,0 +1,255 @@ +use super::schema::{FileRecord, FileSearchResult, FileListResponse}; +use anyhow::{Context, Result}; +use parking_lot::Mutex; +use rusqlite::{params, Connection}; +use std::path::Path; +use std::sync::Arc; + +/// SQLite-backed file index with FTS5 full-text search. 
+pub struct FileIndex {
+    conn: Arc<Mutex<Connection>>,
+}
+
+impl FileIndex {
+    pub fn new(workspace_dir: &Path) -> Result<Self> {
+        let db_dir = workspace_dir.join("ftms");
+        std::fs::create_dir_all(&db_dir)?;
+        let db_path = db_dir.join("ftms.db");
+        let conn = Connection::open(&db_path)
+            .context("Failed to open ftms.db")?;
+
+        conn.execute_batch(
+            "PRAGMA journal_mode = WAL;
+             PRAGMA synchronous = NORMAL;
+             PRAGMA cache_size = -2000;
+             PRAGMA temp_store = MEMORY;",
+        )?;
+
+        Self::init_schema(&conn)?;
+        Ok(Self { conn: Arc::new(Mutex::new(conn)) })
+    }
+
+    fn init_schema(conn: &Connection) -> Result<()> {
+        conn.execute_batch(
+            "CREATE TABLE IF NOT EXISTS ftms_files (
+                id TEXT PRIMARY KEY,
+                filename TEXT NOT NULL,
+                mime_type TEXT NOT NULL,
+                file_path TEXT NOT NULL,
+                file_size INTEGER NOT NULL,
+                extracted_text TEXT,
+                ai_description TEXT,
+                session_id TEXT,
+                channel TEXT,
+                uploaded_at TEXT NOT NULL,
+                tags TEXT
+            );
+
+            CREATE INDEX IF NOT EXISTS idx_ftms_session ON ftms_files(session_id);
+            CREATE INDEX IF NOT EXISTS idx_ftms_uploaded ON ftms_files(uploaded_at);
+            CREATE INDEX IF NOT EXISTS idx_ftms_mime ON ftms_files(mime_type);
+
+            CREATE VIRTUAL TABLE IF NOT EXISTS ftms_fts USING fts5(
+                filename, extracted_text, ai_description, tags,
+                content='ftms_files', content_rowid='rowid'
+            );
+
+            CREATE TRIGGER IF NOT EXISTS ftms_ai AFTER INSERT ON ftms_files BEGIN
+                INSERT INTO ftms_fts(rowid, filename, extracted_text, ai_description, tags)
+                VALUES (new.rowid, new.filename, new.extracted_text, new.ai_description, new.tags);
+            END;
+
+            CREATE TRIGGER IF NOT EXISTS ftms_ad AFTER DELETE ON ftms_files BEGIN
+                INSERT INTO ftms_fts(ftms_fts, rowid, filename, extracted_text, ai_description, tags)
+                VALUES ('delete', old.rowid, old.filename, old.extracted_text, old.ai_description, old.tags);
+            END;
+
+            CREATE TRIGGER IF NOT EXISTS ftms_au AFTER UPDATE ON ftms_files BEGIN
+                INSERT INTO ftms_fts(ftms_fts, rowid, filename, extracted_text, ai_description, tags)
+                VALUES ('delete',
old.rowid, old.filename, old.extracted_text, old.ai_description, old.tags);
+                INSERT INTO ftms_fts(rowid, filename, extracted_text, ai_description, tags)
+                VALUES (new.rowid, new.filename, new.extracted_text, new.ai_description, new.tags);
+            END;",
+        ).context("Failed to init FTMS schema")?;
+        Ok(())
+    }
+
+    /// Insert a new file record.
+    pub fn insert(&self, record: &FileRecord) -> Result<()> {
+        let conn = self.conn.lock();
+        conn.execute(
+            "INSERT INTO ftms_files (id, filename, mime_type, file_path, file_size,
+                extracted_text, ai_description, session_id, channel, uploaded_at, tags)
+             VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11)",
+            params![
+                record.id, record.filename, record.mime_type, record.file_path,
+                record.file_size, record.extracted_text, record.ai_description,
+                record.session_id, record.channel, record.uploaded_at, record.tags,
+            ],
+        ).context("Failed to insert file record")?;
+        Ok(())
+    }
+
+    /// Update extracted text and AI description (for async processing).
+    pub fn update_content(&self, id: &str, text: Option<&str>, description: Option<&str>) -> Result<()> {
+        let conn = self.conn.lock();
+        conn.execute(
+            "UPDATE ftms_files SET extracted_text = ?1, ai_description = ?2 WHERE id = ?3",
+            params![text, description, id],
+        ).context("Failed to update file content")?;
+        Ok(())
+    }
+
+    /// Get a file record by ID.
+    pub fn get(&self, id: &str) -> Result<Option<FileRecord>> {
+        let conn = self.conn.lock();
+        let mut stmt = conn.prepare(
+            "SELECT id, filename, mime_type, file_path, file_size, extracted_text,
+                ai_description, session_id, channel, uploaded_at, tags
+             FROM ftms_files WHERE id = ?1",
+        )?;
+        let result = stmt.query_row(params![id], |row| {
+            Ok(FileRecord {
+                id: row.get(0)?,
+                filename: row.get(1)?,
+                mime_type: row.get(2)?,
+                file_path: row.get(3)?,
+                file_size: row.get::<_, i64>(4)?
 as u64,
+                extracted_text: row.get(5)?,
+                ai_description: row.get(6)?,
+                session_id: row.get(7)?,
+                channel: row.get(8)?,
+                uploaded_at: row.get(9)?,
+                tags: row.get(10)?,
+            })
+        });
+        match result {
+            Ok(r) => Ok(Some(r)),
+            Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
+            Err(e) => Err(e.into()),
+        }
+    }
+
+    /// List files with pagination, optionally filtered by session_id or mime_type.
+    pub fn list(
+        &self,
+        offset: usize,
+        limit: usize,
+        session_id: Option<&str>,
+        mime_prefix: Option<&str>,
+    ) -> Result<FileListResponse> {
+        let conn = self.conn.lock();
+
+        // Build dynamic query
+        let (where_sql, count_params, query_params) = Self::build_filter(
+            session_id, mime_prefix, offset, limit,
+        );
+
+        let count: usize = conn.query_row(
+            &format!("SELECT COUNT(*) FROM ftms_files {}", where_sql),
+            rusqlite::params_from_iter(&count_params),
+            |row| row.get(0),
+        )?;
+
+        let sql = format!(
+            "SELECT id, filename, mime_type, file_path, file_size, extracted_text,
+                ai_description, session_id, channel, uploaded_at, tags
+             FROM ftms_files {} ORDER BY uploaded_at DESC LIMIT ? OFFSET ?",
+            where_sql,
+        );
+
+        let mut stmt = conn.prepare(&sql)?;
+        let rows = stmt.query_map(
+            rusqlite::params_from_iter(&query_params),
+            Self::row_to_record,
+        )?;
+
+        let files: Vec<FileRecord> = rows.filter_map(|r| r.ok()).collect();
+        Ok(FileListResponse { files, total: count, offset, limit })
+    }
+
+    /// Full-text search using FTS5.
+    pub fn search(&self, query: &str, limit: usize) -> Result<Vec<FileSearchResult>> {
+        let conn = self.conn.lock();
+        let mut stmt = conn.prepare(
+            "SELECT f.id, f.filename, f.mime_type, f.file_path, f.file_size,
+                f.extracted_text, f.ai_description, f.session_id, f.channel,
+                f.uploaded_at, f.tags, ftms_fts.rank
+             FROM ftms_fts
+             JOIN ftms_files f ON f.rowid = ftms_fts.rowid
+             WHERE ftms_fts MATCH ?1
+             ORDER BY rank
+             LIMIT ?2",
+        )?;
+        let rows = stmt.query_map(params![query, limit as i64], |row| {
+            Ok(FileSearchResult {
+                file: FileRecord {
+                    id: row.get(0)?,
+                    filename: row.get(1)?,
+                    mime_type: row.get(2)?,
+                    file_path: row.get(3)?,
+                    file_size: row.get::<_, i64>(4)? as u64,
+                    extracted_text: row.get(5)?,
+                    ai_description: row.get(6)?,
+                    session_id: row.get(7)?,
+                    channel: row.get(8)?,
+                    uploaded_at: row.get(9)?,
+                    tags: row.get(10)?,
+                },
+                rank: row.get(11)?,
+            })
+        })?;
+        Ok(rows.filter_map(|r| r.ok()).collect())
+    }
+
+    // Helper: build WHERE clause and params for list()
+    fn build_filter(
+        session_id: Option<&str>,
+        mime_prefix: Option<&str>,
+        offset: usize,
+        limit: usize,
+    ) -> (String, Vec<String>, Vec<String>) {
+        let mut clauses = Vec::new();
+        let mut count_params = Vec::new();
+        let mut query_params = Vec::new();
+
+        if let Some(sid) = session_id {
+            clauses.push("session_id = ?".to_string());
+            count_params.push(sid.to_string());
+            query_params.push(sid.to_string());
+        }
+        if let Some(prefix) = mime_prefix {
+            clauses.push("mime_type LIKE ?".to_string());
+            let like = format!("{}%", prefix);
+            count_params.push(like.clone());
+            query_params.push(like);
+        }
+
+        let where_sql = if clauses.is_empty() {
+            String::new()
+        } else {
+            format!("WHERE {}", clauses.join(" AND "))
+        };
+
+        query_params.push(limit.to_string());
+        query_params.push(offset.to_string());
+
+        (where_sql, count_params, query_params)
+    }
+
+    fn row_to_record(row: &rusqlite::Row) -> rusqlite::Result<FileRecord> {
+        Ok(FileRecord {
+            id: row.get(0)?,
+            filename: row.get(1)?,
+            mime_type: row.get(2)?,
+            file_path: row.get(3)?,
+            file_size: row.get::<_, i64>(4)? as u64,
+            extracted_text: row.get(5)?,
+            ai_description: row.get(6)?,
+            session_id: row.get(7)?,
+            channel: row.get(8)?,
+            uploaded_at: row.get(9)?,
+            tags: row.get(10)?,
+        })
+    }
+}
diff --git a/src/ftms/mod.rs b/src/ftms/mod.rs
index e63252bffd..696c3c5b4b 100644
--- a/src/ftms/mod.rs
+++ b/src/ftms/mod.rs
@@ -5,5 +5,9 @@
 pub mod schema;
 pub mod storage;
+pub mod index;
+pub mod extract;
+pub mod describe;
 
 pub use schema::{FileRecord, FileMetadata};
+pub use index::FileIndex;

From b15a332ddfe99c85602c8055ac9c6504eba328dc Mon Sep 17 00:00:00 2001
From: modpunk
Date: Sun, 22 Feb 2026 01:14:16 +0000
Subject: [PATCH 12/14] feat(ftms): add FtmsService orchestrator

---
 src/ftms/mod.rs | 62 ++++++++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 61 insertions(+), 1 deletion(-)

diff --git a/src/ftms/mod.rs b/src/ftms/mod.rs
index 696c3c5b4b..3d48a019bb 100644
--- a/src/ftms/mod.rs
+++ b/src/ftms/mod.rs
@@ -9,5 +9,65 @@ pub mod index;
 pub mod extract;
 pub mod describe;
 
-pub use schema::{FileRecord, FileMetadata};
+pub use schema::{FileRecord, FileMetadata, FileSearchResult, FileListResponse};
 pub use index::FileIndex;
+pub use storage::FileStorage;
+
+use anyhow::Result;
+use chrono::Local;
+use std::path::Path;
+use std::sync::Arc;
+use uuid::Uuid;
+
+/// Main FTMS service — coordinates storage, indexing, and extraction.
+pub struct FtmsService {
+    pub storage: FileStorage,
+    pub index: Arc<FileIndex>,
+}
+
+impl FtmsService {
+    pub fn new(storage_dir: &str, workspace_dir: &Path) -> Result<Self> {
+        let storage = FileStorage::new(storage_dir)?;
+        let index = Arc::new(FileIndex::new(workspace_dir)?);
+        Ok(Self { storage, index })
+    }
+
+    /// Upload a file: store on disk, extract text, index metadata.
+    pub async fn upload(
+        &self,
+        filename: &str,
+        data: &[u8],
+        metadata: FileMetadata,
+    ) -> Result<FileRecord> {
+        let id = Uuid::new_v4().to_string();
+        let mime_type = extract::guess_mime_type(filename);
+
+        // Store file on disk
+        let (rel_path, _abs_path) = self.storage.store(filename, data).await?;
+
+        // Extract text content
+        let extracted_text = extract::extract_text(data, &mime_type, filename)?;
+
+        // Generate AI description for media files
+        let ai_description = describe::describe_media(data, &mime_type, filename)?;
+
+        let record = FileRecord {
+            id,
+            filename: filename.to_string(),
+            mime_type,
+            file_path: rel_path,
+            file_size: data.len() as u64,
+            extracted_text,
+            ai_description,
+            session_id: metadata.session_id,
+            channel: metadata.channel,
+            uploaded_at: Local::now().to_rfc3339(),
+            tags: metadata.tags,
+        };
+
+        // Index in SQLite
+        self.index.insert(&record)?;
+
+        Ok(record)
+    }
+}

From acdc0c597a5cb57889f349f6599a17e4a4c453bd Mon Sep 17 00:00:00 2001
From: modpunk
Date: Sun, 22 Feb 2026 01:24:14 +0000
Subject: [PATCH 13/14] feat(ftms): integrate FTMS routes into gateway

---
 Cargo.lock          |  26 +++-
 Cargo.toml          |   2 +-
 src/ftms/extract.rs |   4 +-
 src/gateway/mod.rs  | 330 +++++++++++++++++++++++++++++++++++++++++++-
 4 files changed, 357 insertions(+), 5 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index a16de6f823..fc36d0601f 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -408,6 +408,7 @@ dependencies = [
 "matchit",
 "memchr",
 "mime",
+ "multer",
 "percent-encoding",
 "pin-project-lite",
 "serde_core",
@@ -3597,6 +3598,23 @@ dependencies = [
 "pxfm",
 ]
 
+[[package]]
+name = "multer"
+version = "3.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "83e87776546dc87511aa5ee218730c92b666d7264ab6ed41f9d215af9cd5224b"
+dependencies = [
+ "bytes",
+ "encoding_rs",
+ "futures-util",
+ "http 1.4.0",
+ "httparse",
+ "memchr",
+ "mime",
+ "spin",
+ "version_check",
+]
+
 [[package]]
 name = "multimap"
 version = "0.10.1"
@@ -5617,6 +5635,12 @@
dependencies = [ "windows-sys 0.60.2", ] +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + [[package]] name = "spki" version = "0.7.3" @@ -7107,7 +7131,7 @@ checksum = "24d643ce3fd3e5b54854602a080f34fb10ab75e0b813ee32d00ca2b44fa74762" dependencies = [ "either", "env_home", - "rustix 1.1.3", + "rustix", "winsafe", ] diff --git a/Cargo.toml b/Cargo.toml index 23d55b1686..1b3cd3b54d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -125,7 +125,7 @@ mail-parser = "0.11.2" async-imap = { version = "0.11",features = ["runtime-tokio"], default-features = false } # HTTP server (gateway) — replaces raw TCP for proper HTTP/1.1 compliance -axum = { version = "0.8", default-features = false, features = ["http1", "json", "tokio", "query", "ws", "macros"] } +axum = { version = "0.8", default-features = false, features = ["http1", "json", "tokio", "query", "ws", "macros", "multipart"] } tower = { version = "0.5", default-features = false } tower-http = { version = "0.6", default-features = false, features = ["limit", "timeout"] } http-body-util = "0.1" diff --git a/src/ftms/extract.rs b/src/ftms/extract.rs index cc21913b16..4ceac3e3a0 100644 --- a/src/ftms/extract.rs +++ b/src/ftms/extract.rs @@ -44,14 +44,14 @@ fn truncate_text(text: String) -> Option { } fn extract_pdf(data: &[u8]) -> Result> { - #[cfg(feature = "pdf")] + #[cfg(feature = "rag-pdf")] { match pdf_extract::extract_text_from_mem(data) { Ok(text) => Ok(truncate_text(text)), _ => Ok(None), } } - #[cfg(not(feature = "pdf"))] + #[cfg(not(feature = "rag-pdf"))] { let _ = data; Ok(Some("[PDF document — enable pdf feature for text extraction]".to_string())) diff --git a/src/gateway/mod.rs b/src/gateway/mod.rs index 97890d89f5..2428754b6e 100644 --- a/src/gateway/mod.rs +++ b/src/gateway/mod.rs @@ -19,7 +19,7 @@ use crate::util::truncate_with_ellipsis; use anyhow::{Context, Result}; use 
axum::{ body::Bytes, - extract::{ConnectInfo, Query, State}, + extract::{ConnectInfo, Multipart, Path as AxumPath, Query, State}, http::{header, HeaderMap, StatusCode}, response::{IntoResponse, Json}, routing::{get, post}, @@ -290,6 +290,8 @@ pub struct AppState { pub nextcloud_talk_webhook_secret: Option>, /// Observability backend for metrics scraping pub observer: Arc, + /// FTMS (File/Text Management System) service + pub ftms: Option>, } /// Run the HTTP gateway using axum with proper HTTP/1.1 compliance. @@ -493,6 +495,22 @@ pub async fn run_gateway(host: &str, port: u16, config: Config) -> Result<()> { idempotency_max_keys, )); + // ── FTMS ────────────────────────────────────────────────── + let ftms = if config.ftms.enabled { + match crate::ftms::FtmsService::new(&config.ftms.storage_dir, &config.workspace_dir) { + Ok(svc) => { + tracing::info!("FTMS enabled, storage: {}", config.ftms.storage_dir); + Some(Arc::new(svc)) + } + Err(e) => { + tracing::error!("FTMS init failed: {e}"); + None + } + } + } else { + None + }; + // ── Tunnel ──────────────────────────────────────────────── let tunnel = crate::tunnel::create_tunnel(&config.tunnel)?; let mut tunnel_url: Option = None; @@ -527,6 +545,11 @@ pub async fn run_gateway(host: &str, port: u16, config: Config) -> Result<()> { if nextcloud_talk_channel.is_some() { println!(" POST /nextcloud-talk — Nextcloud Talk bot webhook"); } + if ftms.is_some() { + println!(" POST /upload — FTMS file upload (multipart)"); + println!(" GET /files — list uploaded files"); + println!(" GET /files/search?q= — full-text search"); + } println!(" GET /health — health check"); println!(" GET /metrics — Prometheus metrics"); if let Some(code) = pairing.pairing_code() { @@ -568,8 +591,16 @@ pub async fn run_gateway(host: &str, port: u16, config: Config) -> Result<()> { nextcloud_talk: nextcloud_talk_channel, nextcloud_talk_webhook_secret, observer, + ftms, }; + // Build FTMS upload router with higher body limit + let upload_limit = 
config.ftms.max_upload_size_mb * 1024 * 1024; + let upload_router = Router::new() + .route("/upload", post(handle_ftms_upload)) + .layer(RequestBodyLimitLayer::new(upload_limit)) + .with_state(state.clone()); + // Build router with middleware let app = Router::new() .route("/health", get(handle_health)) @@ -580,6 +611,10 @@ pub async fn run_gateway(host: &str, port: u16, config: Config) -> Result<()> { .route("/whatsapp", post(handle_whatsapp_message)) .route("/linq", post(handle_linq_webhook)) .route("/nextcloud-talk", post(handle_nextcloud_talk_webhook)) + .route("/files", get(handle_ftms_list)) + .route("/files/search", get(handle_ftms_search)) + .route("/files/{id}", get(handle_ftms_get)) + .route("/files/{id}/download", get(handle_ftms_download)) .with_state(state) .layer(RequestBodyLimitLayer::new(MAX_BODY_SIZE)) .layer(TimeoutLayer::with_status_code( @@ -587,6 +622,9 @@ pub async fn run_gateway(host: &str, port: u16, config: Config) -> Result<()> { Duration::from_secs(REQUEST_TIMEOUT_SECS), )); + // Merge FTMS upload router (its own body limit) with main router + let app = upload_router.merge(app); + // Run the server axum::serve( listener, @@ -1335,6 +1373,287 @@ async fn handle_nextcloud_talk_webhook( (StatusCode::OK, Json(serde_json::json!({"status": "ok"}))) } + + +// ══════════════════════════════════════════════════════════════════════════════ +// FTMS HANDLERS +// ══════════════════════════════════════════════════════════════════════════════ + +fn check_bearer_auth(state: &AppState, headers: &HeaderMap) -> bool { + if !state.pairing.require_pairing() { + return true; + } + let auth = headers + .get(header::AUTHORIZATION) + .and_then(|v| v.to_str().ok()) + .unwrap_or(""); + let token = auth.strip_prefix("Bearer ").unwrap_or(""); + state.pairing.is_authenticated(token) +} + +async fn handle_ftms_upload( + State(state): State, + headers: HeaderMap, + mut multipart: Multipart, +) -> impl IntoResponse { + if !check_bearer_auth(&state, &headers) { + return 
( + StatusCode::UNAUTHORIZED, + Json(serde_json::json!({"error": "Unauthorized"})), + ) + .into_response(); + } + + let ftms = match &state.ftms { + Some(f) => f, + None => { + return ( + StatusCode::SERVICE_UNAVAILABLE, + Json(serde_json::json!({"error": "FTMS not enabled"})), + ) + .into_response() + } + }; + + let mut file_data: Option<(String, Vec)> = None; + let mut session_id: Option = None; + let mut channel: Option = None; + let mut tags: Option = None; + + while let Ok(Some(field)) = multipart.next_field().await { + let name = field.name().unwrap_or("").to_string(); + match name.as_str() { + "file" => { + let fname = field.file_name().unwrap_or("upload").to_string(); + if let Ok(bytes) = field.bytes().await { + file_data = Some((fname, bytes.to_vec())); + } + } + "session_id" => { + session_id = field.text().await.ok(); + } + "channel" => { + channel = field.text().await.ok(); + } + "tags" => { + tags = field.text().await.ok(); + } + _ => {} + } + } + + let (filename, data) = match file_data { + Some(d) => d, + None => { + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({"error": "No file field in multipart"})), + ) + .into_response() + } + }; + + let metadata = crate::ftms::FileMetadata { + session_id, + channel, + tags, + }; + + match ftms.upload(&filename, &data, metadata).await { + Ok(record) => (StatusCode::OK, Json(serde_json::json!(record))).into_response(), + Err(e) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"error": e.to_string()})), + ) + .into_response(), + } +} + +async fn handle_ftms_list( + State(state): State, + headers: HeaderMap, + Query(params): Query>, +) -> impl IntoResponse { + if !check_bearer_auth(&state, &headers) { + return ( + StatusCode::UNAUTHORIZED, + Json(serde_json::json!({"error": "Unauthorized"})), + ) + .into_response(); + } + let ftms = match &state.ftms { + Some(f) => f, + None => { + return ( + StatusCode::SERVICE_UNAVAILABLE, + Json(serde_json::json!({"error": "FTMS not 
enabled"})), + ) + .into_response() + } + }; + let offset = params + .get("offset") + .and_then(|v| v.parse().ok()) + .unwrap_or(0usize); + let limit = params + .get("limit") + .and_then(|v| v.parse().ok()) + .unwrap_or(20usize); + let session_id = params.get("session_id").map(|s| s.as_str()); + let mime_prefix = params.get("type").map(|s| s.as_str()); + + match ftms.index.list(offset, limit, session_id, mime_prefix) { + Ok(resp) => (StatusCode::OK, Json(serde_json::json!(resp))).into_response(), + Err(e) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"error": e.to_string()})), + ) + .into_response(), + } +} + +async fn handle_ftms_search( + State(state): State, + headers: HeaderMap, + Query(params): Query>, +) -> impl IntoResponse { + if !check_bearer_auth(&state, &headers) { + return ( + StatusCode::UNAUTHORIZED, + Json(serde_json::json!({"error": "Unauthorized"})), + ) + .into_response(); + } + let ftms = match &state.ftms { + Some(f) => f, + None => { + return ( + StatusCode::SERVICE_UNAVAILABLE, + Json(serde_json::json!({"error": "FTMS not enabled"})), + ) + .into_response() + } + }; + let query = match params.get("q") { + Some(q) if !q.is_empty() => q.as_str(), + _ => { + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({"error": "Missing ?q= parameter"})), + ) + .into_response() + } + }; + let limit = params + .get("limit") + .and_then(|v| v.parse().ok()) + .unwrap_or(20usize); + + match ftms.index.search(query, limit) { + Ok(results) => (StatusCode::OK, Json(serde_json::json!(results))).into_response(), + Err(e) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"error": e.to_string()})), + ) + .into_response(), + } +} + +async fn handle_ftms_get( + State(state): State, + headers: HeaderMap, + AxumPath(id): AxumPath, +) -> impl IntoResponse { + if !check_bearer_auth(&state, &headers) { + return ( + StatusCode::UNAUTHORIZED, + Json(serde_json::json!({"error": "Unauthorized"})), + ) + .into_response(); + } + 
let ftms = match &state.ftms { + Some(f) => f, + None => { + return ( + StatusCode::SERVICE_UNAVAILABLE, + Json(serde_json::json!({"error": "FTMS not enabled"})), + ) + .into_response() + } + }; + match ftms.index.get(&id) { + Ok(Some(record)) => (StatusCode::OK, Json(serde_json::json!(record))).into_response(), + Ok(None) => ( + StatusCode::NOT_FOUND, + Json(serde_json::json!({"error": "File not found"})), + ) + .into_response(), + Err(e) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"error": e.to_string()})), + ) + .into_response(), + } +} + +async fn handle_ftms_download( + State(state): State, + headers: HeaderMap, + AxumPath(id): AxumPath, +) -> impl IntoResponse { + if !check_bearer_auth(&state, &headers) { + return ( + StatusCode::UNAUTHORIZED, + Json(serde_json::json!({"error": "Unauthorized"})), + ) + .into_response(); + } + let ftms = match &state.ftms { + Some(f) => f, + None => { + return ( + StatusCode::SERVICE_UNAVAILABLE, + Json(serde_json::json!({"error": "FTMS not enabled"})), + ) + .into_response() + } + }; + let record = match ftms.index.get(&id) { + Ok(Some(r)) => r, + Ok(None) => { + return ( + StatusCode::NOT_FOUND, + Json(serde_json::json!({"error": "File not found"})), + ) + .into_response() + } + Err(e) => { + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"error": e.to_string()})), + ) + .into_response() + } + }; + match ftms.storage.read(&record.file_path).await { + Ok(data) => { + let headers = [ + (header::CONTENT_TYPE, record.mime_type), + ( + header::CONTENT_DISPOSITION, + format!("attachment; filename=\"{}\"", record.filename), + ), + ]; + (StatusCode::OK, headers, data).into_response() + } + Err(e) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"error": e.to_string()})), + ) + .into_response(), + } +} + #[cfg(test)] mod tests { use super::*; @@ -1413,6 +1732,7 @@ mod tests { nextcloud_talk: None, nextcloud_talk_webhook_secret: None, observer: 
Arc::new(crate::observability::NoopObserver), + ftms: None, }; let response = handle_metrics(State(state)).await.into_response(); @@ -1458,6 +1778,7 @@ mod tests { nextcloud_talk: None, nextcloud_talk_webhook_secret: None, observer, + ftms: None, }; let response = handle_metrics(State(state)).await.into_response(); @@ -1820,6 +2141,7 @@ mod tests { nextcloud_talk: None, nextcloud_talk_webhook_secret: None, observer: Arc::new(crate::observability::NoopObserver), + ftms: None, }; let mut headers = HeaderMap::new(); @@ -1880,6 +2202,7 @@ mod tests { nextcloud_talk: None, nextcloud_talk_webhook_secret: None, observer: Arc::new(crate::observability::NoopObserver), + ftms: None, }; let headers = HeaderMap::new(); @@ -1952,6 +2275,7 @@ mod tests { nextcloud_talk: None, nextcloud_talk_webhook_secret: None, observer: Arc::new(crate::observability::NoopObserver), + ftms: None, }; let response = handle_webhook( @@ -1996,6 +2320,7 @@ mod tests { nextcloud_talk: None, nextcloud_talk_webhook_secret: None, observer: Arc::new(crate::observability::NoopObserver), + ftms: None, }; let mut headers = HeaderMap::new(); @@ -2045,6 +2370,7 @@ mod tests { nextcloud_talk: None, nextcloud_talk_webhook_secret: None, observer: Arc::new(crate::observability::NoopObserver), + ftms: None, }; let mut headers = HeaderMap::new(); @@ -2099,6 +2425,7 @@ mod tests { nextcloud_talk: None, nextcloud_talk_webhook_secret: None, observer: Arc::new(crate::observability::NoopObserver), + ftms: None, }; let response = handle_nextcloud_talk_webhook( @@ -2149,6 +2476,7 @@ mod tests { nextcloud_talk: Some(channel), nextcloud_talk_webhook_secret: Some(Arc::from(secret)), observer: Arc::new(crate::observability::NoopObserver), + ftms: None, }; let mut headers = HeaderMap::new(); From c61523cf83b176d979a56cd91b794373acd46cd2 Mon Sep 17 00:00:00 2001 From: modpunk Date: Sun, 22 Feb 2026 03:19:25 +0000 Subject: [PATCH 14/14] fix: resolve pre-existing build errors (postgres feature gate, futures-util, duplicate chat 
fn, bin ftms module) --- Cargo.toml | 2 +- src/agent/agent.rs | 2 +- src/agent/loop_.rs | 2 +- src/main.rs | 1 + src/memory/cli.rs | 5 ++ src/providers/reliable.rs | 109 -------------------------------------- 6 files changed, 9 insertions(+), 112 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 1b3cd3b54d..c714f9c7f7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -111,7 +111,7 @@ which = "7.0" # WebSocket client channels (Discord/Lark/DingTalk) tokio-tungstenite = { version = "0.28", features = ["rustls-tls-webpki-roots"] } -futures-util = { version = "0.3", default-features = false, features = ["sink"] } +futures-util = { version = "0.3", default-features = false, features = ["sink", "alloc"] } regex = "1.10" hostname = "0.4.2" rustls = "0.23" diff --git a/src/agent/agent.rs b/src/agent/agent.rs index d1affdaafa..67ef5baf75 100644 --- a/src/agent/agent.rs +++ b/src/agent/agent.rs @@ -421,7 +421,7 @@ impl Agent { .iter() .map(|call| self.execute_tool_call(call)) .collect(); - futures::future::join_all(futs).await + futures_util::future::join_all(futs).await } fn classify_model(&self, user_message: &str) -> String { diff --git a/src/agent/loop_.rs b/src/agent/loop_.rs index 0b8d251186..fbedd48355 100644 --- a/src/agent/loop_.rs +++ b/src/agent/loop_.rs @@ -1083,7 +1083,7 @@ async fn execute_tools_parallel( }) .collect(); - let results = futures::future::join_all(futures).await; + let results = futures_util::future::join_all(futures).await; results.into_iter().collect() } diff --git a/src/main.rs b/src/main.rs index 3b12e19768..52e8cd94ae 100644 --- a/src/main.rs +++ b/src/main.rs @@ -59,6 +59,7 @@ mod config; mod cron; mod daemon; mod doctor; +mod ftms; mod gateway; mod hardware; mod health; diff --git a/src/memory/cli.rs b/src/memory/cli.rs index 1683755498..556ee06743 100644 --- a/src/memory/cli.rs +++ b/src/memory/cli.rs @@ -39,6 +39,7 @@ fn create_cli_memory(config: &Config) -> Result> { MemoryBackendKind::None => { bail!("Memory backend is 'none' 
(disabled). No entries to manage."); } + #[cfg(feature = "memory-postgres")] MemoryBackendKind::Postgres => { let sp = &config.storage.provider.config; let db_url = sp @@ -53,6 +54,10 @@ fn create_cli_memory(config: &Config) -> Result> { super::PostgresMemory::new(db_url, &sp.schema, &sp.table, sp.connect_timeout_secs)?; Ok(Box::new(mem)) } + #[cfg(not(feature = "memory-postgres"))] + MemoryBackendKind::Postgres => { + bail!("memory backend 'postgres' requires the memory-postgres feature"); + } _ => create_memory_for_migration(&backend, &config.workspace_dir), } } diff --git a/src/providers/reliable.rs b/src/providers/reliable.rs index 6a8ec1a96b..c430a93a2f 100644 --- a/src/providers/reliable.rs +++ b/src/providers/reliable.rs @@ -659,115 +659,6 @@ impl Provider for ReliableProvider { .any(|(_, provider)| provider.supports_vision()) } - async fn chat( - &self, - request: ChatRequest<'_>, - model: &str, - temperature: f64, - ) -> anyhow::Result { - let models = self.model_chain(model); - let mut failures = Vec::new(); - - for current_model in &models { - for (provider_name, provider) in &self.providers { - let mut backoff_ms = self.base_backoff_ms; - - for attempt in 0..=self.max_retries { - let req = ChatRequest { - messages: request.messages, - tools: request.tools, - }; - match provider.chat(req, current_model, temperature).await { - Ok(resp) => { - if attempt > 0 || *current_model != model { - tracing::info!( - provider = provider_name, - model = *current_model, - attempt, - original_model = model, - "Provider recovered (failover/retry)" - ); - } - return Ok(resp); - } - Err(e) => { - let non_retryable_rate_limit = is_non_retryable_rate_limit(&e); - let non_retryable = is_non_retryable(&e) || non_retryable_rate_limit; - let rate_limited = is_rate_limited(&e); - let failure_reason = failure_reason(rate_limited, non_retryable); - let error_detail = compact_error_detail(&e); - - push_failure( - &mut failures, - provider_name, - current_model, - attempt + 1, - 
self.max_retries + 1, - failure_reason, - &error_detail, - ); - - if rate_limited && !non_retryable_rate_limit { - if let Some(new_key) = self.rotate_key() { - tracing::info!( - provider = provider_name, - error = %error_detail, - "Rate limited, rotated API key (key ending ...{})", - &new_key[new_key.len().saturating_sub(4)..] - ); - } - } - - if non_retryable { - tracing::warn!( - provider = provider_name, - model = *current_model, - error = %error_detail, - "Non-retryable error, moving on" - ); - - if is_context_window_exceeded(&e) { - anyhow::bail!( - "Request exceeds model context window; retries and fallbacks were skipped. Attempts:\n{}", - failures.join("\n") - ); - } - - break; - } - - if attempt < self.max_retries { - let wait = self.compute_backoff(backoff_ms, &e); - tracing::warn!( - provider = provider_name, - model = *current_model, - attempt = attempt + 1, - backoff_ms = wait, - reason = failure_reason, - error = %error_detail, - "Provider call failed, retrying" - ); - tokio::time::sleep(Duration::from_millis(wait)).await; - backoff_ms = (backoff_ms.saturating_mul(2)).min(10_000); - } - } - } - } - - tracing::warn!( - provider = provider_name, - model = *current_model, - "Exhausted retries, trying next provider/model" - ); - } - } - - anyhow::bail!( - "All providers/models failed. Attempts:\n{}", - failures.join("\n") - ) - } - async fn chat_with_tools( &self, messages: &[ChatMessage],