diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index b0e05c5..be1ff08 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -48,6 +48,7 @@ jobs: run: | VERSION="${{ steps.get_version.outputs.version }}" BUNDLE_NAME="tinyclaw-bundle.tar.gz" + CHECKSUM_NAME="tinyclaw-bundle.sha256" TEMP_DIR=$(mktemp -d) BUNDLE_DIR="$TEMP_DIR/tinyclaw" @@ -89,11 +90,17 @@ jobs: # Create tarball cd "$TEMP_DIR" tar -czf "$GITHUB_WORKSPACE/$BUNDLE_NAME" tinyclaw/ + if command -v shasum >/dev/null 2>&1; then + shasum -a 256 "$GITHUB_WORKSPACE/$BUNDLE_NAME" | awk '{print $1}' > "$GITHUB_WORKSPACE/$CHECKSUM_NAME" + else + sha256sum "$GITHUB_WORKSPACE/$BUNDLE_NAME" | awk '{print $1}' > "$GITHUB_WORKSPACE/$CHECKSUM_NAME" + fi # Get bundle info BUNDLE_SIZE=$(du -h "$GITHUB_WORKSPACE/$BUNDLE_NAME" | cut -f1) echo "Bundle created: $BUNDLE_NAME ($BUNDLE_SIZE)" echo "bundle_name=$BUNDLE_NAME" >> $GITHUB_OUTPUT + echo "checksum_name=$CHECKSUM_NAME" >> $GITHUB_OUTPUT echo "bundle_size=$BUNDLE_SIZE" >> $GITHUB_OUTPUT - name: Generate release notes @@ -138,6 +145,7 @@ jobs: body_path: release_notes.md files: | tinyclaw-bundle.tar.gz + tinyclaw-bundle.sha256 draft: false prerelease: false env: @@ -148,5 +156,7 @@ jobs: uses: actions/upload-artifact@v4 with: name: tinyclaw-bundle - path: tinyclaw-bundle.tar.gz + path: | + tinyclaw-bundle.tar.gz + tinyclaw-bundle.sha256 retention-days: 7 diff --git a/README.md b/README.md index 8490849..e7abb4f 100644 --- a/README.md +++ b/README.md @@ -25,6 +25,7 @@ Run multiple teams of AI agents that collaborate with each other simultaneously - Node.js v14+ - tmux - Bash 4.0+ (macOS: `brew install bash`) +- jq (`brew install jq` or `apt install jq`) - [Claude Code CLI](https://claude.com/claude-code) (for Anthropic provider) - [Codex CLI](https://docs.openai.com/codex) (for OpenAI provider) diff --git a/docs/INSTALL.md b/docs/INSTALL.md index 8dcd82c..4dd018e 100644 --- a/docs/INSTALL.md +++ b/docs/INSTALL.md @@ 
-9,7 +9,7 @@ curl -fsSL https://raw.githubusercontent.com/jlia0/tinyclaw/main/scripts/remote- ``` This one-line command: -- ✅ Checks all dependencies (node, npm, tmux, claude) +- ✅ Checks all dependencies (node, npm, tmux, claude, jq) - ✅ Downloads pre-built bundle (no npm install needed!) - ✅ Installs to `~/.tinyclaw` - ✅ Creates global `tinyclaw` command @@ -28,6 +28,7 @@ Before installing, ensure you have: - **npm** (comes with Node.js) - **tmux** - `sudo apt install tmux` or `brew install tmux` - **Claude Code CLI** ([claude.com/claude-code](https://claude.com/claude-code)) +- **jq** - `sudo apt install jq` or `brew install jq` **Optional:** - **git** (only needed for source install) @@ -191,6 +192,10 @@ brew install tmux # macOS # Claude Code # Visit: https://claude.com/claude-code + +# jq +sudo apt install jq # Ubuntu/Debian +brew install jq # macOS ``` ### Bundle download fails diff --git a/lib/daemon.sh b/lib/daemon.sh index 839b9cb..ab502af 100644 --- a/lib/daemon.sh +++ b/lib/daemon.sh @@ -18,23 +18,22 @@ start_daemon() { PUPPETEER_SKIP_DOWNLOAD=true npm install fi - # Build TypeScript if any src file is newer than its dist counterpart + # Build TypeScript if source changed since last build stamp. local needs_build=false - if [ ! -d "$SCRIPT_DIR/dist" ]; then + local build_stamp="$SCRIPT_DIR/dist/.build-stamp" + if [ ! -d "$SCRIPT_DIR/dist" ] || [ ! -f "$build_stamp" ]; then needs_build=true else - for ts_file in "$SCRIPT_DIR"/src/*.ts; do - local js_file="$SCRIPT_DIR/dist/$(basename "${ts_file%.ts}.js")" - if [ ! 
-f "$js_file" ] || [ "$ts_file" -nt "$js_file" ]; then - needs_build=true - break - fi - done + if find "$SCRIPT_DIR/src" -type f \( -name "*.ts" -o -name "*.tsx" \) -newer "$build_stamp" | grep -q .; then + needs_build=true + fi fi if [ "$needs_build" = true ]; then echo -e "${YELLOW}Building TypeScript...${NC}" cd "$SCRIPT_DIR" npm run build + mkdir -p "$SCRIPT_DIR/dist" + touch "$build_stamp" fi # Load settings or run setup wizard diff --git a/lib/messaging.sh b/lib/messaging.sh index a36a894..376f8db 100644 --- a/lib/messaging.sh +++ b/lib/messaging.sh @@ -9,7 +9,12 @@ send_message() { log "[$source] Sending: ${message:0:50}..." cd "$SCRIPT_DIR" - RESPONSE=$(claude --dangerously-skip-permissions -c -p "$message" 2>&1) + local claude_cmd=(claude -c -p "$message") + if [ "${TINYCLAW_ALLOW_DANGEROUS_FLAGS:-0}" = "1" ]; then + claude_cmd=(claude --dangerously-skip-permissions -c -p "$message") + log "[$source] WARNING: dangerous permissions flag enabled via TINYCLAW_ALLOW_DANGEROUS_FLAGS=1" + fi + RESPONSE=$("${claude_cmd[@]}" 2>&1) echo "$RESPONSE" diff --git a/lib/setup-wizard.sh b/lib/setup-wizard.sh index c7acfe2..3170903 100755 --- a/lib/setup-wizard.sh +++ b/lib/setup-wizard.sh @@ -17,6 +17,31 @@ echo -e "${GREEN} TinyClaw - Setup Wizard${NC}" echo -e "${BLUE}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}" echo "" +if ! 
command -v jq &> /dev/null; then + echo -e "${RED}Error: jq is required for setup.${NC}" + echo "Install with: brew install jq (macOS) or apt-get install jq (Linux)" + exit 1 +fi + +csv_to_json_array() { + local csv="$1" + local values=() + IFS=',' read -ra raw_values <<< "$csv" + for raw in "${raw_values[@]}"; do + local value + value="$(echo "$raw" | tr -d '[:space:]')" + if [ -n "$value" ]; then + values+=("$value") + fi + done + + if [ ${#values[@]} -eq 0 ]; then + jq -n '[]' + else + jq -n '$ARGS.positional' --args "${values[@]}" + fi +} + # --- Channel registry --- # To add a new channel, add its ID here and fill in the config arrays below. ALL_CHANNELS=(telegram discord whatsapp) @@ -184,13 +209,19 @@ echo "Users route messages with '@agent_id message' in chat." echo "" read -rp "Set up additional agents? [y/N]: " SETUP_AGENTS -AGENTS_JSON="" -# Always create the default agent DEFAULT_AGENT_DIR="$WORKSPACE_PATH/$DEFAULT_AGENT_NAME" # Capitalize first letter of agent name (proper bash method) DEFAULT_AGENT_DISPLAY="$(tr '[:lower:]' '[:upper:]' <<< "${DEFAULT_AGENT_NAME:0:1}")${DEFAULT_AGENT_NAME:1}" -AGENTS_JSON='"agents": {' -AGENTS_JSON="$AGENTS_JSON \"$DEFAULT_AGENT_NAME\": { \"name\": \"$DEFAULT_AGENT_DISPLAY\", \"provider\": \"$PROVIDER\", \"model\": \"$MODEL\", \"working_directory\": \"$DEFAULT_AGENT_DIR\" }" +declare -a ALL_AGENT_IDS=("$DEFAULT_AGENT_NAME") +declare -A AGENT_NAME_MAP=() +declare -A AGENT_PROVIDER_MAP=() +declare -A AGENT_MODEL_MAP=() +declare -A AGENT_DIR_MAP=() + +AGENT_NAME_MAP["$DEFAULT_AGENT_NAME"]="$DEFAULT_AGENT_DISPLAY" +AGENT_PROVIDER_MAP["$DEFAULT_AGENT_NAME"]="$PROVIDER" +AGENT_MODEL_MAP["$DEFAULT_AGENT_NAME"]="$MODEL" +AGENT_DIR_MAP["$DEFAULT_AGENT_NAME"]="$DEFAULT_AGENT_DIR" ADDITIONAL_AGENTS=() # Track additional agent IDs for directory creation @@ -240,8 +271,11 @@ if [[ "$SETUP_AGENTS" =~ ^[yY] ]]; then fi NEW_AGENT_DIR="$WORKSPACE_PATH/$NEW_AGENT_ID" - - AGENTS_JSON="$AGENTS_JSON, \"$NEW_AGENT_ID\": { \"name\":
\"$NEW_AGENT_NAME\", \"provider\": \"$NEW_PROVIDER\", \"model\": \"$NEW_MODEL\", \"working_directory\": \"$NEW_AGENT_DIR\" }" + ALL_AGENT_IDS+=("$NEW_AGENT_ID") + AGENT_NAME_MAP["$NEW_AGENT_ID"]="$NEW_AGENT_NAME" + AGENT_PROVIDER_MAP["$NEW_AGENT_ID"]="$NEW_PROVIDER" + AGENT_MODEL_MAP["$NEW_AGENT_ID"]="$NEW_MODEL" + AGENT_DIR_MAP["$NEW_AGENT_ID"]="$NEW_AGENT_DIR" # Track this agent for directory creation later ADDITIONAL_AGENTS+=("$NEW_AGENT_ID") @@ -250,59 +284,111 @@ if [[ "$SETUP_AGENTS" =~ ^[yY] ]]; then done fi -AGENTS_JSON="$AGENTS_JSON }," +# Security defaults +echo -e "${BLUE}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}" +echo -e "${GREEN} Security Defaults${NC}" +echo -e "${BLUE}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}" +echo "" +echo "To reduce abuse risk, TinyClaw enforces per-channel sender allowlists by default." +echo "Add trusted sender IDs now (comma-separated), or leave blank to block that channel until configured." +echo "" -# Build enabled channels array JSON -CHANNELS_JSON="[" -for i in "${!ENABLED_CHANNELS[@]}"; do - if [ $i -gt 0 ]; then - CHANNELS_JSON="${CHANNELS_JSON}, " - fi - CHANNELS_JSON="${CHANNELS_JSON}\"${ENABLED_CHANNELS[$i]}\"" +declare -A ALLOWED_SENDERS_JSON=() +for ch in "${ENABLED_CHANNELS[@]}"; do + read -rp "Trusted sender IDs for ${CHANNEL_DISPLAY[$ch]}: " sender_ids + ALLOWED_SENDERS_JSON["$ch"]="$(csv_to_json_array "$sender_ids")" done -CHANNELS_JSON="${CHANNELS_JSON}]" -# Build channel configs with tokens -DISCORD_TOKEN="${TOKENS[discord]:-}" -TELEGRAM_TOKEN="${TOKENS[telegram]:-}" +echo "" +read -rp "Allow dangerous agent permission-bypass flags? 
[y/N]: " ALLOW_DANGEROUS_INPUT +if [[ "$ALLOW_DANGEROUS_INPUT" =~ ^[yY] ]]; then + ALLOW_DANGEROUS_FLAGS=true +else + ALLOW_DANGEROUS_FLAGS=false +fi -# Write settings.json with layered structure -# Use jq to build valid JSON to avoid escaping issues with agent prompts -if [ "$PROVIDER" = "anthropic" ]; then - MODELS_SECTION='"models": { "provider": "anthropic", "anthropic": { "model": "'"${MODEL}"'" } }' +read -rp "Persist full team chat transcripts to disk? [y/N]: " PERSIST_CHATS_INPUT +if [[ "$PERSIST_CHATS_INPUT" =~ ^[yY] ]]; then + PERSIST_TEAM_CHATS=true else - MODELS_SECTION='"models": { "provider": "openai", "openai": { "model": "'"${MODEL}"'" } }' + PERSIST_TEAM_CHATS=false fi +echo "" -cat > "$SETTINGS_FILE" < /dev/null; then - tmp_file="$SETTINGS_FILE.tmp" - jq '.' "$SETTINGS_FILE" > "$tmp_file" 2>/dev/null && mv "$tmp_file" "$SETTINGS_FILE" -fi +# Build channel configs with tokens +DISCORD_TOKEN="${TOKENS[discord]:-}" +TELEGRAM_TOKEN="${TOKENS[telegram]:-}" + +# Build allowed_senders object (all channels present for consistency). 
+ALLOWLIST_DISCORD="${ALLOWED_SENDERS_JSON[discord]:-[]}" +ALLOWLIST_TELEGRAM="${ALLOWED_SENDERS_JSON[telegram]:-[]}" +ALLOWLIST_WHATSAPP="${ALLOWED_SENDERS_JSON[whatsapp]:-[]}" + +mkdir -p "$(dirname "$SETTINGS_FILE")" + +jq -n \ + --arg workspacePath "$WORKSPACE_PATH" \ + --arg workspaceName "$WORKSPACE_NAME" \ + --argjson channelsEnabled "$CHANNELS_ENABLED_JSON" \ + --arg discordToken "$DISCORD_TOKEN" \ + --arg telegramToken "$TELEGRAM_TOKEN" \ + --argjson agents "$AGENTS_OBJECT" \ + --arg provider "$PROVIDER" \ + --arg model "$MODEL" \ + --argjson heartbeatInterval "$HEARTBEAT_INTERVAL" \ + --argjson allowlistDiscord "$ALLOWLIST_DISCORD" \ + --argjson allowlistTelegram "$ALLOWLIST_TELEGRAM" \ + --argjson allowlistWhatsapp "$ALLOWLIST_WHATSAPP" \ + --argjson allowDangerous "$ALLOW_DANGEROUS_FLAGS" \ + --argjson persistTeamChats "$PERSIST_TEAM_CHATS" \ + '{ + workspace: { + path: $workspacePath, + name: $workspaceName + }, + channels: { + enabled: $channelsEnabled, + discord: { bot_token: $discordToken }, + telegram: { bot_token: $telegramToken }, + whatsapp: {} + }, + agents: $agents, + models: ( + if $provider == "anthropic" + then { provider: "anthropic", anthropic: { model: $model } } + else { provider: "openai", openai: { model: $model } } + end + ), + monitoring: { + heartbeat_interval: $heartbeatInterval + }, + security: { + require_sender_allowlist: true, + allowed_senders: { + discord: $allowlistDiscord, + telegram: $allowlistTelegram, + whatsapp: $allowlistWhatsapp + }, + allow_dangerous_agent_flags: $allowDangerous, + allow_outbound_file_paths_outside_files_dir: false, + persist_team_chats: $persistTeamChats + } + }' > "$SETTINGS_FILE" # Create workspace directory mkdir -p "$WORKSPACE_PATH" diff --git a/lib/update.sh b/lib/update.sh index 62610ee..f5cae38 100755 --- a/lib/update.sh +++ b/lib/update.sh @@ -6,6 +6,42 @@ GITHUB_REPO="jlia0/tinyclaw" UPDATE_CHECK_CACHE="$HOME/.tinyclaw/.update_check" UPDATE_CHECK_TTL=3600 # Check once per hour 
+verify_sha256() { + local file_path="$1" + local checksum_file="$2" + local expected actual + + expected=$(awk '{print $1}' "$checksum_file" 2>/dev/null | head -n1) + if [ -z "$expected" ]; then + return 1 + fi + + if command -v shasum &> /dev/null; then + actual=$(shasum -a 256 "$file_path" | awk '{print $1}') + elif command -v sha256sum &> /dev/null; then + actual=$(sha256sum "$file_path" | awk '{print $1}') + else + echo -e "${RED}Error: shasum or sha256sum is required to verify updates${NC}" + return 1 + fi + + [ "$actual" = "$expected" ] +} + +confirm_insecure_update() { + local reason="$1" + echo -e "${YELLOW}Warning: ${reason}${NC}" + echo -e "${YELLOW}The update bundle will NOT be integrity-verified.${NC}" + + if [ "${TINYCLAW_ALLOW_INSECURE_UPDATE:-}" = "1" ]; then + echo -e "${YELLOW}Proceeding because TINYCLAW_ALLOW_INSECURE_UPDATE=1${NC}" + return 0 + fi + + read -rp "Continue without checksum verification? [y/N]: " CONFIRM + [[ "$CONFIRM" =~ ^[yY]$ ]] +} + # Get current version get_current_version() { if [ -f "$SCRIPT_DIR/package.json" ]; then @@ -177,10 +213,12 @@ do_update() { # Download bundle local bundle_url="https://github.com/$GITHUB_REPO/releases/download/v${latest_version}/tinyclaw-bundle.tar.gz" + local checksum_url="https://github.com/$GITHUB_REPO/releases/download/v${latest_version}/tinyclaw-bundle.sha256" local temp_dir=$(mktemp -d) local bundle_file="$temp_dir/tinyclaw-bundle.tar.gz" + local checksum_file="$temp_dir/tinyclaw-bundle.sha256" - echo -e "${BLUE}[1/4] Downloading...${NC}" + echo -e "${BLUE}[1/5] Downloading...${NC}" if ! 
curl -fSL -o "$bundle_file" "$bundle_url" 2>&1 | grep -v "^ "; then echo -e "${RED}Error: Download failed${NC}" rm -rf "$temp_dir" @@ -189,8 +227,32 @@ do_update() { echo -e "${GREEN}✓ Downloaded${NC}" echo "" + echo -e "${BLUE}[2/5] Verifying integrity...${NC}" + local checksum_status + checksum_status=$(curl -sS -L -w "%{http_code}" -o "$checksum_file" "$checksum_url" || echo "000") + if [ "$checksum_status" = "200" ]; then + if ! verify_sha256 "$bundle_file" "$checksum_file"; then + echo -e "${RED}Error: Checksum verification failed${NC}" + rm -rf "$temp_dir" + return 1 + fi + echo -e "${GREEN}✓ Checksum verified${NC}" + elif [ "$checksum_status" = "404" ]; then + if ! confirm_insecure_update "No checksum asset found for v${latest_version} (legacy release)."; then + echo "Update cancelled." + rm -rf "$temp_dir" + return 1 + fi + echo -e "${YELLOW}⚠ Proceeding without checksum verification${NC}" + else + echo -e "${RED}Error: Could not download checksum file (HTTP ${checksum_status})${NC}" + rm -rf "$temp_dir" + return 1 + fi + echo "" + # Backup current installation - echo -e "${BLUE}[2/4] Backing up current installation...${NC}" + echo -e "${BLUE}[3/5] Backing up current installation...${NC}" local backup_dir="$HOME/.tinyclaw/backups/v${current_version}-$(date +%Y%m%d_%H%M%S)" mkdir -p "$backup_dir" @@ -206,7 +268,7 @@ do_update() { echo "" # Extract new version - echo -e "${BLUE}[3/4] Installing new version...${NC}" + echo -e "${BLUE}[4/5] Installing new version...${NC}" cd "$temp_dir" tar -xzf "$bundle_file" @@ -234,7 +296,7 @@ do_update() { # Clear update cache rm -f "$UPDATE_CHECK_CACHE" - echo -e "${BLUE}[4/4] Update complete!${NC}" + echo -e "${BLUE}[5/5] Update complete!${NC}" echo "" echo -e "${GREEN}╔════════════════════════════════════════╗${NC}" echo -e "${GREEN}║ Updated to v${latest_version}!${NC}" diff --git a/scripts/bundle.sh b/scripts/bundle.sh index 23f1c23..c1d2918 100755 --- a/scripts/bundle.sh +++ b/scripts/bundle.sh @@ -33,6 +33,7 @@ if [ 
-n "$GIT_TAG" ]; then fi BUNDLE_NAME="tinyclaw-bundle-${VERSION}.tar.gz" +CHECKSUM_NAME="tinyclaw-bundle-${VERSION}.sha256" TEMP_DIR=$(mktemp -d) BUNDLE_DIR="$TEMP_DIR/tinyclaw" @@ -113,6 +114,15 @@ cd "$SCRIPT_DIR" rm -rf "$TEMP_DIR" BUNDLE_SIZE=$(du -h "$BUNDLE_NAME" | cut -f1) + +if command -v shasum >/dev/null 2>&1; then + shasum -a 256 "$BUNDLE_NAME" | awk '{print $1}' > "$CHECKSUM_NAME" +elif command -v sha256sum >/dev/null 2>&1; then + sha256sum "$BUNDLE_NAME" | awk '{print $1}' > "$CHECKSUM_NAME" +else + echo -e "${YELLOW}⚠ Could not generate checksum (missing shasum/sha256sum)${NC}" +fi + echo -e "${GREEN}✓ Bundle created: $BUNDLE_NAME ($BUNDLE_SIZE)${NC}" echo "" diff --git a/scripts/remote-install.sh b/scripts/remote-install.sh index 04038b8..3db4300 100755 --- a/scripts/remote-install.sh +++ b/scripts/remote-install.sh @@ -16,6 +16,48 @@ GITHUB_REPO="jlia0/tinyclaw" DEFAULT_BRANCH="main" INSTALL_DIR="" +verify_sha256() { + local file_path="$1" + local checksum_file="$2" + local expected actual + + expected=$(awk '{print $1}' "$checksum_file" 2>/dev/null | head -n1) + if [ -z "$expected" ]; then + return 1 + fi + + if command -v shasum >/dev/null 2>&1; then + actual=$(shasum -a 256 "$file_path" | awk '{print $1}') + elif command -v sha256sum >/dev/null 2>&1; then + actual=$(sha256sum "$file_path" | awk '{print $1}') + else + echo -e "${RED}✗ Missing checksum tool (shasum or sha256sum)${NC}" + return 1 + fi + + [ "$actual" = "$expected" ] +} + +confirm_insecure_bundle() { + local reason="$1" + echo -e "${YELLOW}Warning: ${reason}${NC}" + echo -e "${YELLOW}Proceeding without checksum verification.${NC}" + + if [ "${TINYCLAW_ALLOW_INSECURE_BUNDLE:-}" = "1" ]; then + echo -e "${YELLOW}Proceeding because TINYCLAW_ALLOW_INSECURE_BUNDLE=1${NC}" + return 0 + fi + + if [ -t 0 ]; then + read -p "Continue with unverified bundle? 
(y/N) " -n 1 -r + echo "" + [[ $REPLY =~ ^[Yy]$ ]] + return + fi + + return 1 +} + echo "" echo -e "${BLUE}╔════════════════════════════════════════╗${NC}" echo -e "${BLUE}║ TinyClaw Remote Installer ║${NC}" @@ -53,6 +95,10 @@ if ! command_exists claude; then MISSING_DEPS+=("claude (Claude Code CLI)") fi +if ! command_exists jq; then + MISSING_DEPS+=("jq") +fi + if [ ${#MISSING_DEPS[@]} -ne 0 ]; then echo -e "${RED}✗ Missing dependencies:${NC}" for dep in "${MISSING_DEPS[@]}"; do @@ -63,6 +109,7 @@ if [ ${#MISSING_DEPS[@]} -ne 0 ]; then echo " - Node.js/npm: https://nodejs.org/" echo " - tmux: sudo apt install tmux (or brew install tmux)" echo " - Claude Code: https://claude.com/claude-code" + echo " - jq: sudo apt install jq (or brew install jq)" echo "" exit 1 fi @@ -99,11 +146,31 @@ LATEST_RELEASE=$(curl -fsSL "https://api.github.com/repos/$GITHUB_REPO/releases/ if [ -n "$LATEST_RELEASE" ]; then BUNDLE_URL="https://github.com/$GITHUB_REPO/releases/download/$LATEST_RELEASE/tinyclaw-bundle.tar.gz" + CHECKSUM_URL="https://github.com/$GITHUB_REPO/releases/download/$LATEST_RELEASE/tinyclaw-bundle.sha256" + + BUNDLE_AVAILABLE=false + CHECKSUM_AVAILABLE=false - # Check if bundle exists if curl -fsSL -I "$BUNDLE_URL" >/dev/null 2>&1; then - echo -e "${GREEN}✓ Pre-built bundle available ($LATEST_RELEASE)${NC}" + BUNDLE_AVAILABLE=true + fi + if curl -fsSL -I "$CHECKSUM_URL" >/dev/null 2>&1; then + CHECKSUM_AVAILABLE=true + fi + + if [ "$BUNDLE_AVAILABLE" = true ] && [ "$CHECKSUM_AVAILABLE" = true ]; then + echo -e "${GREEN}✓ Verified pre-built bundle available ($LATEST_RELEASE)${NC}" USE_BUNDLE=true + ALLOW_UNVERIFIED_BUNDLE=false + elif [ "$BUNDLE_AVAILABLE" = true ]; then + if confirm_insecure_bundle "Checksum asset not found for $LATEST_RELEASE (legacy release)."; then + USE_BUNDLE=true + ALLOW_UNVERIFIED_BUNDLE=true + echo -e "${YELLOW}⚠ Using unverified pre-built bundle${NC}" + else + echo -e "${YELLOW}⚠ Bundle checksum missing; falling back to source install${NC}" + 
USE_BUNDLE=false + fi else echo -e "${YELLOW}⚠ No pre-built bundle found, will build from source${NC}" USE_BUNDLE=false @@ -120,14 +187,29 @@ echo -e "${BLUE}[4/6] Downloading TinyClaw...${NC}" if [ "$USE_BUNDLE" = true ]; then # Download and extract bundle mkdir -p "$INSTALL_DIR" + TMP_DIR=$(mktemp -d) + BUNDLE_FILE="$TMP_DIR/tinyclaw-bundle.tar.gz" + CHECKSUM_FILE="$TMP_DIR/tinyclaw-bundle.sha256" echo "Downloading bundle..." - if curl -fsSL "$BUNDLE_URL" | tar -xz -C "$INSTALL_DIR" --strip-components=1; then + if [ "${ALLOW_UNVERIFIED_BUNDLE:-false}" = true ]; then + if curl -fsSL -o "$BUNDLE_FILE" "$BUNDLE_URL" \ + && tar -xzf "$BUNDLE_FILE" -C "$INSTALL_DIR" --strip-components=1; then + echo -e "${GREEN}✓ Bundle extracted (unverified)${NC}" + else + echo -e "${RED}✗ Failed to download bundle, falling back to source install${NC}" + USE_BUNDLE=false + fi + elif curl -fsSL -o "$BUNDLE_FILE" "$BUNDLE_URL" \ + && curl -fsSL -o "$CHECKSUM_FILE" "$CHECKSUM_URL" \ + && verify_sha256 "$BUNDLE_FILE" "$CHECKSUM_FILE" \ + && tar -xzf "$BUNDLE_FILE" -C "$INSTALL_DIR" --strip-components=1; then echo -e "${GREEN}✓ Bundle extracted${NC}" else - echo -e "${RED}✗ Failed to download bundle, falling back to source install${NC}" + echo -e "${RED}✗ Failed to verify bundle integrity, falling back to source install${NC}" USE_BUNDLE=false fi + rm -rf "$TMP_DIR" else # Clone from GitHub if ! 
command_exists git; then diff --git a/src/channels/discord-client.ts b/src/channels/discord-client.ts index 75894b2..93620ad 100644 --- a/src/channels/discord-client.ts +++ b/src/channels/discord-client.ts @@ -22,6 +22,9 @@ const QUEUE_OUTGOING = path.join(TINYCLAW_HOME, 'queue/outgoing'); const LOG_FILE = path.join(TINYCLAW_HOME, 'logs/discord.log'); const SETTINGS_FILE = path.join(TINYCLAW_HOME, 'settings.json'); const FILES_DIR = path.join(TINYCLAW_HOME, 'files'); +const MAX_DOWNLOAD_BYTES = Number(process.env.TINYCLAW_MAX_DOWNLOAD_BYTES || (25 * 1024 * 1024)); +const DOWNLOAD_TIMEOUT_MS = Number(process.env.TINYCLAW_DOWNLOAD_TIMEOUT_MS || 30000); +const MAX_REDIRECTS = 5; // Ensure directories exist [QUEUE_INCOMING, QUEUE_OUTGOING, path.dirname(LOG_FILE), FILES_DIR].forEach(dir => { @@ -81,28 +84,138 @@ function buildUniqueFilePath(dir: string, preferredName: string): string { return candidate; } +function pathInDirectory(candidatePath: string, directoryPath: string): boolean { + try { + const resolvedDir = fs.realpathSync(directoryPath); + const resolvedFile = fs.realpathSync(candidatePath); + if (resolvedFile === resolvedDir) return true; + const dirWithSep = resolvedDir.endsWith(path.sep) ? 
resolvedDir : `${resolvedDir}${path.sep}`; + return resolvedFile.startsWith(dirWithSep); + } catch { + return false; + } +} + +function readSecuritySettings(): { + requireSenderAllowlist: boolean; + allowOutsideFilesDir: boolean; + allowedSenders: string[]; +} { + try { + const settingsData = fs.readFileSync(SETTINGS_FILE, 'utf8'); + const settings = JSON.parse(settingsData); + return { + requireSenderAllowlist: settings?.security?.require_sender_allowlist !== false, + allowOutsideFilesDir: settings?.security?.allow_outbound_file_paths_outside_files_dir === true, + allowedSenders: settings?.security?.allowed_senders?.discord || [], + }; + } catch { + return { + requireSenderAllowlist: true, + allowOutsideFilesDir: false, + allowedSenders: [], + }; + } +} + +function isAllowedOutgoingFile(filePath: string): boolean { + if (!fs.existsSync(filePath)) return false; + const stat = fs.statSync(filePath); + if (!stat.isFile()) return false; + const security = readSecuritySettings(); + return security.allowOutsideFilesDir || pathInDirectory(filePath, FILES_DIR); +} + // Download a file from URL to local path -function downloadFile(url: string, destPath: string): Promise { +function downloadFile(url: string, destPath: string, redirectCount = 0): Promise { return new Promise((resolve, reject) => { + if (redirectCount > MAX_REDIRECTS) { + reject(new Error('Too many redirects while downloading file')); + return; + } + + let parsedUrl: URL; + try { + parsedUrl = new URL(url); + } catch { + reject(new Error('Invalid download URL')); + return; + } + if (parsedUrl.protocol !== 'https:' && parsedUrl.protocol !== 'http:') { + reject(new Error(`Unsupported URL protocol: ${parsedUrl.protocol}`)); + return; + } + const file = fs.createWriteStream(destPath); - const request = (url.startsWith('https') ? 
https.get(url, handleResponse) : http.get(url, handleResponse)); + let totalBytes = 0; + let settled = false; + const cleanupFile = () => fs.unlink(destPath, () => {}); + + const request = (parsedUrl.protocol === 'https:' ? https.get(url, handleResponse) : http.get(url, handleResponse)); + request.setTimeout(DOWNLOAD_TIMEOUT_MS, () => { + if (settled) return; + settled = true; + request.destroy(new Error('Download timeout')); + file.destroy(); + cleanupFile(); + reject(new Error('Download timeout')); + }); function handleResponse(response: http.IncomingMessage): void { - if (response.statusCode === 301 || response.statusCode === 302) { + if ([301, 302, 303, 307, 308].includes(response.statusCode || 0)) { const redirectUrl = response.headers.location; if (redirectUrl) { + settled = true; + const absoluteRedirect = new URL(redirectUrl, parsedUrl).toString(); file.close(); - fs.unlinkSync(destPath); - downloadFile(redirectUrl, destPath).then(resolve).catch(reject); + cleanupFile(); + downloadFile(absoluteRedirect, destPath, redirectCount + 1).then(resolve).catch(reject); return; } } + + if ((response.statusCode || 0) < 200 || (response.statusCode || 0) >= 300) { + settled = true; + file.destroy(); + cleanupFile(); + reject(new Error(`Download failed with status ${response.statusCode}`)); + return; + } + + response.on('data', (chunk: Buffer) => { + totalBytes += chunk.length; + if (totalBytes > MAX_DOWNLOAD_BYTES && !settled) { + settled = true; + response.destroy(new Error('Download exceeded size limit')); + request.destroy(new Error('Download exceeded size limit')); + file.destroy(); + cleanupFile(); + reject(new Error(`Download exceeds max size (${MAX_DOWNLOAD_BYTES} bytes)`)); + } + }); + response.pipe(file); - file.on('finish', () => { file.close(); resolve(); }); + file.on('finish', () => { + if (settled) return; + settled = true; + file.close(); + resolve(); + }); } request.on('error', (err) => { - fs.unlink(destPath, () => {}); + if (settled) return; + settled 
= true; + cleanupFile(); + file.destroy(); + reject(err); + }); + + file.on('error', (err) => { + if (settled) return; + settled = true; + request.destroy(); + cleanupFile(); reject(err); }); }); @@ -166,6 +279,12 @@ function getAgentListText(): string { } } +function isSenderAllowed(senderId: string): boolean { + const security = readSecuritySettings(); + if (!security.requireSenderAllowlist) return true; + return security.allowedSenders.includes('*') || security.allowedSenders.includes(senderId); +} + // Split long messages for Discord's 2000 char limit function splitMessage(text: string, maxLength = 2000): string[] { if (text.length <= maxLength) { @@ -242,6 +361,13 @@ client.on(Events.MessageCreate, async (message: Message) => { } const sender = message.author.username; + const senderId = message.author.id; + + if (!isSenderAllowed(senderId)) { + log('WARN', `Blocked unauthorized sender: ${sender} (${senderId})`); + await message.reply(`Access denied. Sender ID ${senderId} is not allowlisted.`); + return; + } // Generate unique message ID const messageId = `${Date.now()}_${Math.random().toString(36).substring(7)}`; @@ -311,7 +437,7 @@ client.on(Events.MessageCreate, async (message: Message) => { const queueData: QueueData = { channel: 'discord', sender: sender, - senderId: message.author.id, + senderId: senderId, message: fullMessage, timestamp: Date.now(), messageId: messageId, @@ -370,7 +496,10 @@ async function checkOutgoingQueue(): Promise { const attachments: AttachmentBuilder[] = []; for (const file of responseData.files) { try { - if (!fs.existsSync(file)) continue; + if (!isAllowedOutgoingFile(file)) { + log('WARN', `Blocked unsafe outbound file path: ${file}`); + continue; + } attachments.push(new AttachmentBuilder(file)); } catch (fileErr) { log('ERROR', `Failed to prepare file ${file}: ${(fileErr as Error).message}`); diff --git a/src/channels/telegram-client.ts b/src/channels/telegram-client.ts index f7f81bc..47e5a05 100644 --- 
a/src/channels/telegram-client.ts +++ b/src/channels/telegram-client.ts @@ -24,6 +24,9 @@ const QUEUE_OUTGOING = path.join(TINYCLAW_HOME, 'queue/outgoing'); const LOG_FILE = path.join(TINYCLAW_HOME, 'logs/telegram.log'); const SETTINGS_FILE = path.join(TINYCLAW_HOME, 'settings.json'); const FILES_DIR = path.join(TINYCLAW_HOME, 'files'); +const MAX_DOWNLOAD_BYTES = Number(process.env.TINYCLAW_MAX_DOWNLOAD_BYTES || (25 * 1024 * 1024)); +const DOWNLOAD_TIMEOUT_MS = Number(process.env.TINYCLAW_DOWNLOAD_TIMEOUT_MS || 30000); +const MAX_REDIRECTS = 5; // Ensure directories exist [QUEUE_INCOMING, QUEUE_OUTGOING, path.dirname(LOG_FILE), FILES_DIR].forEach(dir => { @@ -90,6 +93,48 @@ function buildUniqueFilePath(dir: string, preferredName: string): string { return candidate; } +function pathInDirectory(candidatePath: string, directoryPath: string): boolean { + try { + const resolvedDir = fs.realpathSync(directoryPath); + const resolvedFile = fs.realpathSync(candidatePath); + if (resolvedFile === resolvedDir) return true; + const dirWithSep = resolvedDir.endsWith(path.sep) ? 
resolvedDir : `${resolvedDir}${path.sep}`; + return resolvedFile.startsWith(dirWithSep); + } catch { + return false; + } +} + +function readSecuritySettings(): { + requireSenderAllowlist: boolean; + allowOutsideFilesDir: boolean; + allowedSenders: string[]; +} { + try { + const settingsData = fs.readFileSync(SETTINGS_FILE, 'utf8'); + const settings = JSON.parse(settingsData); + return { + requireSenderAllowlist: settings?.security?.require_sender_allowlist !== false, + allowOutsideFilesDir: settings?.security?.allow_outbound_file_paths_outside_files_dir === true, + allowedSenders: settings?.security?.allowed_senders?.telegram || [], + }; + } catch { + return { + requireSenderAllowlist: true, + allowOutsideFilesDir: false, + allowedSenders: [], + }; + } +} + +function isAllowedOutgoingFile(filePath: string): boolean { + if (!fs.existsSync(filePath)) return false; + const stat = fs.statSync(filePath); + if (!stat.isFile()) return false; + const security = readSecuritySettings(); + return security.allowOutsideFilesDir || pathInDirectory(filePath, FILES_DIR); +} + // Track pending messages (waiting for response) const pendingMessages = new Map(); let processingOutgoingQueue = false; @@ -148,6 +193,12 @@ function getAgentListText(): string { } } +function isSenderAllowed(senderId: string): boolean { + const security = readSecuritySettings(); + if (!security.requireSenderAllowlist) return true; + return security.allowedSenders.includes('*') || security.allowedSenders.includes(senderId); +} + // Split long messages for Telegram's 4096 char limit function splitMessage(text: string, maxLength = 4096): string[] { if (text.length <= maxLength) { @@ -184,27 +235,95 @@ function splitMessage(text: string, maxLength = 4096): string[] { } // Download a file from URL to local path -function downloadFile(url: string, destPath: string): Promise { +function downloadFile(url: string, destPath: string, redirectCount = 0): Promise { return new Promise((resolve, reject) => { + if 
(redirectCount > MAX_REDIRECTS) { + reject(new Error('Too many redirects while downloading file')); + return; + } + + let parsedUrl: URL; + try { + parsedUrl = new URL(url); + } catch { + reject(new Error('Invalid download URL')); + return; + } + if (parsedUrl.protocol !== 'https:' && parsedUrl.protocol !== 'http:') { + reject(new Error(`Unsupported URL protocol: ${parsedUrl.protocol}`)); + return; + } + const file = fs.createWriteStream(destPath); - const request = (url.startsWith('https') ? https.get(url, handleResponse) : http.get(url, handleResponse)); + let totalBytes = 0; + let settled = false; + const cleanupFile = () => fs.unlink(destPath, () => {}); + + const request = (parsedUrl.protocol === 'https:' ? https.get(url, handleResponse) : http.get(url, handleResponse)); + request.setTimeout(DOWNLOAD_TIMEOUT_MS, () => { + if (settled) return; + settled = true; + request.destroy(new Error('Download timeout')); + file.destroy(); + cleanupFile(); + reject(new Error('Download timeout')); + }); function handleResponse(response: http.IncomingMessage): void { - if (response.statusCode === 301 || response.statusCode === 302) { + if ([301, 302, 303, 307, 308].includes(response.statusCode || 0)) { const redirectUrl = response.headers.location; if (redirectUrl) { + settled = true; + const absoluteRedirect = new URL(redirectUrl, parsedUrl).toString(); file.close(); - fs.unlinkSync(destPath); - downloadFile(redirectUrl, destPath).then(resolve).catch(reject); + cleanupFile(); + downloadFile(absoluteRedirect, destPath, redirectCount + 1).then(resolve).catch(reject); return; } } + + if ((response.statusCode || 0) < 200 || (response.statusCode || 0) >= 300) { + settled = true; + file.destroy(); + cleanupFile(); + reject(new Error(`Download failed with status ${response.statusCode}`)); + return; + } + + response.on('data', (chunk: Buffer) => { + totalBytes += chunk.length; + if (totalBytes > MAX_DOWNLOAD_BYTES && !settled) { + settled = true; + response.destroy(new 
Error('Download exceeded size limit')); + request.destroy(new Error('Download exceeded size limit')); + file.destroy(); + cleanupFile(); + reject(new Error(`Download exceeds max size (${MAX_DOWNLOAD_BYTES} bytes)`)); + } + }); + response.pipe(file); - file.on('finish', () => { file.close(); resolve(); }); + file.on('finish', () => { + if (settled) return; + settled = true; + file.close(); + resolve(); + }); } request.on('error', (err) => { - fs.unlink(destPath, () => {}); // Clean up on error + if (settled) return; + settled = true; + cleanupFile(); + file.destroy(); + reject(err); + }); + + file.on('error', (err) => { + if (settled) return; + settled = true; + request.destroy(); + cleanupFile(); reject(err); }); }); @@ -331,6 +450,14 @@ bot.on('message', async (msg: TelegramBot.Message) => { : 'Unknown'; const senderId = msg.from ? msg.from.id.toString() : msg.chat.id.toString(); + if (!isSenderAllowed(senderId)) { + log('WARN', `Blocked unauthorized sender: ${sender} (${senderId})`); + await bot.sendMessage(msg.chat.id, `Access denied. Sender ID ${senderId} is not allowlisted.`, { + reply_to_message_id: msg.message_id, + }); + return; + } + log('INFO', `Message from ${sender}: ${messageText.substring(0, 50)}${downloadedFiles.length > 0 ? 
` [+${downloadedFiles.length} file(s)]` : ''}...`); // Check for agent list command @@ -440,7 +567,10 @@ async function checkOutgoingQueue(): Promise { if (responseData.files && responseData.files.length > 0) { for (const file of responseData.files) { try { - if (!fs.existsSync(file)) continue; + if (!isAllowedOutgoingFile(file)) { + log('WARN', `Blocked unsafe outbound file path: ${file}`); + continue; + } const ext = path.extname(file).toLowerCase(); if (['.jpg', '.jpeg', '.png', '.gif', '.webp'].includes(ext)) { await bot.sendPhoto(pending.chatId, file); diff --git a/src/channels/whatsapp-client.ts b/src/channels/whatsapp-client.ts index 879266f..48a1db5 100644 --- a/src/channels/whatsapp-client.ts +++ b/src/channels/whatsapp-client.ts @@ -21,6 +21,7 @@ const LOG_FILE = path.join(TINYCLAW_HOME, 'logs/whatsapp.log'); const SESSION_DIR = path.join(SCRIPT_DIR, '.tinyclaw/whatsapp-session'); const SETTINGS_FILE = path.join(TINYCLAW_HOME, 'settings.json'); const FILES_DIR = path.join(TINYCLAW_HOME, 'files'); +const MAX_DOWNLOAD_BYTES = Number(process.env.TINYCLAW_MAX_DOWNLOAD_BYTES || (25 * 1024 * 1024)); // Ensure directories exist [QUEUE_INCOMING, QUEUE_OUTGOING, path.dirname(LOG_FILE), SESSION_DIR, FILES_DIR].forEach(dir => { @@ -79,11 +80,58 @@ function extFromMime(mime?: string): string { return map[mime] || `.${mime.split('/')[1] || 'bin'}`; } +function pathInDirectory(candidatePath: string, directoryPath: string): boolean { + try { + const resolvedDir = fs.realpathSync(directoryPath); + const resolvedFile = fs.realpathSync(candidatePath); + if (resolvedFile === resolvedDir) return true; + const dirWithSep = resolvedDir.endsWith(path.sep) ? 
resolvedDir : `${resolvedDir}${path.sep}`; + return resolvedFile.startsWith(dirWithSep); + } catch { + return false; + } +} + +function readSecuritySettings(): { + requireSenderAllowlist: boolean; + allowOutsideFilesDir: boolean; + allowedSenders: string[]; +} { + try { + const settingsData = fs.readFileSync(SETTINGS_FILE, 'utf8'); + const settings = JSON.parse(settingsData); + return { + requireSenderAllowlist: settings?.security?.require_sender_allowlist !== false, + allowOutsideFilesDir: settings?.security?.allow_outbound_file_paths_outside_files_dir === true, + allowedSenders: settings?.security?.allowed_senders?.whatsapp || [], + }; + } catch { + return { + requireSenderAllowlist: true, + allowOutsideFilesDir: false, + allowedSenders: [], + }; + } +} + +function isAllowedOutgoingFile(filePath: string): boolean { + if (!fs.existsSync(filePath)) return false; + const stat = fs.statSync(filePath); + if (!stat.isFile()) return false; + const security = readSecuritySettings(); + return security.allowOutsideFilesDir || pathInDirectory(filePath, FILES_DIR); +} + // Download media from a WhatsApp message and save to FILES_DIR async function downloadWhatsAppMedia(message: Message, queueMessageId: string): Promise { try { const media = await message.downloadMedia(); if (!media || !media.data) return null; + const approxBytes = Math.floor((media.data.length * 3) / 4); + if (approxBytes > MAX_DOWNLOAD_BYTES) { + log('WARN', `Rejected oversized media (${approxBytes} bytes)`); + return null; + } const ext = message.type === MessageTypes.DOCUMENT && (message as any)._data?.filename ? 
path.extname((message as any)._data.filename) @@ -160,6 +208,12 @@ function getAgentListText(): string { } } +function isSenderAllowed(senderId: string): boolean { + const security = readSecuritySettings(); + if (!security.requireSenderAllowlist) return true; + return security.allowedSenders.includes('*') || security.allowedSenders.includes(senderId); +} + // Initialize WhatsApp client const client = new Client({ authStrategy: new LocalAuth({ @@ -240,12 +294,19 @@ client.on('message_create', async (message: Message) => { const chat = await message.getChat(); const contact = await message.getContact(); const sender = contact.pushname || contact.name || message.from; + const senderId = message.from; // Skip group messages if (chat.isGroup) { return; } + if (!isSenderAllowed(senderId)) { + log('WARN', `Blocked unauthorized sender: ${sender} (${senderId})`); + await message.reply(`Access denied. Sender ID ${senderId} is not allowlisted.`); + return; + } + // Generate unique message ID const messageId = `${Date.now()}_${Math.random().toString(36).substring(7)}`; @@ -311,7 +372,7 @@ client.on('message_create', async (message: Message) => { const queueData: QueueData = { channel: 'whatsapp', sender: sender, - senderId: message.from, + senderId: senderId, message: fullMessage, timestamp: Date.now(), messageId: messageId, @@ -369,7 +430,10 @@ async function checkOutgoingQueue(): Promise { if (responseData.files && responseData.files.length > 0) { for (const file of responseData.files) { try { - if (!fs.existsSync(file)) continue; + if (!isAllowedOutgoingFile(file)) { + log('WARN', `Blocked unsafe outbound file path: ${file}`); + continue; + } const media = MessageMedia.fromFilePath(file); await pending.chat.sendMessage(media); log('INFO', `Sent file to WhatsApp: ${path.basename(file)}`); diff --git a/src/lib/config.ts b/src/lib/config.ts index 65c5b99..6673473 100644 --- a/src/lib/config.ts +++ b/src/lib/config.ts @@ -15,6 +15,7 @@ export const RESET_FLAG = 
path.join(TINYCLAW_HOME, 'reset_flag'); export const SETTINGS_FILE = path.join(TINYCLAW_HOME, 'settings.json'); export const EVENTS_DIR = path.join(TINYCLAW_HOME, 'events'); export const CHATS_DIR = path.join(TINYCLAW_HOME, 'chats'); +export const FILES_DIR = path.join(TINYCLAW_HOME, 'files'); export function getSettings(): Settings { try { diff --git a/src/lib/invoke.ts b/src/lib/invoke.ts index 471eaf2..c54301c 100644 --- a/src/lib/invoke.ts +++ b/src/lib/invoke.ts @@ -54,7 +54,8 @@ export async function invokeAgent( workspacePath: string, shouldReset: boolean, agents: Record = {}, - teams: Record = {} + teams: Record = {}, + allowDangerousFlags = false ): Promise { // Ensure agent directory exists with config files const agentDir = path.join(workspacePath, agentId); @@ -93,7 +94,12 @@ export async function invokeAgent( if (modelId) { codexArgs.push('--model', modelId); } - codexArgs.push('--skip-git-repo-check', '--dangerously-bypass-approvals-and-sandbox', '--json', message); + codexArgs.push('--skip-git-repo-check'); + if (allowDangerousFlags) { + codexArgs.push('--dangerously-bypass-approvals-and-sandbox'); + log('WARN', `Dangerous Codex flags enabled for agent: ${agentId}`); + } + codexArgs.push('--json', message); const codexOutput = await runCommand('codex', codexArgs, workingDir); @@ -123,7 +129,11 @@ export async function invokeAgent( } const modelId = resolveClaudeModel(agent.model); - const claudeArgs = ['--dangerously-skip-permissions']; + const claudeArgs: string[] = []; + if (allowDangerousFlags) { + claudeArgs.push('--dangerously-skip-permissions'); + log('WARN', `Dangerous Claude flags enabled for agent: ${agentId}`); + } if (modelId) { claudeArgs.push('--model', modelId); } diff --git a/src/lib/logging.ts b/src/lib/logging.ts index d91baba..57b719e 100644 --- a/src/lib/logging.ts +++ b/src/lib/logging.ts @@ -2,6 +2,9 @@ import fs from 'fs'; import path from 'path'; import { LOG_FILE, EVENTS_DIR } from './config'; +const EVENT_RETENTION_MS = 
Number(process.env.TINYCLAW_EVENT_RETENTION_MS || (24 * 60 * 60 * 1000)); +let lastEventCleanup = 0; + export function log(level: string, message: string): void { const timestamp = new Date().toISOString(); const logMessage = `[${timestamp}] [${level}] ${message}\n`; @@ -18,6 +21,22 @@ export function emitEvent(type: string, data: Record): void { if (!fs.existsSync(EVENTS_DIR)) { fs.mkdirSync(EVENTS_DIR, { recursive: true }); } + const now = Date.now(); + if (now - lastEventCleanup > 60_000) { + lastEventCleanup = now; + const files = fs.readdirSync(EVENTS_DIR).filter(f => f.endsWith('.json')); + for (const file of files) { + const filePath = path.join(EVENTS_DIR, file); + try { + const stat = fs.statSync(filePath); + if (now - stat.mtimeMs > EVENT_RETENTION_MS) { + fs.unlinkSync(filePath); + } + } catch { + // Best-effort cleanup only. + } + } + } const event = { type, timestamp: Date.now(), ...data }; const filename = `${Date.now()}-${Math.random().toString(36).slice(2, 8)}.json`; fs.writeFileSync(path.join(EVENTS_DIR, filename), JSON.stringify(event) + '\n'); diff --git a/src/lib/types.ts b/src/lib/types.ts index 1a20267..62729eb 100644 --- a/src/lib/types.ts +++ b/src/lib/types.ts @@ -41,6 +41,17 @@ export interface Settings { monitoring?: { heartbeat_interval?: number; }; + security?: { + require_sender_allowlist?: boolean; + allowed_senders?: { + discord?: string[]; + telegram?: string[]; + whatsapp?: string[]; + }; + allow_dangerous_agent_flags?: boolean; + allow_outbound_file_paths_outside_files_dir?: boolean; + persist_team_chats?: boolean; + }; } export interface MessageData { diff --git a/src/queue-processor.ts b/src/queue-processor.ts index 1844d6a..167a2a4 100644 --- a/src/queue-processor.ts +++ b/src/queue-processor.ts @@ -12,10 +12,10 @@ import fs from 'fs'; import path from 'path'; -import { MessageData, ResponseData, QueueFile, ChainStep, TeamConfig } from './lib/types'; +import { MessageData, ResponseData, QueueFile, ChainStep, TeamConfig, 
Settings } from './lib/types'; import { QUEUE_INCOMING, QUEUE_OUTGOING, QUEUE_PROCESSING, - LOG_FILE, RESET_FLAG, EVENTS_DIR, CHATS_DIR, + LOG_FILE, RESET_FLAG, EVENTS_DIR, CHATS_DIR, FILES_DIR, getSettings, getAgents, getTeams } from './lib/config'; import { log, emitEvent } from './lib/logging'; @@ -23,12 +23,57 @@ import { parseAgentRouting, findTeamForAgent, getAgentResetFlag, extractTeammate import { invokeAgent } from './lib/invoke'; // Ensure directories exist -[QUEUE_INCOMING, QUEUE_OUTGOING, QUEUE_PROCESSING, path.dirname(LOG_FILE)].forEach(dir => { +[QUEUE_INCOMING, QUEUE_OUTGOING, QUEUE_PROCESSING, path.dirname(LOG_FILE), FILES_DIR].forEach(dir => { if (!fs.existsSync(dir)) { fs.mkdirSync(dir, { recursive: true }); } }); +function redactForLogs(text: string, maxLength: number): string { + const redacted = text + .replace(/\[file:\s*[^\]]+\]/gi, '[file]') + .replace(/\[send_file:\s*[^\]]+\]/gi, '[send_file]'); + return redacted.length > maxLength ? `${redacted.substring(0, maxLength)}...` : redacted; +} + +function pathInDirectory(candidatePath: string, directoryPath: string): boolean { + try { + const resolvedDir = fs.realpathSync(directoryPath); + const resolvedFile = fs.realpathSync(candidatePath); + if (resolvedFile === resolvedDir) return true; + const dirWithSep = resolvedDir.endsWith(path.sep) ? 
resolvedDir : `${resolvedDir}${path.sep}`; + return resolvedFile.startsWith(dirWithSep); + } catch { + return false; + } +} + +function isSafeOutboundFile(filePath: string, settings: Settings): boolean { + if (!fs.existsSync(filePath)) return false; + const stat = fs.statSync(filePath); + if (!stat.isFile()) return false; + + if (settings.security?.allow_outbound_file_paths_outside_files_dir === true) { + return true; + } + return pathInDirectory(filePath, FILES_DIR); +} + +function isSenderAuthorized(messageData: MessageData, settings: Settings): boolean { + if (messageData.channel === 'heartbeat') return true; + + const requireAllowlist = settings.security?.require_sender_allowlist !== false; + if (!requireAllowlist) return true; + + const allowedByChannel = settings.security?.allowed_senders || {}; + const allowed = (allowedByChannel as Record)[messageData.channel] || []; + if (allowed.includes('*')) return true; + + const senderId = (messageData.senderId || '').trim(); + if (!senderId) return false; + return allowed.includes(senderId); +} + // Process a single message async function processMessage(messageFile: string): Promise { const processingFile = path.join(QUEUE_PROCESSING, path.basename(messageFile)); @@ -39,15 +84,33 @@ async function processMessage(messageFile: string): Promise { // Read message const messageData: MessageData = JSON.parse(fs.readFileSync(processingFile, 'utf8')); - const { channel, sender, message: rawMessage, timestamp, messageId } = messageData; + const { channel, sender, senderId, message: rawMessage, messageId } = messageData; + const safeMessagePreview = redactForLogs(rawMessage, 120); - log('INFO', `Processing [${channel}] from ${sender}: ${rawMessage.substring(0, 50)}...`); - emitEvent('message_received', { channel, sender, message: rawMessage.substring(0, 120), messageId }); + log('INFO', `Processing [${channel}] from ${sender}: ${redactForLogs(rawMessage, 50)}...`); + emitEvent('message_received', { channel, sender, message: 
safeMessagePreview, messageId }); // Get settings, agents, and teams const settings = getSettings(); const agents = getAgents(settings); const teams = getTeams(settings); + const allowDangerousFlags = settings.security?.allow_dangerous_agent_flags === true; + + if (!isSenderAuthorized(messageData, settings)) { + const responseFile = path.join(QUEUE_OUTGOING, path.basename(processingFile)); + const responseData: ResponseData = { + channel, + sender, + message: `Access denied. Sender is not allowlisted for ${channel}. Sender ID: ${senderId || 'unknown'}`, + originalMessage: rawMessage, + timestamp: Date.now(), + messageId, + }; + fs.writeFileSync(responseFile, JSON.stringify(responseData, null, 2)); + fs.unlinkSync(processingFile); + log('WARN', `Blocked unauthorized sender on ${channel}: ${sender} (${senderId || 'unknown'})`); + return; + } // Get workspace path from settings const workspacePath = settings?.workspace?.path || path.join(require('os').homedir(), 'tinyclaw-workspace'); @@ -138,7 +201,9 @@ async function processMessage(messageFile: string): Promise { if (!teamContext) { // No team context — single agent invocation (backward compatible) try { - finalResponse = await invokeAgent(agent, agentId, message, workspacePath, shouldReset, agents, teams); + finalResponse = await invokeAgent( + agent, agentId, message, workspacePath, shouldReset, agents, teams, allowDangerousFlags + ); } catch (error) { const provider = agent.provider || 'anthropic'; log('ERROR', `${provider === 'openai' ? 
'Codex' : 'Claude'} error (agent: ${agentId}): ${(error as Error).message}`); @@ -176,7 +241,9 @@ async function processMessage(messageFile: string): Promise { let stepResponse: string; try { - stepResponse = await invokeAgent(currentAgent, currentAgentId, currentMessage, workspacePath, currentShouldReset, agents, teams); + stepResponse = await invokeAgent( + currentAgent, currentAgentId, currentMessage, workspacePath, currentShouldReset, agents, teams, allowDangerousFlags + ); } catch (error) { const provider = currentAgent.provider || 'anthropic'; log('ERROR', `${provider === 'openai' ? 'Codex' : 'Claude'} error (agent: ${currentAgentId}): ${(error as Error).message}`); @@ -184,15 +251,23 @@ async function processMessage(messageFile: string): Promise { } chainSteps.push({ agentId: currentAgentId, response: stepResponse }); - emitEvent('chain_step_done', { teamId: teamContext.teamId, step: chainSteps.length, agentId: currentAgentId, responseLength: stepResponse.length, responseText: stepResponse }); + emitEvent('chain_step_done', { + teamId: teamContext.teamId, + step: chainSteps.length, + agentId: currentAgentId, + responseLength: stepResponse.length, + responsePreview: redactForLogs(stepResponse, 120), + }); // Collect files from this step const stepFileRegex = /\[send_file:\s*([^\]]+)\]/g; let stepFileMatch: RegExpExecArray | null; while ((stepFileMatch = stepFileRegex.exec(stepResponse)) !== null) { const filePath = stepFileMatch[1].trim(); - if (fs.existsSync(filePath)) { + if (isSafeOutboundFile(filePath, settings)) { allFiles.add(filePath); + } else { + log('WARN', `Blocked unsafe outbound file path from @${currentAgentId}: ${filePath}`); } } @@ -236,13 +311,21 @@ async function processMessage(messageFile: string): Promise { let mResponse: string; try { const mMessage = `[Message from teammate @${currentAgentId}]:\n${mention.message}`; - mResponse = await invokeAgent(mAgent, mention.teammateId, mMessage, workspacePath, mShouldReset, agents, teams); + 
mResponse = await invokeAgent( + mAgent, mention.teammateId, mMessage, workspacePath, mShouldReset, agents, teams, allowDangerousFlags + ); } catch (error) { log('ERROR', `Fan-out error (agent: ${mention.teammateId}): ${(error as Error).message}`); mResponse = "Sorry, I encountered an error processing this request."; } - emitEvent('chain_step_done', { teamId: teamContext!.teamId, step: chainSteps.length + 1, agentId: mention.teammateId, responseLength: mResponse.length, responseText: mResponse }); + emitEvent('chain_step_done', { + teamId: teamContext!.teamId, + step: chainSteps.length + 1, + agentId: mention.teammateId, + responseLength: mResponse.length, + responsePreview: redactForLogs(mResponse, 120), + }); return { agentId: mention.teammateId, response: mResponse }; }) ); @@ -255,7 +338,11 @@ async function processMessage(messageFile: string): Promise { let fanFileMatch: RegExpExecArray | null; while ((fanFileMatch = fanFileRegex.exec(result.response)) !== null) { const filePath = fanFileMatch[1].trim(); - if (fs.existsSync(filePath)) allFiles.add(filePath); + if (isSafeOutboundFile(filePath, settings)) { + allFiles.add(filePath); + } else { + log('WARN', `Blocked unsafe outbound file path from @${result.agentId}: ${filePath}`); + } } } @@ -274,42 +361,44 @@ async function processMessage(messageFile: string): Promise { .join('\n\n---\n\n'); } - // Write chain chat history to .tinyclaw/chats - try { - const teamChatsDir = path.join(CHATS_DIR, teamContext.teamId); - if (!fs.existsSync(teamChatsDir)) { - fs.mkdirSync(teamChatsDir, { recursive: true }); - } - const chatLines: string[] = []; - chatLines.push(`# Team Chain: ${teamContext.team.name} (@${teamContext.teamId})`); - chatLines.push(`**Date:** ${new Date().toISOString()}`); - chatLines.push(`**Channel:** ${channel} | **Sender:** ${sender}`); - chatLines.push(`**Steps:** ${chainSteps.length}`); - chatLines.push(''); - chatLines.push('---'); - chatLines.push(''); - chatLines.push(`## User Message`); - 
chatLines.push(''); - chatLines.push(rawMessage); - chatLines.push(''); - for (let i = 0; i < chainSteps.length; i++) { - const step = chainSteps[i]; - const stepAgent = agents[step.agentId]; - const stepLabel = stepAgent ? `${stepAgent.name} (@${step.agentId})` : `@${step.agentId}`; + // Persist full team chats only when explicitly enabled. + if (settings.security?.persist_team_chats === true) { + try { + const teamChatsDir = path.join(CHATS_DIR, teamContext.teamId); + if (!fs.existsSync(teamChatsDir)) { + fs.mkdirSync(teamChatsDir, { recursive: true }); + } + const chatLines: string[] = []; + chatLines.push(`# Team Chain: ${teamContext.team.name} (@${teamContext.teamId})`); + chatLines.push(`**Date:** ${new Date().toISOString()}`); + chatLines.push(`**Channel:** ${channel} | **Sender:** ${sender}`); + chatLines.push(`**Steps:** ${chainSteps.length}`); + chatLines.push(''); chatLines.push('---'); chatLines.push(''); - chatLines.push(`## Step ${i + 1}: ${stepLabel}`); + chatLines.push(`## User Message`); chatLines.push(''); - chatLines.push(step.response); + chatLines.push(redactForLogs(rawMessage, 4000)); chatLines.push(''); + for (let i = 0; i < chainSteps.length; i++) { + const step = chainSteps[i]; + const stepAgent = agents[step.agentId]; + const stepLabel = stepAgent ? 
`${stepAgent.name} (@${step.agentId})` : `@${step.agentId}`; + chatLines.push('---'); + chatLines.push(''); + chatLines.push(`## Step ${i + 1}: ${stepLabel}`); + chatLines.push(''); + chatLines.push(redactForLogs(step.response, 4000)); + chatLines.push(''); + } + const now = new Date(); + const dateTime = now.toISOString().replace(/[:.]/g, '-').replace('T', '_').replace('Z', ''); + const chatFilename = `${dateTime}.md`; + fs.writeFileSync(path.join(teamChatsDir, chatFilename), chatLines.join('\n')); + log('INFO', `Chain chat history saved to ${chatFilename}`); + } catch (e) { + log('ERROR', `Failed to save chain chat history: ${(e as Error).message}`); } - const now = new Date(); - const dateTime = now.toISOString().replace(/[:.]/g, '-').replace('T', '_').replace('Z', ''); - const chatFilename = `${dateTime}.md`; - fs.writeFileSync(path.join(teamChatsDir, chatFilename), chatLines.join('\n')); - log('INFO', `Chain chat history saved to ${chatFilename}`); - } catch (e) { - log('ERROR', `Failed to save chain chat history: ${(e as Error).message}`); } } @@ -320,16 +409,16 @@ async function processMessage(messageFile: string): Promise { let fileMatch: RegExpExecArray | null; while ((fileMatch = fileRefRegex.exec(finalResponse)) !== null) { const filePath = fileMatch[1].trim(); - if (fs.existsSync(filePath)) { + if (isSafeOutboundFile(filePath, settings)) { outboundFilesSet.add(filePath); + } else { + log('WARN', `Blocked unsafe outbound file path in final response: ${filePath}`); } } const outboundFiles = Array.from(outboundFilesSet); - // Remove the [send_file: ...] tags from the response text - if (outboundFiles.length > 0) { - finalResponse = finalResponse.replace(fileRefRegex, '').trim(); - } + // Always remove [send_file: ...] tags from user-facing response text. 
+ finalResponse = finalResponse.replace(fileRefRegex, '').trim(); // Limit response length after tags are parsed and removed if (finalResponse.length > 4000) { @@ -356,7 +445,7 @@ async function processMessage(messageFile: string): Promise { fs.writeFileSync(responseFile, JSON.stringify(responseData, null, 2)); log('INFO', `✓ Response ready [${channel}] ${sender} via agent:${agentId} (${finalResponse.length} chars)`); - emitEvent('response_ready', { channel, sender, agentId, responseLength: finalResponse.length, responseText: finalResponse, messageId }); + emitEvent('response_ready', { channel, sender, agentId, responseLength: finalResponse.length, messageId }); // Clean up processing file fs.unlinkSync(processingFile);