diff --git a/.env.example b/.env.example
index 3e9d3347..1c3be079 100644
--- a/.env.example
+++ b/.env.example
@@ -13,6 +13,10 @@ INKOS_LLM_API_KEY=sk-your-key-here
 # Model name
 INKOS_LLM_MODEL=gpt-4o
 
+# Optional: set a custom User-Agent header for LLM HTTP requests
+# Some gateways/WAFs may block requests without a browser-like UA.
+INKOS_HTTP_USER_AGENT=Mozilla/5.0 (compatible; InkOS)
+
 # Notifications (optional)
 INKOS_TELEGRAM_BOT_TOKEN=
 INKOS_TELEGRAM_CHAT_ID=
diff --git a/packages/cli/src/commands/init.ts b/packages/cli/src/commands/init.ts
index 0b360717..6d35bb89 100644
--- a/packages/cli/src/commands/init.ts
+++ b/packages/cli/src/commands/init.ts
@@ -71,10 +71,11 @@ export const initCommand = new Command("init")
         "# Global config at ~/.inkos/.env will be used by default.",
         "# Uncomment below to override for this project only:",
         "# INKOS_LLM_PROVIDER=openai",
-        "# INKOS_LLM_BASE_URL=",
-        "# INKOS_LLM_API_KEY=",
-        "# INKOS_LLM_MODEL=",
-        "",
+        "# INKOS_LLM_BASE_URL=https://api.openai.com/v1",
+        "# INKOS_LLM_API_KEY=your-api-key-here",
+        "# INKOS_LLM_MODEL=gpt-4o",
+        "# INKOS_HTTP_USER_AGENT=Mozilla/5.0 (compatible; InkOS)",
+        "",
         "# Web search (optional):",
         "# TAVILY_API_KEY=tvly-xxxxx",
       ].join("\n"),
@@ -97,6 +98,7 @@ export const initCommand = new Command("init")
         "# INKOS_LLM_MAX_TOKENS=8192",
         "# INKOS_LLM_THINKING_BUDGET=0 # Anthropic extended thinking budget",
         "# INKOS_LLM_API_FORMAT=chat # chat (default) or responses (OpenAI Responses API)",
+        "# INKOS_HTTP_USER_AGENT=Mozilla/5.0 (compatible; InkOS)",
         "",
         "# Web search (optional, for auditor era-research):",
         "# TAVILY_API_KEY=tvly-xxxxx # Free at tavily.com (1000 searches/month)",
diff --git a/packages/core/src/llm/provider.ts b/packages/core/src/llm/provider.ts
index 30326909..288b21a8 100644
--- a/packages/core/src/llm/provider.ts
+++ b/packages/core/src/llm/provider.ts
@@ -2,6 +2,12 @@ import OpenAI from "openai";
 import Anthropic from "@anthropic-ai/sdk";
 import type { LLMConfig } from "../models/project.js";
 
+// Resolve the User-Agent header for outbound LLM HTTP requests.
+// Uses INKOS_HTTP_USER_AGENT when set (trimmed); falls back to a stable default.
+function resolveUserAgent(): string {
+  const fromEnv = process.env.INKOS_HTTP_USER_AGENT?.trim();
+  if (fromEnv) return fromEnv;
+  return "Mozilla/5.0 (compatible; InkOS)";
+}
+
 // === Streaming Monitor Types ===
 
 export interface StreamProgress {
@@ -118,6 +124,7 @@ export function createLLMClient(config: LLMConfig): LLMClient {
     extra: config.extra ?? {},
   };
 
+  const userAgent = resolveUserAgent();
   const apiFormat = config.apiFormat ?? "chat";
   const stream = config.stream ?? true;
 
@@ -128,7 +135,11 @@ export function createLLMClient(config: LLMConfig): LLMClient {
       provider: "anthropic",
       apiFormat,
       stream,
-      _anthropic: new Anthropic({ apiKey: config.apiKey, baseURL }),
+      _anthropic: new Anthropic({
+        apiKey: config.apiKey,
+        baseURL,
+        defaultHeaders: { "User-Agent": userAgent },
+      }),
       defaults,
     };
   }
@@ -137,7 +148,11 @@ export function createLLMClient(config: LLMConfig): LLMClient {
       provider: "openai",
       apiFormat,
       stream,
-      _openai: new OpenAI({ apiKey: config.apiKey, baseURL: config.baseUrl }),
+      _openai: new OpenAI({
+        apiKey: config.apiKey,
+        baseURL: config.baseUrl,
+        defaultHeaders: { "User-Agent": userAgent },
+      }),
      defaults,
     };
   }