From daf7942da9b9800956e0fa01e50e8939bb632dd5 Mon Sep 17 00:00:00 2001 From: darkwu <507639721@qq.com> Date: Tue, 10 Mar 2026 16:50:30 +0800 Subject: [PATCH 1/8] FIX ISSUE:The parameter `max_tokens` specified in the request are not valid: max_tokens and max_completion_tokens cannot be set at the same time;Refactor maxTokens assignment in AbstractLlmService MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 在依赖的agentscope中OpenAIChatFormatter.java第107-110行,代码同时将maxTokens设置到了OpenAIRequest的maxCompletionTokens和maxTokens两个字段中,这导致了API返回400错误,因为这两个参数是互斥的。根据OpenAI API文档,max_completion_tokens是较新的参数,用于替代max_tokens,所以应该只设置其中一个。 --- .../himarket/service/hichat/service/AbstractLlmService.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hichat/service/AbstractLlmService.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hichat/service/AbstractLlmService.java index 5802bfd57..e58d62c0f 100644 --- a/himarket-server/src/main/java/com/alibaba/himarket/service/hichat/service/AbstractLlmService.java +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hichat/service/AbstractLlmService.java @@ -177,7 +177,7 @@ protected ModelFeature getOrDefaultModelFeature(ProductResult product) { return ModelFeature.builder() .model(StrUtil.blankToDefault(modelFeature.getModel(), "qwen-max")) - .maxTokens(ObjectUtil.defaultIfNull(modelFeature.getMaxTokens(), 5000)) + .maxTokens(modelFeature.getMaxTokens()) .temperature(ObjectUtil.defaultIfNull(modelFeature.getTemperature(), 0.9)) .streaming(ObjectUtil.defaultIfNull(modelFeature.getStreaming(), true)) .webSearch(ObjectUtil.defaultIfNull(modelFeature.getWebSearch(), false)) From 1b1df100004b1649d82f099b6d7b1dabb7cc5774 Mon Sep 17 00:00:00 2001 From: daofeng Date: Mon, 16 Mar 2026 16:36:09 +0800 Subject: [PATCH 2/8] =?UTF-8?q?feat:=20=E9=9B=86=E6=88=90=20HiCoding=20?= 
=?UTF-8?q?=E6=A8=A1=E5=9D=97=E4=B8=8E=20Skill=20=E5=B8=82=E5=9C=BA?= =?UTF-8?q?=E5=8A=9F=E8=83=BD=20(#178)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat(coding): 添加 HiCoding 功能模块 - 新增 AgentMessage 组件用于显示 AI 消息 - 实现 ChatStream 组件用于聊天消息流展示 - 创建 CodingInput 组件提供编码会话输入功能 - 开发 CodingSidebar 组件管理编码任务列表 - 构建 CodingTopBar 组件显示状态和配置选项 - 添加 CodingWelcome 组件作为欢迎界面 - 实现 DiffViewer 组件用于文件差异对比 - 创建 PermissionDialog 组件处理权限请求确认 - 开发 PlanDisplay 组件展示任务计划 - 添加 SlashMenu 组件提供命令菜单功能 - 实现 TerminalOutput 组件显示终端输出 - 创建 ThoughtBlock 组件展示思考过程 - 开发 ToolCallCard 组件显示工具调用卡片 - 添加 ToolPanel 组件提供工具面板功能 - 实现 UserMessage 组件显示用户消息 - 在 Header 组件中添加 HiCoding 导航标签 - 创建 CodingSessionContext 提供编码会话上下文管理 * fix * fix * feat(coding): 添加文件渲染器和预览功能 - 新增 ArtifactRenderer 组件用于根据文件类型渲染不同内容 - 实现 HtmlRenderer、MarkdownRenderer、SvgRenderer、ImageRenderer、PdfRenderer 等渲染器 - 添加 FileRenderer 用于显示文件类型的通用预览界面 - 集成 ArtifactPreview 组件支持文件预览和下载功能 - 在 AgentMessage 中添加 compact 变体样式选项 - 扩展 ChatStream 组件支持工作单元卡片和滚动功能 - 为 CodingInput 添加文件附件上传和拖拽支持功能 - 更新 PlanDisplay 组件支持内联变体和展开收起功能 - 新增 ChangesView 组件用于显示代码变更差异 - 集成 Ant Design Image 组件用于图片预览功能 * docs(acp): 添加 Agent Client Protocol 网站文档和规范说明 - 新增多个 ACP 相关页面文档,包括入门介绍、架构说明、客户端、代理及库列表 - 更新 ACP 注册表说明,涵盖代理列表及使用提交流程 - 添加多语言库支持介绍文档,包含 Kotlin、Python、Rust、TypeScript 和社区维护库 - 补充.gitignore 文件,忽略新的工作空间目录 - 新增 AGENTS.md,规范所有答复均使用简体中文回复 * feat(hicoding): initial commit of HiWork module and integrate multi-tenant AI IDE support - Add comprehensive requirements document detailing HiWork design and user stories - Implement WebSocket-to-stdio ACP proxy and AgentRun lifecycle management specifications - Define multi-tenant session isolation, resource limits, and support for multiple Agent types - Update workspace path handling to properly resolve absolute and relative paths securely - Extend token utility to support JWT expiration with day unit - Increase JWT expiration duration from 2 hours to 7 days in configuration - Rename UI labels from 
"HiCoding" to "HiWork" to reflect rebranding - Enhance DiffViewer to use 'diff' library for better diff line handling and display - Add user scroll interaction detection and task completion toast with browser notifications - Add queued prompt handling in CodingInput with UI for queued messages management - Introduce unit tests for ChangesView and ChatStream components to improve test coverage - Add new dependencies like 'diff', 'vitest', and testing-library for improved development experience - Include TerminalView component import and adjustments for RightPanel enhancements * refactor(quest): rename coding components and context to quest - Rename all components and context files from 'coding' to 'quest' - Update imports and references in all affected files accordingly - Replace useCodingState and useCodingDispatch hooks with quest equivalents - Rename CodingSessionContext and related types to QuestSessionContext - Adjust routing and page components from /coding to /quest and related names - Introduce auto-permission logic in useAcpSession for permission requests - Modify Header tabs to replace "/coding" path with "/quest" for HiWork tab - Update tests and mocks to use QuestSessionContext instead of CodingSessionContext * feat(coding): add interactive terminal and workspace file tree features - Add TerminalProcess class using pty4j for PTY shell processes - Implement TerminalWebSocketHandler for WebSocket-based shell interaction - Update SecurityConfig to allow /ws/terminal WebSocket endpoint - Register TerminalWebSocketHandler in WebSocketConfig - Add DevProxyController to proxy requests to local dev servers by port - Extend WorkspaceController with API for directory tree retrieval with depth control - Enhance ResponseAdvice to support Mono types in unified response handling - Incorporate terminal-related dependencies in pom.xml (pty4j) - Introduce coding UI components: Header tab, CodingTopBar, EditorArea, FileTree, PreviewPanel, TerminalPanel with xterm.js 
integration and WebSocket connection - Update frontend dependencies to add @monaco-editor/react, @xterm/xterm, and addons - Add UX improvements for workspace file browsing and terminal output handling - Maintain security and error handling standards in new WebSocket and REST endpoints * feat(quest): add clickable file path links and merge consecutive tool calls - Enhance AgentMessage to linkify file paths with clickable, accessible spans - Add onOpenFile handlers to AgentMessage, ChatStream, WorkUnitCard and Coding page - Implement file path open logic in CodingContent with content fetching and tab switching - Refactor ToolCallCard: introduce EditCard and ExecuteCard components with expandable diffs and commands - Add merged rendering of consecutive auxiliary tool calls in WorkUnitCard with expandable group rows - Improve ThoughtBlock with streaming duration display and adaptive styles - Update UI colors and typography for better consistency in PlanDisplay, ThoughtBlock, and ToolCallCard - Fix styling and accessibility issues on interactive elements across components * feat(quest): add file mention menu with filtering and keyboard navigation - Introduce FileMentionMenu component to display file list with icons and selection support - Implement flattening and filtering utilities for file trees matching input queries - Add "@" mention detection in QuestInput to trigger file mention menu with filter - Enable keyboard navigation (arrow keys, enter) and mouse selection in mention menu - Load and cache workspace file tree asynchronously on first mention trigger - Insert selected file mention text and create file path attachments in input context - Show loading and empty states in mention menu during file fetch and filtering - Update QuestInput state and handlers to support mention menu visibility and interactions * feat(acp): 支持多种 ACP CLI Provider 并实现动态选择 - 扩展配置支持多个 CLI Provider,包括命令、参数和环境变量配置 - 新增 /cli-providers 接口,列出所有可用的 CLI Provider 并检测命令可用性 - WebSocket 
握手时支持通过查询参数选择特定 CLI Provider,默认使用全局配置 - AcpProcess 支持传入额外环境变量,增强子进程启动灵活性 - 修改前端 ACL 路由,新增 /cli-providers 路径支持 - 增加了多种 CLI Provider 的单元及集成测试,覆盖初始化、会话与命令可用性检测 - 统一日志与错误处理,清晰标示具体 CLI Provider 的启动与通讯状态 * refactor(ui): 将下拉选择控件替换为Ant Design组件 - 将CliProviderSelect中的原生select替换为Ant Design的Select组件 - 优化CliProviderSelect的样式和交互,支持禁用选项显示 - CodingTopBar和QuestTopBar中模型选择下拉也改用Ant Design的Select - update application.yml,新增opencode CLI命令配置,修改qwen-code命令名链接 * style(frontend): 优化界面字体样式与布局细节 - 统一调整聊天和任务相关组件中的字体大小和行距,提升文本可读性 - 优化各组件的间距和圆角样式,增强界面一致性和美观度 - WorkUnitCard组件改进动作区域的展开逻辑,避免手动切换时自动展开干扰 - PlanDisplay和ThoughtBlock组件调整边框和按钮样式,实现统一的交互体验 - Messages和AgentMessage组件详细调整Markdown渲染的样式类,提高视觉效果 - 更新index.css引入Inter和Noto Sans SC字体,并优化字体平滑显示效果 * feat(hicli): 集成 ACP 调试前端模块 HiCli - 迁移 acp-demo ACP 协议调试功能到 himarket-frontend,形成独立 HiCli 模块 - 扩展 QuestSessionContext,新增调试相关状态和 action,管理原始消息与聚合日志 - 实现 useHiCliSession Hook,支持 WebSocket 连接、日志处理及会话操作 - 新建 HiCli 专用组件(HiCliSelector、HiCliTopBar、AcpLogPanel、AgentInfoCard 等) - 创建 HiCli 页面,集成聊天区和调试面板,支持多会话切换和权限请求 - 使用 Tailwind CSS 与 Ant Design 统一样式,替代 acp-demo 原生 CSS - 添加 /hicli 路由及导航标签,确保与平台其他模块风格和流程一致 - 迁移日志聚合器和日志过滤器,新增对应的属性测试保障正确性 - 完成 Agent 信息展示、ACL 日志面板、调试标签切换等核心功能需求 - 在服务器配置中新增 isolateHome 选项,实现 CLI 工具凭证隔离机制 * feat(acp-error-response): 实现ACP错误响应的对话流展示 - 新增 ChatItemError 类型,支持完整错误码、消息及扩展数据存储 - 在 questReducer 中新增 PROMPT_ERROR action,追加错误消息且更新处理状态 - 修改 useAcpSession 和 useHiCliSession Hook,捕获并分发 PROMPT_ERROR 替代原错误处理 - 创建 ErrorMessage 组件以视觉区分错误信息,支持错误码、消息及扩展数据展示 - 扩展 ChatStream 组件以渲染 error 类型 ChatItem - 完整链路确保ACP错误响应从resolveResponse到UI展示错误信息不丢失 - 使用 fast-check 实现多项属性测试覆盖类型定义、状态更新、Hook行为及UI渲染等关键环节 * feat(runtime): 实现多运行时策略抽象与 K8s 支持 - 引入 RuntimeAdapter 接口及 LocalRuntimeAdapter 和 K8sRuntimeAdapter 实现 - 设计 RuntimeFactory 根据类型创建对应运行时实例 - 统一后端通过 RuntimeAdapter 管理 CLI 进程生命周期和通信 - 前端新增 RuntimeSelector 组件支持运行时方案选择 - useAcpSession Hook 扩展支持运行时参数传递 - 完成文件系统抽象和通信适配,保障跨运行时一致性 - 集成 K8s 客户端动态管理 Pod,实现沙箱隔离和资源限制 - 支持运行时健康检查及异常通知机制,提升稳定性 - 提供详细设计文档和多项属性基测试保障正确性和安全性 * 
refactor(acp-session): 重构hiwork和hicoding会话为延迟连接模式 - 引入欢迎页和CLI选择器,页面加载时不再自动建立WebSocket连接 - 选择CLI后构建连接URL并建立连接,连接成功后自动创建Quest会话 - 移除QuestTopBar和CodingTopBar顶部栏中的CLI切换下拉框 - 侧边栏底部新增切换工具按钮,实现断开连接并返回欢迎页 - useAcpSession和useAcpWebSocket支持URL为空时不自动连接 - 新增autoCreatedRef状态,断开连接时重置,防止切换后会话丢失 - 提取公共CliSelector组件复用hiwork、hicoding和hicli的CLI选择逻辑 - 设计并计划基于属性的测试确保Reset State、连接管理及状态同步的正确性 * feat(workspace): 限制文件预览大小并优化目录树展示 - 新增文件大小限制,文本文件最大2MB,二进制文件最大20MB,超过限制返回413错误 - 文件预览接口支持根据编码(utf-8/base64)正确返回内容 - 编辑器支持显示图片类型文件,新增ImageRenderer渲染图片内容 - 目录树节点限制最多展示2000个,超出时标记truncated并在UI显示警告信息 - 增加前端文件 * feat(auth-architecture): 设计CLI Agent凭证归属与认证架构 - 制定多租户环境下CLI工具凭证的归属模型,包括平台凭证、用户凭证和混合模式 - 定义开源CLI与商业CLI的认证策略分类及凭证注入机制 - 设计凭证的安全存储方案,支持加密、访问控制和轮换管理 - 设计凭证自动注入机制,实现无感知认证启动体验 - 设计用户凭证管理界面,支持凭证绑定、验证和撤销 - 管理平台凭证与用户用量配额,控制调用额度和访问权限 - 实现认证架构与多运行时环境(Local、K8s)无缝集成 - 定义从POC到生产的认证架构分阶段演进策略 - 优先推荐使用开源CLI与自有模型端点,提升平台控制力 * feat(cli): 实现自定义模型配置支持并集成至CLI启动流程 - 定义 CustomModelConfig 数据模型,支持 baseUrl、apiKey、modelId、modelName、protocolType字段及校验 - 实现 CliConfigGenerator 接口及两种生成器 OpenCodeConfigGenerator 和 QwenCodeConfigGenerator - 支持生成并合并 opencode.json 与 .qwen/settings.json 文件,确保配置文件合法且包含用户自定义模型 - 扩展 AcpHandshakeInterceptor 从 WebSocket 查询参数解析 customModelConfig JSON,存入会话属性 - 在 AcpWebSocketHandler 启动CLI进程前注入自定义模型配置及环境变量,启动失败时清理生成的配置文件 - 扩展 CliProviderConfig 和响应,新增 supportsCustomModel 字段以标识支持自定义模型的CLI工具 - 前端新增 CustomModelForm 组件,动态展现自定义模型配置表单并完成字段校验 - 通过 WebSocket URL 查询参数传递序列化的自定义模型配置,实现前后端联动切换模型 - 在 application.yml 配置文件标记 OpenCode 和 QwenCode 支持自定义模型,默认其他CLI工具不支持 * refactor(mvn): 重命名配置文件并调整数据库迁移脚本版本 - 将 .mvn/jvm.config.bak 重命名为 .mvn/jvm.config - 删除 .mvn/maven.config.bak 文件内容 - 将数据库迁移脚本 V6__Add_k8s_cluster_table.sql 重命名为 V7__Add_k8s_cluster_table.sql * feat(cli-provider): 实现模型市场模型列表接口,支持前端选择已订阅模型 - 新增 BaseUrlExtractor 工具类,实现从产品路由配置提取模型接入点 baseUrl - 新增 ProtocolTypeMapper 工具类,实现协议类型字符串映射 - 在 CliProviderController 中新增 GET /cli-providers/market-models 接口,返回当前开发者已订阅的 MODEL_API 类型模型列表及 apiKey - 接口根据开发者主消费者获取订阅,筛选 APPROVED 
状态且产品类型为 MODEL_API 的产品,跳过不符合条件的产品并记录日志 - 组装 MarketModelInfo DTO,包含 productId、name、modelId、baseUrl、protocolType、description - 处理无主消费者、无订阅以及凭证缺失等边界情况,返回空列表及 null apiKey - 日志打印关键警告信息,保证接口鲁棒性与正确性 * feat(cli-mcp-skill): 集成市场 MCP Server 和 Skill 支持 - 新增后端接口 GET /cli-providers/market-mcps,返回已订阅 MCP Server 列表及认证信息 - 新增后端接口 GET /cli-providers/market-skills,返回已发布 Skill 列表,无需认证 - 扩展 CliSessionConfig 数据模型,支持传递 customModelConfig、mcpServers 和 skills - AcpHandshakeInterceptor 支持解析 cliSessionConfig 参数,保持向后兼容 customModelConfig 参数 - AcpWebSocketHandler 启动时调用 generateMcpConfig 和 generateSkillConfig 注入 MCP 和 Skill 配置 - 扩展 CliConfigGenerator 接口,新增默认方法生成 MCP 和 Skill 配置,支持按需覆盖 - QwenCodeConfigGenerator 实现 MCP 配置写入 .qwen/settings.json 和 Skill 配置写入 .agents/skills/*/SKILL.md - 新增前端 MarketMcpSelector 和 MarketSkillSelector 组件,实现 MCP Server 和 Skill 的列表展示及选择 - 扩展公共 CliSelector 组件状态管理,组装完整 CliSessionConfig 并通过 WebSocket 传递 - 扩展前端 API 接口,支持市场 MCP 和 Skill 的获取及 Skill 文件下载 - 增加 CliProviderConfig 和 CliProviderInfo 支持 MCP 和 Skill 的能力标记 - WebSocket URL 支持新增 cliSessionConfig 查询参数,兼容旧参数 customModelConfig - 设计及测试文档详述架构方案、数据模型、错误处理及属性测试策略,保证功能正确性及兼容性 - 实现多种异常及边界处理逻辑,保证前后端稳定运行与良好体验 * test(hooks): 删除 useAcpSession 运行时 webcontainer 相关测试 - 移除 useAcpSession 中 WebContainer 适配器和文件同步服务相关代码 - 删除所有关于 WebContainer 运行时的单元测试用例 - 去除 package.json 中 @webcontainer/api 依赖 - 调整 useAcpSession 测试只覆盖 WebSocket 通信分支 - 移除 webcontainer 模式下的错误处理及连接管理代码 - 删除 FileSyncService 及相关测试代码和依赖 - 简化 useAcpSession 返回,不再包含 runtimeError 和 reconnect 特殊处理 * feat(sidecar): 实现支持多 CLI 的 Node.js Sidecar Server替代 websocat - 新增基于 Node.js 的 Sidecar Server,支持通过 WebSocket URL 动态指定 CLI 命令 - Sidecar Server 维护命令白名单,拒绝非白名单命令连接 - 实现多并发 WebSocket 会话隔离与双向消息桥接 CLI stdin/stdout - 完善 CLI 子进程生命周期管理,支持优雅关闭和异常处理 - 提供健康检查 HTTP 端点,反映当前连接数和进程数 - 改造沙箱镜像及入口脚本,移除 websocat 用新的 Sidecar Server 启动逻辑 - 后端 Java 代码改造: - 移除 Pod 环境变量中的 CLI_COMMAND/CLI_ARGS,新增 ALLOWED_COMMANDS 参数 - 简化 Pod 复用逻辑,不再区分 provider 标签 - 动态构建 Sidecar WebSocket URI,包含 CLI 命令及参数 - AcpWebSocketHandler 传递 CLI 参数至 
K8sRuntimeAdapter,注入配置到 Pod - 新增属性和单元测试覆盖命令校验、URI 构造、Pod 规格及连接生命周期 - 优化日志和错误处理,提升系统稳定性和安全性 * refactor(runtime): 统一沙箱运行时架构与接口设计 - 引入 SandboxProvider 抽象,多种沙箱类型实现统一接口 - 本地沙箱启动 Sidecar Server,文件操作统一通过 Sidecar HTTP API - K8s 沙箱改用 Pod 内 Sidecar HTTP API 替代 kubectl exec 操作文件 - 消除 Handler 中 Runtime 类型分支,统一通过 SandboxProvider 交互 - 实现 SandboxInitPipeline 统一初始化流程,所有沙箱共用 - 完善初始化阶段设计,支持沙箱获取、文件系统健康检查、配置注入等 - 新增 SandboxProviderRegistry 管理多种沙箱提供者实例 - 定义统一沙箱配置 SandboxConfig 和沙箱信息 SandboxInfo 结构 - 适配前端 WebSocket 与后端流水线调用,保证行为一致性 - 提升架构扩展性,支持未来新增 E2B 等多种沙箱类型 - 统一错误处理与重试策略,保证初始化可靠性 * chore(cleanup): ignore docs and remove local-generated files * chore(gitignore): stop tracking docs directory * fix(build): restore local CSB system dependency * feat(sandbox): 支持 HiWork 与 HiCoding 沙箱模式对接 - HiWork 和 HiCoding 启用运行时选择器,支持本地与 K8s 运行时切换 - WebSocket URL 增加 sandboxMode 参数,K8s 模式传递 user 级沙箱参数 - HiWork 和 HiCoding 页面展示沙箱状态(creating、ready、error) - K8s 沙箱就绪后自动创建 Quest,避免 Pod 未准备完成时请求失败 - HiCli CliSelector 补充传递 sandboxMode 参数及认证方案差异化支持 - 增加认证方案步骤,根据 CLI 工具支持展示分别认证选项 - 后端 CliProviderConfig 扩展 authOptions 和 authEnvVar,支持认证凭据注入环境变量 - 按 CLI 工具差异化生成 MCP 与 Skill 配置文件路径,新增相关 ConfigGenerator 实现 - 运行时选择单一选项时自动选中,提升用户体验 - 提供 Kiro CLI 沙箱认证待支持提示,禁用对应运行时连接按钮 - 增加全面的属性测试和单元测试覆盖正确性及异常处理 - 升级 Dockerfile 基础镜像,注释调整 Arthas 安装步骤 * feat(acp): 实现 HiWork 和 HiCoding 沙箱认证及配置支持 - 在后端 AcpProperties.CliProviderConfig 中新增 authOptions 和 authEnvVar 字段支持认证方案配置 - 扩展 CliProviderController 接口返回 authOptions 和 authEnvVar,支持前端渲染 - 在 AcpWebSocketHandler 中实现 authToken 注入环境变量逻辑,支持 CLI 子进程认证凭据传递 - 新增 ClaudeCodeConfigGenerator 和 QoderCliConfigGenerator,生成对应配置文件并支持 MCP 配置合并 - LocalSandboxProvider 支持通过 query 参数传递环境变量给 Sidecar,解决进程复用环境变量更新问题 - 前端扩展 ICliProvider、CliSessionConfig 类型,支持认证方案和 authToken 传递 - CliSelector 组件新增认证方案选择步骤,支持默认方案和 Personal Access Token 输入 - Claude Code + K8s 运行时显示 API Key 输入框,Kiro CLI + K8s 运行时禁用连接并展示提示 - HiWork(Quest.tsx)和 HiCoding(Coding.tsx)页面支持运行时选择和沙箱模式参数传递 - 连接流程支持 sandboxMode 传递,沙箱状态管理和自动创建 Quest 逻辑调整为等待沙箱就绪 - 修改配置文件 
application.yml,更新相关 CLI 认证环境变量名及认证方案配置 - 补充步骤工具函数 pure computeSteps 函数中插入认证方案步骤逻辑,实现可见性控制 * fix(hicli): 修复延迟创建会话避免空白会话自动生成 - 移除 HiCli.tsx 中 WebSocket 连接成功且 ACP 初始化完成时自动创建空会话的逻辑 - 修改主内容区渲染,支持无活跃会话时展示输入框和欢迎提示 - sendPrompt 支持无活跃会话时先创建会话再发送消息 - "+"按钮复用已有无消息空会话,避免重复创建 - HiCliWelcome 组件展示空初始欢迎提示,不再仅显示“新建 Quest”按钮 - HiCliSidebar 空状态提示改为“发送消息开始新对话” - HiCliTopBar 模式选择器无活动会话时回退到初始化状态的 mode - 保持发送消息、会话切换、多会话创建等行为不变 - 新增属性基和探索性测试覆盖所有 CLI Provider 和运行时类型,确保修复正确无回归 * fix(acp): 优化沙箱主机信息传递和终端连接的Pod健康校验 - 修改默认K8s命名空间为"default",替换原"himarket" - 在沙箱WebSocketHandler中推送沙箱Host信息给前端,提高状态通知准确性 - 新增sendSandboxStatus重载方法,支持携带sandboxHost参数 - PodReuseManager新增getHealthyPodEntry方法,增强Pod健康状态验证和缓存回退逻辑 - TerminalWebSocketHandler中实现终端连接时对Pod的健康重试机制,提升连接稳定性 - 调整K8sRuntimeAdapter和PodReuseManager的默认命名空间配置一致性 * refactor(acp): 优化Pod缓存处理逻辑和代码格式 - 统一Pod缓存计算方法,增强Pod健康检查逻辑 - 优化日志记录格式,提升可读性 - 修改K8s终端后端构造参数格式,提升代码一致性 - 前端测试代码中移除未使用变量,修正大小写使其规范 - 统一HiCliWelcome组件中未使用的props命名以避免警告 - 保持性属性测试中调整模拟函数参数和变量命名,改进代码可维护性 * chore(build): 优化Docker镜像构建和调整WebSocket重连次数 - 将Docker镜像构建脚本中buildx添加--pull=false参数,避免每次拉取最新基础镜像 - 将useAcpWebSocket和useTerminalWebSocket的maxReconnectAttempts参数从10降至2,减少重连次数 - 修改application.yml配置,将acp.local-enabled从true改为false - 调整Header组件中标签顺序,优化用户界面显示顺序 * feat(cli): 默认选中市场模型并自动选择第一个市场模型 - 将 CliSelector 组件的默认 modelConfigMode 从 'none' 改为 'market' - 切换 CLI 工具时重置模型配置模式为 'market' - 调整模型配置选项顺序,默认将市场模型提前显示 - MarketModelSelector 加载市场模型后自动选中第一个模型 - 实现自动调用 onChange 传递选中模型的信息以便后续使用 * fix: add git ignore * feat(skill): 实现技能包上传与预览功能 - 新增 skill_file 表及相关实体,支持存储技能包内文件内容及元信息 - 修改 product 表结构,去除 name 唯一约束并扩展 description 字段长度 - 实现技能包上传接口,支持 zip 和 tar.gz 格式,解析 SKILL.md 并校验 YAML front matter - 实现上传文件内容编码判断,文本内容直接保存,二进制文件 Base64 编码存储 - 支持批量 upsert skill_file 表,文件数量限制 500,单文件大小上限 5MB - 新增公开接口获取技能包文件树、单个文件内容、所有文件及整体压缩包下载 - himarket-admin 新增技能包管理 Tab,支持拖拽上传并预览文件树及文件内容 - himarket-frontend 扩展 SkillDetail 页面,添加文件树侧边栏及文件预览功能 - 前端预览支持 markdown 渲染和 Monaco Editor,二进制文件显示不支持预览提示 - CliSelector 逻辑改造,选择技能时调用接口获取完整文件列表写入 
CliSessionConfig - generateSkillConfig 方法支持根据文件列表写入完整目录结构,兼容旧数据只写 SKILL.md - 添加 commons-compress 依赖用于 tar.gz 解压,使用 Java 标准库解压 zip 文件 - 详细异常处理,上传时缺少关键字段返回明确错误信息 - 保留向后兼容接口 GET /skills/{productId}/download 返回原 product.document 内容 - 设计文档、需求文档和任务清单全面覆盖实现细节及验证点 * feat(acp): 支持通过 WebSocket 消息延迟初始化沙箱并增加配置批量注入功能 - 新增 deferredInitMap 用于暂存未携带 session/config 消息的 WebSocket 连接参数 - 支持等待前端发送 session/config 消息后再启动沙箱初始化流水线 - ConfigInjectionPhase 改用 tar.gz 包批量注入配置文件,避免 JSON 序列化导致的 Unicode 规范化问题 - 实现 SandboxProvider.extractArchive 接口,通过 Sidecar HTTP API 上传 tar.gz 压缩包并解压 - K8sSandboxProvider 和 LocalSandboxProvider 实现 extractArchive 方法 - 增加对配置文件注入后抽样读回验证,确保文件正确解压且不为空 - PodReuseManager 新增等待 SLB 后端健康检查通过逻辑,确保流量可达 Pod 的 sidecar - AcpWebSocketHandler 优化消息处理逻辑,支持兼容旧客户端同时对新客户端延迟启动pipeline - 修复 SkillPackageServiceImpl 中编码内容长度计算冗余问题 - README 文档补充 Agent Skill 相关描述及 ServiceAccount 绑定 Role 的声明 - 测试覆盖改进,新增 ConfigInjectionPhase 相关单元测试,覆盖 tar.gz 解压及异常场景处理 * refactor(runtime): 优化 Pod 和 Service 名称管理,提升资源复用 - Pod 名称改为固定格式 sandbox-{userId},替代原随机生成名称 - Service 名称同样改为统一 sandbox-svc-{userId},确保与 Pod 对应关系明确 - 新增 getHealthyPodEntryWithDefaultClient 方法,便于无客户端引用调用方获取健康 Pod - 创建 Pod 前检查并删除残留同名旧 Pod,避免状态冲突 - 查询 Service 逻辑简化,直接按确定性名称查找,删除孤儿 Service 逻辑取消 - Service 创建改为 createOrReplace,支持更新已有服务 - 相关测试调整,适配新版固定 Pod 名称规则 feat(frontend): 集成 Monaco Editor,提升代码编辑体验 - 前端工程引入 vite-plugin-monaco-editor 插件 - 配置 Vite 以加载并启用 Monaco Editor - 移除 frontend 中对 monaco-editor 的手动加载冗余代码 fix(frontend): 增加目录树深度查询,优化文件浏览 - 将 fetchDirectoryTree 调用中 depth 参数由默认 5 增加至 10,支持更深层级目录展示 - 涉及 QuestInput 组件、Coding 页面以及 workspaceApi 工具的相关调用均作相应调整 * perf(config): 优化前端缓存策略和代码分割配置 - 在 proxy.conf 中为静态资源添加 365 天缓存策略 - 为静态资源设置 Cache-Control 头为 public, immutable - 在根路径添加 no-cache 头防止 HTML 缓存 - 移除 Google Fonts 的外部依赖导入 - 配置 Vite 打包时的代码分割策略 - 将 React 相关库分离到 vendor-react 块 - 将 Ant Design 相关库分离到 vendor-antd 块 - 将 Markdown 相关库分离到 vendor-markdown 块 - 将 Swagger UI 相关库分离到 vendor-swagger 块 - 将 XTerm 相关库分离到 vendor-xterm 块 * refactor(acp): 优化 K8s 集群配置获取逻辑,统一使用默认集群接口 
- 新增 getDefaultClient() 方法获取默认 K8s 客户端,优先从缓存获取避免重复查库 - 新增 getDefaultConfigId() 方法获取默认集群 configId,简化集群配置获取流程 - 更新 AcpWebSocketHandler 使用 getDefaultConfigId() 替代直接列表查询 - 更新 PodReuseManager 中所有 getClient(null) 调用改为 getDefaultClient() - 简化 TerminalWebSocketHandler 集群获取逻辑,移除冗余的列表查询和错误处理 - 移除不必要的导入(K8sClusterInfo、List),减少代码耦合 - 统一 POC 阶段集群选择策略,提升代码可维护性 * refactor(runtime): 用 SandboxType 替换 RuntimeType 实现沙箱类型统一管理 - 将代码中的 RuntimeType 替换为更通用的 SandboxType,提升类型表达一致性 - 修改 AcpProperties、CliProviderController、AcpWebSocketHandler 等多个类相应字段与方法 - 删除不再使用的 RuntimeFactory 和 E2BSandboxProvider 实现,简化代码结构 - 调整 LocalRuntimeAdapter、K8sRuntimeAdapter、LocalFileSystemAdapter 等类以适配 SandboxType - 清理 AcpWebSocketHandler 中废弃的 K8s Pod 异步初始化和配置文件管理代码 - 修改异常类 FileSystemException 使用 SandboxType 表示错误来源沙箱类型 - 兼容老接口参数,保证向后兼容和代码行为一致性 * refactor(test): 调整K8sConfigService模拟代码格式以提高可读性 - 删除AcpWebSocketHandler中未使用的导入 - 移除PodReuseManager尾部多余空行 - 统一编写RuntimeControllerTest中K8sConfigService模拟类的多行格式 - 统一修改RuntimeAvailabilityPropertyTest中K8sConfigService模拟代码格式 - 统一更新RuntimeSelectionFilterPropertyTest中所有K8sConfigService模拟代码格式 - 统一调整RuntimeSelectorTest中K8sConfigService模拟类的格式和缩进 * feat(sandbox): Add detailed sandbox initialization progress tracking - Create SandboxInitProgress component to display 5-phase initialization workflow - Add initProgress state to QuestSessionContext for tracking phase status and progress - Add INIT_PROGRESS action type to questReducer for handling progress updates - Update useAcpSession to dispatch progress events from sandbox/init-progress notifications - Update useHiCliSession to dispatch progress events from cli/init-progress notifications - Integrate SandboxInitProgress display in HiCli, Coding, and Quest pages - Update .gitignore to exclude only /docs/acp instead of entire /docs directory - Display real-time progress with phase cards showing pending, executing, and completed states * chore(docs,config): Update steering docs and project configuration - Add Java coding standards guide for 
Spring Boot 17+ development - Add Spring Boot verification cycle documentation with test examples - Remove K8s environment context document (moved to separate reference) - Update .gitignore to exclude .app.pid file - Update application.yml configuration for bootstrap module - Refactor ACP WebSocket handler and code generation logic - Optimize K8s runtime adapter and sandbox provider implementations - Update frontend hooks for ACP and CLI session management - Refactor Coding and Quest page components - Remove root package.json (moved to module-specific configs) - Update sandbox Dockerfile and run.sh scripts - Update AGENTS.md and add CLAUDE.md documentation * feat(skills): Add comprehensive skill definitions for HiMarket and Kiro - Add create-issue-himarket skill for creating GitHub issues with Feature Request and Bug Report templates - Add create-pr-himarket skill for creating pull requests with conventional commit format validation - Add java-coding-standards skill for Java development guidelines - Add springboot-verification skill for Spring Boot project verification - Add start-himarket skill for HiMarket project initialization - Add tmux skill with helper scripts for session management and text waiting - Migrate skills from .claude to .kiro and .qoder directories for multi-agent support - Remove legacy steering documentation (java-coding-standards.md) - Update .gitignore to exclude jqwik database files - Standardize skill structure across all agent directories * docs(steering,agents): Remove outdated spec generation rules and update verification guidelines - Delete spec-generation.md steering document with outdated spec generation rules - Delete IMPLEMENTATION.md frontend documentation that is no longer maintained - Update AGENTS.md to mark code verification as required rather than optional - Add guidance for end-to-end verification of spec tasks with concrete steps - Clarify when to skip verification for non-interface tasks like refactoring - Fix newline at 
end of .gitignore file - Normalize runtime type string comparisons from mixed case to lowercase in useHiCliSession hook * refactor(cli): 统一重构 CLI 配置注入机制 - 移除废弃的 kiro-cli 和 codex CLI Provider 配置及相关代码 - Claude Code 支持自定义模型配置,环境变量注入 ANTHROPIC_API_KEY、ANTHROPIC_BASE_URL、ANTHROPIC_MODEL - Claude Code MCP 配置文件从 .claude/settings.json 迁移到 .mcp.json - 优化 Qwen Code 配置,API Key 不再写入 settings.json 的 env 字段 - 为 Claude Code 和 QoderCli 新增 Skills 配置写入,支持项目级 SKILL.md 文件注入 - 统一所有 CLI 的认证信息通过环境变量注入,避免敏感信息写入磁盘 - 配置文件写入失败时记录日志并继续执行,确保环境变量注入不中断 - 各 CLI Skills 配置目录隔离,防止配置冲突 * fix(acp): 拒绝无 token 的 WebSocket 连接,防止匿名访问 - AcpHandshakeInterceptor 修改为无 token 则拒绝握手并记录 warn 日志 - AcpHandshakeInterceptorTest 更新为断言无 token 连接被拒绝,不允许匿名访问 - WorkspaceController getCurrentUserId 方法改为无认证抛出业务异常,禁止匿名回退 - 调整相关导入,确保业务异常和错误码引用正确 - Coding 页面配置补全逻辑优化,自动填充模型和 CLI 提供者名称 - ConfigSidebar 修改配置变更回调,添加名称字段同步更新 - RuntimeSelector 测试用例中移除 POC 本地启动文案,改为正式描述“本地运行” - 移除前端多处 POC 和匿名访问相关注释和代码 - 删减无 token 连接允许的 POC 模式残余代码,提升安全性和代码洁净度 * refactor(frontend): Remove HiCli POC module and migrate shared components - Delete HiCli page, HiCliSessionContext, and useHiCliSession hook - Remove hicli/ directory components (AcpLogPanel, AgentInfoCard, HiCliSelector, etc.) - Migrate shared components from hicli/ to common/ (CustomModelForm, MarketMcpSelector, etc.) - Remove HiCli routes and navigation entries from Header and router - Delete HiCli-specific test files and context tests - Update .gitignore to exclude OpenSandbox local clone - Preserve all backend sandbox infrastructure (LocalSandboxProvider, K8s providers, etc.) 
- Update design and requirements documentation to reflect frontend-only cleanup scope - HiCli served its purpose as POC for ACP protocol validation; HiCoding now handles all CLI interactions * feat(acp): Refactor HiCoding architecture with modular config and sandbox services - Extract SessionConfigResolver service to handle CLI session configuration parsing independently - Extract ConfigFileBuilder service to generate configuration files from resolved configs - Extract AcpSessionInitializer to orchestrate session initialization pipeline - Extract AcpMessageRouter to handle WebSocket message routing and forwarding - Extract AcpConnectionManager to manage connection lifecycle and resource cleanup - Add SandboxHttpClient to unify HTTP communication across K8s and Local sandbox providers - Add InitErrorCode enum for structured error classification and handling - Add CliConfigGeneratorRegistry to manage CLI configuration generator implementations - Refactor AcpWebSocketHandler to lightweight entry point delegating to specialized services - Refactor ConfigInjectionPhase to use ConfigFileBuilder for config generation - Update InitContext to carry resolved session configuration through pipeline - Update SandboxProvider interface and implementations to use unified HTTP client - Migrate architecture specification to .kiro/specs/hicoding-architecture-refactor - Remove outdated poc-code-cleanup.md specification - Reduce AcpWebSocketHandler from 1000+ lines to <200 lines by delegating responsibilities - Eliminate code duplication between K8s and Local sandbox providers - Prepare clean extension points for OpenSandbox integration * feat(sandbox): 重构沙箱架构引入共享K8s Pod实现 - 重新定义SandboxType枚举,包含LOCAL、SHARED_K8S、OPEN_SANDBOX、E2B四种类型,移除旧K8S - 实现SharedK8sSandboxProvider,连接Helm预置共享沙箱Pod,无需动态调用K8s API - 保留OpenSandboxProvider和E2BSandboxProvider空实现,预留未来扩展接口 - 更新SandboxProviderRegistry注册逻辑,支持多沙箱类型自动注册 - 修改RuntimeSelector适配四种沙箱类型,更新运行时可用性判断和UI标签 - 新增Helm模板部署共享沙箱Deployment和ClusterIP Service - 
移除旧的共享模式开关,增加共享Pod服务名配置,默认运行时改为shared-k8s - 工作目录实现基于用户ID隔离,Sidecar自动为每用户spawn独立CLI进程 - 支持旧值"k8s"向后兼容映射为SHARED_K8S,前端runtime参数同步更新 - 明确多用户并发访问共享Pod机制和安全隔离策略,简化权限需求,提升部署便捷性 * feat(hicoding): Implement connection resilience and artifact deduplication - Add connection resilience specs for HiCoding with design, requirements, and tasks documentation - Implement path normalization in artifactDetector to eliminate format inconsistencies (e.g., "./" prefix, duplicate slashes) - Refactor artifact detection to trigger only on completed/failed tool_call status, eliminating duplicate detections - Add ConnectionBanner component to display WebSocket connection status and reconnection progress - Implement exponential backoff reconnection strategy with infinite retry for ACP WebSocket - Add terminal WebSocket reconnection trigger when ACP connection recovers - Remove legacy CodingSession entity, repository, controller, service, and related DTOs - Update DevProxyController and WorkspaceController to remove CodingSession dependencies - Refactor ACP connection manager and WebSocket handler for improved resilience - Update K8s workspace service initialization phases for better config injection - Migrate frontend components to use unified ACP session management via useAcpSession hook - Remove SessionSidebar component and codingSession API module - Add sandbox push script for AMD64 architecture - Update Nacos skill maintenance documentation - Resolves duplicate artifact detection and improves user experience during connection interruptions * style(workspace,frontend): Format code and refactor WebSocket imports - Reformat method parameters and switch cases in WorkspaceController for consistency - Refactor K8sWorkspaceService file download logic with improved line breaks - Update QuestSessionContext to use null instead of undefined for state initialization - Remove unused noRuntimeProvider test fixture from useRuntimeSelection tests - Remove unused RECONNECT_CONFIG import from 
useTerminalWebSocket - Add type assertion for Blob creation in workspaceApi to fix type compatibility - Add push-podman.sh script for container image deployment automation * feat(himarket): Integrate Nacos default instance binding with Agent Skill - Add Nacos default instance and namespace concept to auto-bind Agent Skills - Extend nacos_instance table with is_default and default_namespace fields via Flyway V9-V10 - Implement default Nacos instance management APIs in NacosService and NacosController - Refactor SkillController to resolve Nacos coordinates through productId instead of direct paths - Replace SkillPackageParser with SkillZipParser and add FileTreeBuilder and SkillMdBuilder utilities - Remove SkillFile entity and SkillPackageService, consolidate into SkillService - Add UpdateSkillNacosParam DTO for skill-nacos association updates - Implement ProductService auto-binding of default Nacos instance for new AGENT_SKILL products - Add ApiProductLinkNacos component for frontend Nacos linking UI - Update NacosConsoles and ApiProductDetail pages to support default instance management - Enhance skill market capability checklist and add comprehensive Nacos integration specifications - Update ACP config generators to support Nacos skill integration - Migrate skill package upload flow to use new Nacos-integrated architecture * chore: 移除 Local 类型沙箱相关逻辑 * feat(acp,sandbox): Integrate Nacos CLI sandbox with enhanced workspace and runtime services - Add NacosEnvGenerator for Nacos environment variable configuration injection - Add SkillDownloadPhase for Agent Skill artifact download during initialization - Add ExecResult model for sandbox execution result handling - Rename K8sWorkspaceService to RemoteWorkspaceService for broader runtime abstraction - Update WorkspaceController to support runtime parameter routing (k8s/local) - Enhance ACP session initialization with Nacos configuration and skill download phases - Update ConsumerService and NacosServiceImpl for improved 
service integration - Add comprehensive sandbox integration documentation (SANDBOX_GUIDE.md) - Update Kiro specs for sandbox terminal, file tree, and Nacos CLI integration - Refactor sandbox Dockerfile and sidecar server for enhanced runtime support - Update WebSocket URL resolution and ACP session hooks for runtime awareness - Add and update unit tests for new phases, config generators, and workspace service - Improve skill upload script for sandbox artifact management * feat(himarket): Enhance skill download and improve UI loading states - Add authentication token support to skill package downloads in frontend components - Implement proper Authorization header handling for API requests - Replace anchor tag with button component for download functionality with error handling - Add skeleton loading states to ApiProductDetail for better UX during data fetch - Prevent menu item flickering by conditionally rendering navigation based on data availability - Update SkillZipParser to handle single-part resource paths without skill name prefix - Enhance upload-skills.sh script with automatic portal publication after successful skill upload - Add portal discovery logic to automatically publish skills to default portal when available * refactor(core): 将 ACP 相关包及引用统一重命名为 HiCoding - 将所有包路径中的 acp 修改为 hicoding,涵盖 websocket、runtime、sandbox、cli、filesystem 等模块 - 更新所有相关类的包导入路径,如 WebSocketConfig 中的处理器和拦截器 - 重命名了多个服务类及配置类的包路径以反映命名空间调整 - 修正注释中的 Handler 名称对应包路径变更 - 统一替换 ACP 命名为 HiCoding,确保整体代码结构一致性 - 调整 sandbox 初始化相关类的包路径及导入,保持依赖正确 - 重构运行时和沙箱相关类的包路径,改进模块划分清晰度 * docs(block-grouping): 添加完整的分组折叠算法设计文档 - 描述了AI对话界面中低价值操作块折叠成分组卡片的设计背景与目标 - 详细阐述了整体架构,包含多层设计及数据流传递 - 说明了ACP协议细节、事件类型及流状态机对分组的影响 - 介绍了从ACP事件到渲染块的转换逻辑及SubAgent数据流处理 - 定义了核心概念,包括Block类型、工具分类与状态、活动分组属性 - 设计了策略模式及多种折叠策略的继承体系与选择算法 - 详解了分组算法核心流程、最后消息保护策略及flush机制 - 说明了isExploring状态判断及SubAgent递归处理流程 - 给出了策略折叠规则与各类型block折叠判断流程的设计思路 * fix(runtime): 删除 RemoteRuntimeAdapter 中的闲置检查逻辑 - 移除 RemoteRuntimeAdapter 中的闲置超时检测相关代码及相关字段 - 删除 lastActiveAt 
状态和 idleTimeoutSeconds 配置 - 去除与闲置检测相关的定时任务调度和触发逻辑 - 简化了 WebSocket 连接的部分代码实现 fix(sandbox): 使用绝对路径替换相对路径以确保 exec 调用正确 - 在 SkillDownloadPhase 中将 nacos-env 配置文件路径及技能目录转换为绝对路径 - 添加 toAbsolutePath 方法,实现基于 workspacePath 拼接绝对路径 - 确保 exec 调用时工作目录无关,避免路径解析错误 feat(coding): 优化默认 CLI 和模型自动选择逻辑 - 移除 ConfigSidebar 中自动选中第一个可用 CLI 的代码 - 改为在 Coding.tsx 页面加载时自动异步选择默认 CLI Provider 和默认模型 - 使用全新 useCodingConfig Hook 管理配置状态,实现配置纯内存管理且不依赖 localStorage - 在 send message 场景下,自动填充基本的 cliSessionConfig 包含 modelProductId,防止数据缺失 fix(adp): 处理 MODEL_API 模型不返回 methodPathList 场景 - 在 AdpAIGatewayOperator 中 MODEL_API 场景下,对缺失 methodPathList 的模型构造兜底路由 - 路由采用 basePath 拼接默认路径 "/v1/chat/completions" - 确保 BaseUrlExtractor 能正确提取 baseUrl chore(sandbox): 更新 Dockerfile 中的 nacos-cli 安装及验证 - nacos-cli 安装完成后改为执行 help 命令替代 version 命令验证安装结果 - 移除未适配的 kiro-cli 安装注释代码 style(code): 代码格式及导入整理 - 删除无用导入 - 统一类型定义换行格式 - 修正代码注释对齐与格式 * test(frontend): Remove unused imports and add type assertions - Remove unused fireEvent import from RuntimeSelector.test.tsx - Add type assertions to wsUrl.test.ts test cases for cliSessionConfig parameter - Resolve TypeScript linting issues in test files * feat(terminal): 实现 Terminal 心跳保活与断连重连机制 - 新建 WebSocketPingScheduler 共享 Ping 调度器,为 ACP 和 Terminal Handler 提供定时 ping - 集成 Ping 调度器到 HiCodingWebSocketHandler 和 TerminalWebSocketHandler 的连接生命周期方法 - RemoteTerminalBackend 添加心跳保活,定时发送协议级 ping 维持后端连接活跃 - RemoteTerminalBackend 实现断连自动重连,采用指数退避策略,最多重连 5 次后放弃 - 修改 WebSocketConfig 配置,设置最大空闲超时时间为 120 秒,提高连接稳定性 - 确保 ping 和重连任务使用守护线程,防止阻止 JVM 退出 - 增加异常捕获和日志,保证 ping 发送失败时不影响整体连接 - 完善单元测试和属性基测试,覆盖心跳和重连功能的正确性与健壮性 * feat(coding-session): 新增编码会话管理接口及远程文件上传支持 - 新增 coding_session 数据库表及对应实体 CodingSession,实现会话持久化 - 实现 CodingSession REST API 支持会话的增删改查操作 - 支持通过 WebSocket 拦截 session/new 和 session/load 请求,实现 cwd 路径绝对化 - 远程沙箱新增文件上传接口,限制单文件最大 5MB,支持 Base64 编码传输 - 远程文件路径自动关联用户工作空间,防止路径穿越问题 - 新增 CodingSessionServiceImpl 服务实现会话业务逻辑,支持用户最大会话数量限制 - 修正部分文档中“新建会话”按钮命名为“新会话”以保持一致 - 新增 HiCoding 全流程集成测试覆盖会话和 WebSocket 交互场景 * 
feat(runtime): 支持 Sidecar 会话 detach 和重连功能 - 数据库添加 coding_session 表的 model_product_id 和 model_name 字段 - CodingSession 实体及相关 DTO 新增 modelProductId 和 modelName 字段 - RemoteRuntimeAdapter 支持通过控制消息获取 sidecarSessionId - 增加 detach() 方法进入 DETACHED 状态,关闭 WebSocket 但保留输出流以供重连 - 实现 reconnect() 方法支持基于 sidecarSessionId 重新连接 sidecar - 新增 RuntimeStatus.DETACHED 表示 WebSocket 已断开但会话仍存活 - SandboxInfo 增加支持生成带 sessionId 的 attach 模式 WebSocket URI - HiCodingConnectionManager 管理已 detach 的会话,支持查询、重用和销毁 - WebSocket 关闭时,如果是 RemoteRuntimeAdapter 且有 sidecarSessionId,改为 detach 保存会话 - HiCodingWebSocketHandler 优先尝试从已 detach 会话中恢复连接,跳过完整初始化 - 前端 Coding 相关增加 reattach 事件监听,支持恢复已有会话 - SessionSidebar 显示 providerKey 和 modelName 信息 - 调整 loadSession 流程,支持 load 返回 null 表示成功恢复会话,避免重复新建 - 会话加载时处理 providerKey,保证切换历史会话自动完成正确连接和重连逻辑 - 修复无 token 接口返回状态由 401 改为 403,统一访问控制响应状态 * feat(editor): 重构编辑器视图,添加代码高亮和文件操作按钮 - 使用 highlight.js 实现基于语言的代码高亮展示,替代 Monaco Editor 只读模式 - 新增代码头部操作栏,支持文件刷新、复制、自动换行切换和下载功能 - 支持 PDF、图片和二进制文件的分别预览与下载 - 优化文件树组件,根据文件扩展名展示更丰富的图标分类 - 允许通过回调刷新编辑区文件内容,实现文件同步更新 - 删除旧的 Monaco Editor 相关依赖和代码,明确组件渲染逻辑 - 在 Coding 页面中添加刷新文件内容的回调处理,更新打开文件状态 chore(sidecar-server): 提取并模块化服务器主入口和配置文件 - 将原有冗长的 index.js 文件拆分成 server.js 和 config.js 两部分 - config.js 负责读取环境变量及默认配置,导出配置常量 - index.js 精简为仅导入并启动服务器,实现职责分离 - 保留旧版核心逻辑注释在提交记录供代码对比参考 * feat(workspace): 增加工作空间路径安全校验和持久化支持 - 将 sandbox 工作空间卷挂载改为本地持久化路径 - Helm 模板新增 sandbox 工作空间持久化 PVC 配置及相关值 - RemoteWorkspaceService 中路径校验方法改为基于用户目录校验,防止跨用户访问 - RemoteSandboxProvider 和 SkillDownloadPhase 增加路径越界安全检查 - 解除 SkillController 下载接口 AdminOrDeveloperAuth 注解限制 - RemoteRuntimeAdapter 处理多条 WebSocket 消息支持逐行解析控制信息 - HiCodingConnectionManager 增加定时清理过期分离的 detached 会话功能 - HiCodingWebSocketHandler 优化连接重连逻辑,确保重连存活 - WebSocketPingScheduler 线程池升级为双线程调度器 - CodingSessionServiceImpl 优化清理用户超额会话逻辑,支持分页删除 - 前端 CodingSessionContext 修复并优化历史消息回放中 user_message_chunk 处理 - 前端 useCodingSession 增强日志输出,方便调试 session 更新流程 - Coding 页面改进会话重置和恢复逻辑,避免自动恢复旧会话引发的问题 - sandbox 容器允许更多命令,包含 opencode 和 claude-agent-acp - 
sidecar-server 优化会话关闭时输出缓存清理,修复重复清理问题 - sidecar-server 终端模块增加输出环形缓冲区,增强会话状态与活动时间跟踪 - 终端会话关闭和错误时触发专用销毁函数,保证状态一致性 - 其他若干日志和异常处理的细节修正和提升 * chore: 将 .kiro 目录添加到 gitignore 忽略 AWS Kiro AI coding agent 的配置目录 🤖 Generated with [Qoder](https://qoder.com) * chore: 从版本控制中移除 .kiro 目录 该目录包含 Kiro AI coding agent 的本地配置,不应提交到仓库 🤖 Generated with [Qoder](https://qoder.com) * fix: 添加 JetBrains 仓库解决 pty4j 依赖下载问题 pty4j 的传递依赖 purejavacomm 不在 Maven Central,需要从 JetBrains 仓库下载 🤖 Generated with [Qoder](https://qoder.com) * fix: 更正 JetBrains 仓库地址解决 purejavacomm 依赖问题 purejavacomm 是 pty4j 的传递依赖,需要从 JetBrains intellij-dependencies 仓库下载 🤖 Generated with [Qoder](https://qoder.com) * fix(ci): test-reporter 在 fork PR 中添加 continue-on-error dorny/test-reporter 在 fork PR 中缺少 checks:write 权限导致报告生成失败, 该步骤仅用于生成测试报告,不应阻塞 CI 流程 🤖 Generated with [Qoder](https://qoder.com) --- .claude/skills/create-issue-himarket/SKILL.md | 163 ++ .claude/skills/create-pr-himarket/SKILL.md | 278 +++ .claude/skills/java-coding-standards/SKILL.md | 147 ++ .../skills/springboot-verification/SKILL.md | 231 +++ .claude/skills/tmux/SKILL.md | 153 ++ .claude/skills/tmux/scripts/find-sessions.sh | 112 ++ .claude/skills/tmux/scripts/wait-for-text.sh | 83 + .github/workflows/backend-ci.yml | 1 + .gitignore | 26 +- .husky/pre-commit | 10 - .mvn/wrapper/maven-wrapper.properties | 3 + .qoder/skills/create-issue-himarket/SKILL.md | 163 ++ .qoder/skills/create-pr-himarket/SKILL.md | 278 +++ .qoder/skills/java-coding-standards/SKILL.md | 147 ++ .qoder/skills/publish-skills/SKILL.md | 216 +++ .../skills/springboot-verification/SKILL.md | 231 +++ .qoder/skills/tmux/SKILL.md | 153 ++ .qoder/skills/tmux/scripts/find-sessions.sh | 112 ++ .qoder/skills/tmux/scripts/wait-for-text.sh | 83 + AGENTS.md | 202 ++ CLAUDE.md | 366 ++++ README.md | 4 +- README_zh.md | 4 +- ...32\346\234\254\350\257\264\346\230\216.md" | 2 +- deploy/docker/docker-compose.yml | 23 +- deploy/docker/scripts/data/.env | 2 +- ...32\346\234\254\350\257\264\346\230\216.md" | 2 +- 
deploy/helm/charts/nacos/.helmignore | 4 + deploy/helm/charts/nacos/Chart.yaml | 6 + deploy/helm/charts/nacos/init-db.sh | 206 +++ deploy/helm/charts/nacos/nacos-installer.sh | 854 +++++++++ deploy/helm/charts/nacos/sql/mysql-schema.sql | 179 ++ deploy/helm/charts/nacos/sql/pg-schema.sql | 410 ++++ deploy/helm/charts/nacos/sql/upgrade/.gitkeep | 0 .../charts/nacos/templates/deployment.yaml | 83 + .../helm/charts/nacos/templates/service.yaml | 23 + deploy/helm/charts/nacos/values.yaml | 33 + deploy/helm/scripts/data/.env | 6 +- deploy/helm/scripts/deploy.sh | 81 +- deploy/helm/templates/himarket-server-cm.yaml | 4 + .../templates/sandbox-shared-deployment.yaml | 48 + deploy/helm/templates/sandbox-shared-pvc.yaml | 21 + .../templates/sandbox-shared-service.yaml | 17 + deploy/helm/values.yaml | 24 +- docs/SANDBOX_GUIDE.md | 237 +++ docs/block-grouping.md | 1647 +++++++++++++++++ .../himarket-nacos-skill-integration-spec.md | 707 +++++++ docs/skill-market-capability-checklist.md | 288 +++ himarket-bootstrap/Dockerfile | 24 +- himarket-bootstrap/pom.xml | 7 + .../alibaba/himarket/HiMarketApplication.java | 3 + .../alibaba/himarket/config/FlywayConfig.java | 2 + .../himarket/config/SecurityConfig.java | 6 +- .../himarket/config/WebSocketConfig.java | 43 + .../src/main/resources/application-h2.yml | 19 + .../src/main/resources/application.yml | 48 +- .../V10__Add_nacos_default_fields.sql | 8 + .../migration/V11__Drop_k8s_cluster_table.sql | 1 + .../V12__Add_coding_session_table.sql | 14 + .../V13__Add_coding_session_model_fields.sql | 3 + .../migration/V7__Add_k8s_cluster_table.sql | 20 + .../db/migration/V8__Add_skill_file_table.sql | 13 + .../migration/V9__Drop_skill_file_table.sql | 2 + .../src/main/resources/logback-spring.xml | 13 +- .../himarket/entity/CodingSession.java | 48 + .../himarket/entity/NacosInstance.java | 8 + .../com/alibaba/himarket/entity/Product.java | 7 +- .../repository/CodingSessionRepository.java | 21 + .../repository/NacosInstanceRepository.java 
| 7 + .../himarket/support/enums/ProductType.java | 2 + .../support/product/ProductFeature.java | 2 + .../himarket/support/product/SkillConfig.java | 52 + himarket-server/pom.xml | 31 +- .../himarket/config/AcpProperties.java | 223 +++ .../controller/CliProviderController.java | 406 ++++ .../controller/CodingSessionController.java | 44 + .../himarket/controller/NacosController.java | 19 + .../controller/ProductController.java | 10 + .../himarket/controller/SkillController.java | 205 ++ .../controller/WorkspaceController.java | 201 ++ .../himarket/core/advice/ResponseAdvice.java | 4 +- .../himarket/core/exception/ErrorCode.java | 9 + .../himarket/core/skill/FileTreeBuilder.java | 125 ++ .../himarket/core/skill/SkillMdBuilder.java | 29 + .../himarket/core/skill/SkillMdDocument.java | 54 + .../himarket/core/skill/SkillMdParser.java | 129 ++ .../himarket/core/skill/SkillZipParser.java | 264 +++ .../himarket/core/utils/TokenUtil.java | 7 +- .../coding/CreateCodingSessionParam.java | 23 + .../coding/UpdateCodingSessionParam.java | 13 + .../params/product/UpdateSkillNacosParam.java | 14 + .../dto/result/cli/MarketMcpInfo.java | 38 + .../dto/result/cli/MarketMcpsResponse.java | 34 + .../dto/result/cli/MarketModelInfo.java | 40 + .../dto/result/cli/MarketModelsResponse.java | 33 + .../dto/result/cli/MarketSkillInfo.java | 37 + .../result/coding/CodingSessionResult.java | 28 + .../dto/result/nacos/NacosResult.java | 4 + .../dto/result/product/ProductResult.java | 3 + .../result/skill/SkillFileContentResult.java | 38 + .../dto/result/skill/SkillFileTreeNode.java | 45 + .../service/CodingSessionService.java | 18 + .../himarket/service/ConsumerService.java | 9 + .../himarket/service/NacosService.java | 46 + .../himarket/service/ProductService.java | 9 + .../himarket/service/SkillService.java | 51 + .../document/DocumentConversionService.java | 143 ++ .../service/gateway/AdpAIGatewayOperator.java | 20 + .../hichat/service/AbstractLlmService.java | 3 +- 
.../hichat/support/LlmChatRequest.java | 35 - .../hicoding/RemoteWorkspaceService.java | 443 +++++ .../cli/ClaudeCodeConfigGenerator.java | 154 ++ .../hicoding/cli/CliConfigGenerator.java | 73 + .../cli/CliConfigGeneratorRegistry.java | 31 + .../hicoding/cli/ConfigFileBuilder.java | 238 +++ .../hicoding/cli/NacosEnvGenerator.java | 120 ++ .../hicoding/cli/OpenCodeConfigGenerator.java | 194 ++ .../hicoding/cli/ProtocolTypeMapper.java | 37 + .../hicoding/cli/QoderCliConfigGenerator.java | 125 ++ .../hicoding/cli/QwenCodeConfigGenerator.java | 228 +++ .../hicoding/filesystem/BaseUrlExtractor.java | 88 + .../hicoding/filesystem/FileEntry.java | 6 + .../service/hicoding/filesystem/FileInfo.java | 12 + .../filesystem/FileSystemAdapter.java | 65 + .../filesystem/FileSystemException.java | 54 + .../hicoding/filesystem/PathValidator.java | 91 + .../filesystem/SidecarFileSystemAdapter.java | 196 ++ .../runtime/RemoteRuntimeAdapter.java | 520 ++++++ .../hicoding/runtime/RuntimeAdapter.java | 72 + .../hicoding/runtime/RuntimeConfig.java | 65 + .../runtime/RuntimeFaultNotification.java | 40 + .../hicoding/runtime/RuntimeStatus.java | 22 + .../service/hicoding/sandbox/ConfigFile.java | 11 + .../hicoding/sandbox/E2BSandboxProvider.java | 54 + .../service/hicoding/sandbox/ExecResult.java | 6 + .../hicoding/sandbox/OpenSandboxProvider.java | 54 + .../service/hicoding/sandbox/PodInfo.java | 15 + .../sandbox/RemoteSandboxProvider.java | 163 ++ .../hicoding/sandbox/SandboxConfig.java | 16 + .../hicoding/sandbox/SandboxHttpClient.java | 229 +++ .../service/hicoding/sandbox/SandboxInfo.java | 104 ++ .../hicoding/sandbox/SandboxProvider.java | 95 + .../sandbox/SandboxProviderRegistry.java | 36 + .../service/hicoding/sandbox/SandboxType.java | 42 + .../sandbox/init/ConfigInjectionPhase.java | 109 ++ .../sandbox/init/FileSystemReadyPhase.java | 77 + .../hicoding/sandbox/init/InitConfig.java | 14 + .../hicoding/sandbox/init/InitContext.java | 159 ++ 
.../hicoding/sandbox/init/InitErrorCode.java | 56 + .../hicoding/sandbox/init/InitEvent.java | 17 + .../hicoding/sandbox/init/InitPhase.java | 25 + .../sandbox/init/InitPhaseException.java | 28 + .../hicoding/sandbox/init/InitResult.java | 28 + .../hicoding/sandbox/init/PhaseStatus.java | 11 + .../hicoding/sandbox/init/RetryPolicy.java | 24 + .../sandbox/init/SandboxAcquirePhase.java | 49 + .../sandbox/init/SandboxInitPipeline.java | 175 ++ .../sandbox/init/SidecarConnectPhase.java | 51 + .../sandbox/init/SkillDownloadPhase.java | 170 ++ .../hicoding/session/CliSessionConfig.java | 43 + .../hicoding/session/CustomModelConfig.java | 63 + .../hicoding/session/McpConfigResolver.java | 142 ++ .../hicoding/session/ModelConfigResolver.java | 179 ++ .../session/ResolvedSessionConfig.java | 58 + .../session/SessionConfigResolver.java | 146 ++ .../hicoding/session/SessionInitializer.java | 225 +++ .../terminal/RemoteTerminalBackend.java | 292 +++ .../hicoding/terminal/TerminalBackend.java | 55 + .../hicoding/terminal/TerminalProcess.java | 166 ++ .../terminal/TerminalWebSocketHandler.java | 179 ++ .../hicoding/websocket/CliProcess.java | 213 +++ .../websocket/HiCodingConnectionManager.java | 304 +++ .../HiCodingHandshakeInterceptor.java | 87 + .../websocket/HiCodingMessageRouter.java | 116 ++ .../websocket/HiCodingWebSocketHandler.java | 715 +++++++ .../websocket/WebSocketPingScheduler.java | 84 + .../impl/CodingSessionServiceImpl.java | 95 + .../service/impl/ConsumerServiceImpl.java | 6 +- .../service/impl/NacosServiceImpl.java | 100 + .../service/impl/ProductServiceImpl.java | 78 +- .../service/impl/SkillServiceImpl.java | 230 +++ .../controller/CliProviderControllerTest.java | 176 ++ .../core/skill/SkillMdParserTest.java | 185 ++ .../service/hicoding/HiCodingFullE2ETest.java | 1628 ++++++++++++++++ .../cli/QwenCodeConfigGeneratorMcpTest.java | 216 +++ .../cli/QwenCodeConfigGeneratorSkillTest.java | 118 ++ .../filesystem/PathTraversalPropertyTest.java | 186 ++ 
.../filesystem/PathValidatorTest.java | 123 ++ .../RuntimeFaultNotificationPropertyTest.java | 204 ++ .../runtime/RuntimeFaultNotificationTest.java | 107 ++ .../sandbox/SandboxInfoPropertyTest.java | 235 +++ .../sandbox/SandboxProviderRegistryTest.java | 81 + .../hicoding/sandbox/init/InitPhasesTest.java | 414 +++++ .../init/SandboxInitPipelinePropertyTest.java | 847 +++++++++ .../sandbox/init/SandboxInitPipelineTest.java | 1145 ++++++++++++ .../session/QwenCodeAuthFlowTest.java | 438 +++++ .../hicoding/session/SessionModelsTest.java | 236 +++ .../hicoding/websocket/AcpPropertiesTest.java | 132 ++ .../websocket/CliProcessMultiCliTest.java | 187 ++ .../websocket/HiCodingAuthenticationTest.java | 244 +++ .../HiCodingHandshakeInterceptorTest.java | 151 ++ .../HiCodingPromptExecutionTest.java | 245 +++ .../websocket/HiCodingWebSocketE2ETest.java | 167 ++ himarket-web/himarket-admin/.npmrc | 1 + himarket-web/himarket-admin/package.json | 20 +- himarket-web/himarket-admin/proxy.conf | 3 + .../himarket-admin/src/aliyunThemeToken.ts | 2 +- .../api-product/ApiProductFormModal.tsx | 35 +- .../api-product/ApiProductLinkApi.tsx | 3 + .../api-product/ApiProductLinkNacos.tsx | 162 ++ .../api-product/ApiProductOverview.tsx | 140 +- .../api-product/ApiProductSkillPackage.tsx | 299 +++ .../api-product/ApiProductUsageGuide.tsx | 183 +- .../api-product/ModelFeatureForm.tsx | 191 +- .../api-product/SkillConfigForm.tsx | 32 + himarket-web/himarket-admin/src/lib/api.ts | 41 +- himarket-web/himarket-admin/src/lib/utils.ts | 47 +- .../src/pages/ApiProductDetail.tsx | 57 +- .../himarket-admin/src/pages/ApiProducts.tsx | 619 ++----- .../src/pages/NacosConsoles.tsx | 136 +- .../himarket-admin/src/types/api-product.ts | 13 +- .../himarket-admin/src/types/gateway.ts | 2 + himarket-web/himarket-admin/vite.config.ts | 5 +- himarket-web/himarket-frontend/.npmrc | 1 + .../.vite/deps/_metadata.json | 8 - .../himarket-frontend/.vite/deps/package.json | 3 - .../himarket-frontend/IMPLEMENTATION.md | 
233 --- himarket-web/himarket-frontend/package.json | 20 +- himarket-web/himarket-frontend/proxy.conf | 30 + .../himarket-frontend/src/aliyunThemeToken.ts | 2 +- .../src/components/Header.tsx | 37 +- .../src/components/Layout.tsx | 6 +- .../src/components/MarkdownRender.tsx | 2 +- .../src/components/ProductHeader.tsx | 2 +- .../src/components/chat/Area.tsx | 24 +- .../src/components/chat/Messages.tsx | 4 +- .../src/components/chat/Sidebar.tsx | 2 +- .../components/coding/ActivityGroupCard.tsx | 510 +++++ .../src/components/coding/AgentMessage.tsx | 182 ++ .../src/components/coding/ArtifactPreview.tsx | 205 ++ .../components/coding/ChangesView.test.tsx | 40 + .../src/components/coding/ChangesView.tsx | 68 + .../src/components/coding/ChatStream.test.tsx | 32 + .../src/components/coding/ChatStream.tsx | 470 +++++ .../components/coding/CliProviderSelect.tsx | 63 + .../src/components/coding/CodingInput.tsx | 583 ++++++ .../src/components/coding/CodingTopBar.tsx | 79 + .../src/components/coding/ConfigDropdowns.tsx | 525 ++++++ .../src/components/coding/ConfigSidebar.tsx | 402 ++++ .../components/coding/ConnectionBanner.tsx | 82 + .../components/coding/ConversationTopBar.tsx | 26 + .../src/components/coding/DiffViewer.tsx | 71 + .../src/components/coding/EditorArea.tsx | 413 +++++ .../components/coding/ErrorMessage.test.tsx | 44 + .../src/components/coding/ErrorMessage.tsx | 38 + .../src/components/coding/FileMentionMenu.tsx | 157 ++ .../src/components/coding/FileTree.tsx | 180 ++ .../src/components/coding/InlineArtifact.tsx | 75 + .../components/coding/PermissionDialog.tsx | 65 + .../src/components/coding/PlanDisplay.tsx | 159 ++ .../src/components/coding/PreviewPanel.tsx | 185 ++ .../components/coding/SandboxStatusCard.tsx | 112 ++ .../src/components/coding/SessionSidebar.css | 44 + .../src/components/coding/SessionSidebar.tsx | 535 ++++++ .../src/components/coding/SlashMenu.tsx | 167 ++ .../src/components/coding/TerminalOutput.tsx | 14 + 
.../src/components/coding/TerminalPanel.tsx | 210 +++ .../src/components/coding/TerminalView.tsx | 39 + .../src/components/coding/ThoughtBlock.tsx | 220 +++ .../components/coding/ToolCallCard.test.tsx | 63 + .../src/components/coding/ToolCallCard.tsx | 789 ++++++++ .../src/components/coding/ToolDetails.tsx | 74 + .../src/components/coding/ToolPanel.tsx | 86 + .../src/components/coding/UserMessage.tsx | 48 + .../coding/renderers/ArtifactRenderer.tsx | 40 + .../coding/renderers/FileRenderer.tsx | 76 + .../coding/renderers/HtmlRenderer.tsx | 14 + .../coding/renderers/ImageRenderer.tsx | 49 + .../coding/renderers/MarkdownRenderer.tsx | 24 + .../coding/renderers/PdfRenderer.tsx | 45 + .../coding/renderers/SvgRenderer.tsx | 14 + .../src/components/common/CliSelector.tsx | 477 +++++ .../src/components/common/CustomModelForm.tsx | 119 ++ .../components/common/MarketMcpSelector.tsx | 174 ++ .../components/common/MarketModelSelector.tsx | 153 ++ .../components/common/MarketSkillSelector.tsx | 183 ++ .../components/common/SearchFilterInput.tsx | 38 + .../src/components/common/SelectableCard.tsx | 64 + .../src/components/common/WelcomePage.tsx | 60 + .../common/__tests__/SelectableCard.test.tsx | 139 ++ .../__tests__/cliSelection.property.test.ts | 104 ++ .../stepCalculation.property.test.ts | 45 + .../common/__tests__/stepUtils.test.ts | 152 ++ .../src/components/common/index.ts | 12 + .../src/components/common/stepUtils.ts | 64 + .../src/components/skill/InstallCommand.tsx | 240 +++ .../src/components/skill/RelatedSkills.tsx | 78 + .../src/components/skill/SkillFileTree.tsx | 79 + .../src/components/skill/SkillMdViewer.tsx | 120 ++ .../src/components/square/SkillCard.tsx | 70 + .../src/context/CodingSessionContext.test.ts | 302 +++ .../src/context/CodingSessionContext.tsx | 1066 +++++++++++ .../hooks/__tests__/useCodingSession.test.ts | 101 + .../src/hooks/useCodingConfig.ts | 35 + .../src/hooks/useCodingSession.ts | 834 +++++++++ .../src/hooks/useCodingWebSocket.ts | 206 
+++ .../src/hooks/useResizable.ts | 110 ++ .../src/hooks/useTerminalWebSocket.ts | 218 +++ himarket-web/himarket-frontend/src/index.css | 13 + .../src/lib/apis/cliProvider.ts | 169 ++ .../src/lib/apis/codingSession.ts | 93 + .../himarket-frontend/src/lib/apis/index.ts | 8 +- .../himarket-frontend/src/lib/apis/product.ts | 3 +- .../himarket-frontend/src/lib/apis/typing.ts | 7 +- .../himarket-frontend/src/lib/skillMdUtils.ts | 60 + .../himarket-frontend/src/lib/statusUtils.ts | 1 + .../cliProviderSort.property.test.ts | 74 + .../utils/__tests__/cliProviderSort.test.ts | 127 ++ .../__tests__/filterUtils.property.test.ts | 81 + .../lib/utils/__tests__/filterUtils.test.ts | 107 ++ .../lib/utils/__tests__/logAggregator.test.ts | 302 +++ .../src/lib/utils/__tests__/logFilter.test.ts | 148 ++ .../src/lib/utils/artifactDetector.ts | 175 ++ .../src/lib/utils/cliProviderSort.ts | 28 + .../src/lib/utils/codingNormalize.test.ts | 63 + .../src/lib/utils/codingNormalize.ts | 186 ++ .../src/lib/utils/codingProtocol.test.ts | 68 + .../src/lib/utils/codingProtocol.ts | 190 ++ .../src/lib/utils/fileTreeUtils.ts | 80 + .../src/lib/utils/filterUtils.ts | 43 + .../src/lib/utils/groupMessages.ts | 459 +++++ .../src/lib/utils/logAggregator.ts | 240 +++ .../src/lib/utils/logFilter.ts | 20 + .../src/lib/utils/workspaceApi.ts | 216 +++ .../src/lib/utils/wsUrl.test.ts | 108 ++ .../himarket-frontend/src/lib/utils/wsUrl.ts | 44 + himarket-web/himarket-frontend/src/main.tsx | 5 + .../himarket-frontend/src/pages/Coding.tsx | 898 +++++++++ .../src/pages/SkillDetail.tsx | 334 ++++ .../himarket-frontend/src/pages/Square.tsx | 38 +- himarket-web/himarket-frontend/src/router.tsx | 51 +- .../himarket-frontend/src/test/setup.ts | 6 + .../himarket-frontend/src/types/artifact.ts | 26 + .../src/types/coding-protocol.ts | 440 +++++ .../himarket-frontend/src/types/coding.ts | 57 + .../himarket-frontend/src/types/index.ts | 9 +- .../himarket-frontend/src/types/log.ts | 72 + 
.../himarket-frontend/tailwind.config.js | 7 + .../himarket-frontend/tsconfig.app.json | 1 + himarket-web/himarket-frontend/vite.config.ts | 28 +- .../himarket-frontend/vitest.config.ts | 16 + mvnw | 295 +++ mvnw.cmd | 189 ++ pom.xml | 38 +- sandbox/Dockerfile | 145 ++ sandbox/build.sh | 22 + sandbox/entrypoint.sh | 23 + sandbox/push-amd64.sh | 106 ++ sandbox/push.sh | 83 + sandbox/sidecar-server/config.js | 32 + sandbox/sidecar-server/index.js | 3 + sandbox/sidecar-server/lib/http.js | 32 + sandbox/sidecar-server/lib/path.js | 22 + sandbox/sidecar-server/lib/ring-buffer.js | 45 + sandbox/sidecar-server/lib/session.js | 194 ++ sandbox/sidecar-server/package.json | 18 + sandbox/sidecar-server/routes/exec.js | 84 + sandbox/sidecar-server/routes/files.js | 430 +++++ sandbox/sidecar-server/routes/health.js | 35 + sandbox/sidecar-server/routes/sessions.js | 55 + sandbox/sidecar-server/server.js | 146 ++ sandbox/sidecar-server/ws/cli.js | 229 +++ sandbox/sidecar-server/ws/terminal.js | 145 ++ build.sh => scripts/build.sh | 3 + scripts/get-token.sh | 46 + scripts/push-podman.sh | 157 ++ push.sh => scripts/push.sh | 22 +- scripts/run.sh | 158 ++ scripts/upload-skills.sh | 215 +++ 379 files changed, 48333 insertions(+), 1288 deletions(-) create mode 100644 .claude/skills/create-issue-himarket/SKILL.md create mode 100644 .claude/skills/create-pr-himarket/SKILL.md create mode 100644 .claude/skills/java-coding-standards/SKILL.md create mode 100644 .claude/skills/springboot-verification/SKILL.md create mode 100644 .claude/skills/tmux/SKILL.md create mode 100755 .claude/skills/tmux/scripts/find-sessions.sh create mode 100755 .claude/skills/tmux/scripts/wait-for-text.sh delete mode 100755 .husky/pre-commit create mode 100644 .mvn/wrapper/maven-wrapper.properties create mode 100644 .qoder/skills/create-issue-himarket/SKILL.md create mode 100644 .qoder/skills/create-pr-himarket/SKILL.md create mode 100644 .qoder/skills/java-coding-standards/SKILL.md create mode 100644 
.qoder/skills/publish-skills/SKILL.md create mode 100644 .qoder/skills/springboot-verification/SKILL.md create mode 100644 .qoder/skills/tmux/SKILL.md create mode 100755 .qoder/skills/tmux/scripts/find-sessions.sh create mode 100755 .qoder/skills/tmux/scripts/wait-for-text.sh create mode 100644 AGENTS.md create mode 100644 CLAUDE.md create mode 100644 deploy/helm/charts/nacos/.helmignore create mode 100644 deploy/helm/charts/nacos/Chart.yaml create mode 100755 deploy/helm/charts/nacos/init-db.sh create mode 100644 deploy/helm/charts/nacos/nacos-installer.sh create mode 100644 deploy/helm/charts/nacos/sql/mysql-schema.sql create mode 100644 deploy/helm/charts/nacos/sql/pg-schema.sql create mode 100644 deploy/helm/charts/nacos/sql/upgrade/.gitkeep create mode 100644 deploy/helm/charts/nacos/templates/deployment.yaml create mode 100644 deploy/helm/charts/nacos/templates/service.yaml create mode 100644 deploy/helm/charts/nacos/values.yaml create mode 100644 deploy/helm/templates/sandbox-shared-deployment.yaml create mode 100644 deploy/helm/templates/sandbox-shared-pvc.yaml create mode 100644 deploy/helm/templates/sandbox-shared-service.yaml create mode 100644 docs/SANDBOX_GUIDE.md create mode 100644 docs/block-grouping.md create mode 100644 docs/nacos/himarket-nacos-skill-integration-spec.md create mode 100644 docs/skill-market-capability-checklist.md create mode 100644 himarket-bootstrap/src/main/java/com/alibaba/himarket/config/WebSocketConfig.java create mode 100644 himarket-bootstrap/src/main/resources/application-h2.yml create mode 100644 himarket-bootstrap/src/main/resources/db/migration/V10__Add_nacos_default_fields.sql create mode 100644 himarket-bootstrap/src/main/resources/db/migration/V11__Drop_k8s_cluster_table.sql create mode 100644 himarket-bootstrap/src/main/resources/db/migration/V12__Add_coding_session_table.sql create mode 100644 himarket-bootstrap/src/main/resources/db/migration/V13__Add_coding_session_model_fields.sql create mode 100644 
himarket-bootstrap/src/main/resources/db/migration/V7__Add_k8s_cluster_table.sql create mode 100644 himarket-bootstrap/src/main/resources/db/migration/V8__Add_skill_file_table.sql create mode 100644 himarket-bootstrap/src/main/resources/db/migration/V9__Drop_skill_file_table.sql create mode 100644 himarket-dal/src/main/java/com/alibaba/himarket/entity/CodingSession.java create mode 100644 himarket-dal/src/main/java/com/alibaba/himarket/repository/CodingSessionRepository.java create mode 100644 himarket-dal/src/main/java/com/alibaba/himarket/support/product/SkillConfig.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/config/AcpProperties.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/controller/CliProviderController.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/controller/CodingSessionController.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/controller/SkillController.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/controller/WorkspaceController.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/core/skill/FileTreeBuilder.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/core/skill/SkillMdBuilder.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/core/skill/SkillMdDocument.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/core/skill/SkillMdParser.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/core/skill/SkillZipParser.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/dto/params/coding/CreateCodingSessionParam.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/dto/params/coding/UpdateCodingSessionParam.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/dto/params/product/UpdateSkillNacosParam.java create mode 100644 
himarket-server/src/main/java/com/alibaba/himarket/dto/result/cli/MarketMcpInfo.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/dto/result/cli/MarketMcpsResponse.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/dto/result/cli/MarketModelInfo.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/dto/result/cli/MarketModelsResponse.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/dto/result/cli/MarketSkillInfo.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/dto/result/coding/CodingSessionResult.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/dto/result/skill/SkillFileContentResult.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/dto/result/skill/SkillFileTreeNode.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/CodingSessionService.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/SkillService.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/document/DocumentConversionService.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/RemoteWorkspaceService.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/ClaudeCodeConfigGenerator.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/CliConfigGenerator.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/CliConfigGeneratorRegistry.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/ConfigFileBuilder.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/NacosEnvGenerator.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/OpenCodeConfigGenerator.java 
create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/ProtocolTypeMapper.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/QoderCliConfigGenerator.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/QwenCodeConfigGenerator.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/BaseUrlExtractor.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/FileEntry.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/FileInfo.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/FileSystemAdapter.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/FileSystemException.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/PathValidator.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/SidecarFileSystemAdapter.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/runtime/RemoteRuntimeAdapter.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeAdapter.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeConfig.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeFaultNotification.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeStatus.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/ConfigFile.java create mode 100644 
himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/E2BSandboxProvider.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/ExecResult.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/OpenSandboxProvider.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/PodInfo.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/RemoteSandboxProvider.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxConfig.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxHttpClient.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxInfo.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxProvider.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxProviderRegistry.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxType.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/ConfigInjectionPhase.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/FileSystemReadyPhase.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitConfig.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitContext.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitErrorCode.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitEvent.java create mode 100644 
himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitPhase.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitPhaseException.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitResult.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/PhaseStatus.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/RetryPolicy.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/SandboxAcquirePhase.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/SandboxInitPipeline.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/SidecarConnectPhase.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/SkillDownloadPhase.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/CliSessionConfig.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/CustomModelConfig.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/McpConfigResolver.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/ModelConfigResolver.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/ResolvedSessionConfig.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/SessionConfigResolver.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/SessionInitializer.java create mode 100644 
himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/terminal/RemoteTerminalBackend.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/terminal/TerminalBackend.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/terminal/TerminalProcess.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/terminal/TerminalWebSocketHandler.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/CliProcess.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingConnectionManager.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingHandshakeInterceptor.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingMessageRouter.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingWebSocketHandler.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/WebSocketPingScheduler.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/impl/CodingSessionServiceImpl.java create mode 100644 himarket-server/src/main/java/com/alibaba/himarket/service/impl/SkillServiceImpl.java create mode 100644 himarket-server/src/test/java/com/alibaba/himarket/controller/CliProviderControllerTest.java create mode 100644 himarket-server/src/test/java/com/alibaba/himarket/core/skill/SkillMdParserTest.java create mode 100644 himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/HiCodingFullE2ETest.java create mode 100644 himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/cli/QwenCodeConfigGeneratorMcpTest.java create mode 100644 
himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/cli/QwenCodeConfigGeneratorSkillTest.java create mode 100644 himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/filesystem/PathTraversalPropertyTest.java create mode 100644 himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/filesystem/PathValidatorTest.java create mode 100644 himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeFaultNotificationPropertyTest.java create mode 100644 himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeFaultNotificationTest.java create mode 100644 himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxInfoPropertyTest.java create mode 100644 himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxProviderRegistryTest.java create mode 100644 himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitPhasesTest.java create mode 100644 himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/sandbox/init/SandboxInitPipelinePropertyTest.java create mode 100644 himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/sandbox/init/SandboxInitPipelineTest.java create mode 100644 himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/session/QwenCodeAuthFlowTest.java create mode 100644 himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/session/SessionModelsTest.java create mode 100644 himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/AcpPropertiesTest.java create mode 100644 himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/CliProcessMultiCliTest.java create mode 100644 himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingAuthenticationTest.java create mode 100644 
himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingHandshakeInterceptorTest.java create mode 100644 himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingPromptExecutionTest.java create mode 100644 himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingWebSocketE2ETest.java create mode 100644 himarket-web/himarket-admin/.npmrc create mode 100644 himarket-web/himarket-admin/src/components/api-product/ApiProductLinkNacos.tsx create mode 100644 himarket-web/himarket-admin/src/components/api-product/ApiProductSkillPackage.tsx create mode 100644 himarket-web/himarket-admin/src/components/api-product/SkillConfigForm.tsx create mode 100644 himarket-web/himarket-frontend/.npmrc delete mode 100644 himarket-web/himarket-frontend/.vite/deps/_metadata.json delete mode 100644 himarket-web/himarket-frontend/.vite/deps/package.json delete mode 100644 himarket-web/himarket-frontend/IMPLEMENTATION.md create mode 100644 himarket-web/himarket-frontend/src/components/coding/ActivityGroupCard.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/AgentMessage.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/ArtifactPreview.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/ChangesView.test.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/ChangesView.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/ChatStream.test.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/ChatStream.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/CliProviderSelect.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/CodingInput.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/CodingTopBar.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/ConfigDropdowns.tsx create 
mode 100644 himarket-web/himarket-frontend/src/components/coding/ConfigSidebar.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/ConnectionBanner.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/ConversationTopBar.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/DiffViewer.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/EditorArea.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/ErrorMessage.test.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/ErrorMessage.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/FileMentionMenu.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/FileTree.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/InlineArtifact.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/PermissionDialog.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/PlanDisplay.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/PreviewPanel.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/SandboxStatusCard.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/SessionSidebar.css create mode 100644 himarket-web/himarket-frontend/src/components/coding/SessionSidebar.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/SlashMenu.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/TerminalOutput.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/TerminalPanel.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/TerminalView.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/ThoughtBlock.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/ToolCallCard.test.tsx create mode 100644 
himarket-web/himarket-frontend/src/components/coding/ToolCallCard.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/ToolDetails.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/ToolPanel.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/UserMessage.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/renderers/ArtifactRenderer.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/renderers/FileRenderer.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/renderers/HtmlRenderer.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/renderers/ImageRenderer.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/renderers/MarkdownRenderer.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/renderers/PdfRenderer.tsx create mode 100644 himarket-web/himarket-frontend/src/components/coding/renderers/SvgRenderer.tsx create mode 100644 himarket-web/himarket-frontend/src/components/common/CliSelector.tsx create mode 100644 himarket-web/himarket-frontend/src/components/common/CustomModelForm.tsx create mode 100644 himarket-web/himarket-frontend/src/components/common/MarketMcpSelector.tsx create mode 100644 himarket-web/himarket-frontend/src/components/common/MarketModelSelector.tsx create mode 100644 himarket-web/himarket-frontend/src/components/common/MarketSkillSelector.tsx create mode 100644 himarket-web/himarket-frontend/src/components/common/SearchFilterInput.tsx create mode 100644 himarket-web/himarket-frontend/src/components/common/SelectableCard.tsx create mode 100644 himarket-web/himarket-frontend/src/components/common/WelcomePage.tsx create mode 100644 himarket-web/himarket-frontend/src/components/common/__tests__/SelectableCard.test.tsx create mode 100644 himarket-web/himarket-frontend/src/components/common/__tests__/cliSelection.property.test.ts create 
mode 100644 himarket-web/himarket-frontend/src/components/common/__tests__/stepCalculation.property.test.ts create mode 100644 himarket-web/himarket-frontend/src/components/common/__tests__/stepUtils.test.ts create mode 100644 himarket-web/himarket-frontend/src/components/common/stepUtils.ts create mode 100644 himarket-web/himarket-frontend/src/components/skill/InstallCommand.tsx create mode 100644 himarket-web/himarket-frontend/src/components/skill/RelatedSkills.tsx create mode 100644 himarket-web/himarket-frontend/src/components/skill/SkillFileTree.tsx create mode 100644 himarket-web/himarket-frontend/src/components/skill/SkillMdViewer.tsx create mode 100644 himarket-web/himarket-frontend/src/components/square/SkillCard.tsx create mode 100644 himarket-web/himarket-frontend/src/context/CodingSessionContext.test.ts create mode 100644 himarket-web/himarket-frontend/src/context/CodingSessionContext.tsx create mode 100644 himarket-web/himarket-frontend/src/hooks/__tests__/useCodingSession.test.ts create mode 100644 himarket-web/himarket-frontend/src/hooks/useCodingConfig.ts create mode 100644 himarket-web/himarket-frontend/src/hooks/useCodingSession.ts create mode 100644 himarket-web/himarket-frontend/src/hooks/useCodingWebSocket.ts create mode 100644 himarket-web/himarket-frontend/src/hooks/useResizable.ts create mode 100644 himarket-web/himarket-frontend/src/hooks/useTerminalWebSocket.ts create mode 100644 himarket-web/himarket-frontend/src/lib/apis/cliProvider.ts create mode 100644 himarket-web/himarket-frontend/src/lib/apis/codingSession.ts create mode 100644 himarket-web/himarket-frontend/src/lib/skillMdUtils.ts create mode 100644 himarket-web/himarket-frontend/src/lib/utils/__tests__/cliProviderSort.property.test.ts create mode 100644 himarket-web/himarket-frontend/src/lib/utils/__tests__/cliProviderSort.test.ts create mode 100644 himarket-web/himarket-frontend/src/lib/utils/__tests__/filterUtils.property.test.ts create mode 100644 
himarket-web/himarket-frontend/src/lib/utils/__tests__/filterUtils.test.ts create mode 100644 himarket-web/himarket-frontend/src/lib/utils/__tests__/logAggregator.test.ts create mode 100644 himarket-web/himarket-frontend/src/lib/utils/__tests__/logFilter.test.ts create mode 100644 himarket-web/himarket-frontend/src/lib/utils/artifactDetector.ts create mode 100644 himarket-web/himarket-frontend/src/lib/utils/cliProviderSort.ts create mode 100644 himarket-web/himarket-frontend/src/lib/utils/codingNormalize.test.ts create mode 100644 himarket-web/himarket-frontend/src/lib/utils/codingNormalize.ts create mode 100644 himarket-web/himarket-frontend/src/lib/utils/codingProtocol.test.ts create mode 100644 himarket-web/himarket-frontend/src/lib/utils/codingProtocol.ts create mode 100644 himarket-web/himarket-frontend/src/lib/utils/fileTreeUtils.ts create mode 100644 himarket-web/himarket-frontend/src/lib/utils/filterUtils.ts create mode 100644 himarket-web/himarket-frontend/src/lib/utils/groupMessages.ts create mode 100644 himarket-web/himarket-frontend/src/lib/utils/logAggregator.ts create mode 100644 himarket-web/himarket-frontend/src/lib/utils/logFilter.ts create mode 100644 himarket-web/himarket-frontend/src/lib/utils/workspaceApi.ts create mode 100644 himarket-web/himarket-frontend/src/lib/utils/wsUrl.test.ts create mode 100644 himarket-web/himarket-frontend/src/lib/utils/wsUrl.ts create mode 100644 himarket-web/himarket-frontend/src/pages/Coding.tsx create mode 100644 himarket-web/himarket-frontend/src/pages/SkillDetail.tsx create mode 100644 himarket-web/himarket-frontend/src/test/setup.ts create mode 100644 himarket-web/himarket-frontend/src/types/artifact.ts create mode 100644 himarket-web/himarket-frontend/src/types/coding-protocol.ts create mode 100644 himarket-web/himarket-frontend/src/types/coding.ts create mode 100644 himarket-web/himarket-frontend/src/types/log.ts create mode 100644 himarket-web/himarket-frontend/vitest.config.ts create mode 100755 mvnw 
create mode 100644 mvnw.cmd create mode 100644 sandbox/Dockerfile create mode 100755 sandbox/build.sh create mode 100755 sandbox/entrypoint.sh create mode 100755 sandbox/push-amd64.sh create mode 100755 sandbox/push.sh create mode 100644 sandbox/sidecar-server/config.js create mode 100644 sandbox/sidecar-server/index.js create mode 100644 sandbox/sidecar-server/lib/http.js create mode 100644 sandbox/sidecar-server/lib/path.js create mode 100644 sandbox/sidecar-server/lib/ring-buffer.js create mode 100644 sandbox/sidecar-server/lib/session.js create mode 100644 sandbox/sidecar-server/package.json create mode 100644 sandbox/sidecar-server/routes/exec.js create mode 100644 sandbox/sidecar-server/routes/files.js create mode 100644 sandbox/sidecar-server/routes/health.js create mode 100644 sandbox/sidecar-server/routes/sessions.js create mode 100644 sandbox/sidecar-server/server.js create mode 100644 sandbox/sidecar-server/ws/cli.js create mode 100644 sandbox/sidecar-server/ws/terminal.js rename build.sh => scripts/build.sh (93%) mode change 100644 => 100755 create mode 100755 scripts/get-token.sh create mode 100755 scripts/push-podman.sh rename push.sh => scripts/push.sh (90%) mode change 100644 => 100755 create mode 100755 scripts/run.sh create mode 100755 scripts/upload-skills.sh diff --git a/.claude/skills/create-issue-himarket/SKILL.md b/.claude/skills/create-issue-himarket/SKILL.md new file mode 100644 index 000000000..244461d22 --- /dev/null +++ b/.claude/skills/create-issue-himarket/SKILL.md @@ -0,0 +1,163 @@ +--- +name: create-issue-himarket +description: "通过自然语言在 HiMarket 社区创建 Issue。支持 Feature Request(功能请求)和 Bug Report(问题报告)两种类型。当用户想要向 HiMarket 提交功能建议或报告问题时使用此 skill。" +--- + +# 创建 HiMarket Issue + +## 概述 + +帮助用户通过自然语言描述,在 [higress-group/himarket](https://github.com/higress-group/himarket) 仓库创建规范的 Issue。 + +## Issue 类型 + +HiMarket 支持两种 Issue 类型: + +### 1. 
Feature Request(功能请求) + +用于提交新功能建议或改进想法。 + +**必填信息:** +- **Why do you need it?** - 为什么需要这个功能?描述你遇到的问题或痛点 +- **How could it be?** - 期望的功能是什么样的?描述输入和输出 +- **Other related information** - 其他相关信息、截图或上下文(可选) + +### 2. Bug Report(问题报告) + +用于报告 Bug 或异常行为。 + +**必填信息:** +- **Issue Description** - 问题简述 +- **What happened** - 发生了什么?包括异常堆栈信息 +- **Expected behavior** - 期望的行为是什么 +- **Reproduction steps** - 最小化的复现步骤 +- **Environment** - 环境信息(可选) + +**可选信息:** +- **Anything else** - 其他补充信息 +- **Root Cause** - 根因分析(如已定位) +- **Proposed Solution** - 建议的解决方案 + +## 工作流程 + +### 步骤 1:确定 Issue 类型 + +首先询问用户要创建的 Issue 类型: +- 功能请求(Feature Request) +- 问题报告(Bug Report) + +### 步骤 2:收集必要信息 + +根据 Issue 类型,通过对话逐步收集必要信息: + +**对于功能请求:** +1. 询问为什么需要这个功能(遇到了什么问题) +2. 询问期望的功能实现方式 +3. 询问是否有其他补充信息 + +**对于问题报告:** +1. 询问问题的简要描述 +2. 询问具体发生了什么(包括错误信息) +3. 询问期望的正确行为 +4. 询问复现步骤 +5. 询问是否已定位根因或有解决方案建议 + +### 步骤 3:生成 Issue 内容 + +根据收集的信息,按模板格式生成 Issue 内容,展示给用户确认。 + +### 步骤 4:创建 Issue + +确认后,使用 GitHub CLI 创建 Issue: + +```bash +# Feature Request +gh issue create --repo higress-group/himarket \ + --title "[Feature] <标题>" \ + --body "$(cat <<'EOF' +## Why do you need it? +<用户描述的问题/痛点> + +## How could it be? +<期望的功能实现> + +## Other related information +<其他补充信息> + +--- +**Type**: Feature Request +EOF +)" + +# Bug Report +gh issue create --repo higress-group/himarket \ + --title "[Bug] <标题>" \ + --body "$(cat <<'EOF' +## Issue Description +<问题简述> + +## What happened +<发生了什么> + +``` +<异常堆栈或错误信息> +``` + +## Expected behavior +<期望行为> + +## Reproduction steps +1. <步骤1> +2. <步骤2> +3. 
<步骤3> + +## Root Cause (if known) +<根因分析> + +## Proposed Solution (if any) +<建议的解决方案> + +## Files Changed (if applicable) +- `path/to/file1` +- `path/to/file2` + +--- +**Type**: Bug Report +**Component**: +EOF +)" +``` + +## 关键原则 + +- **一次一个问题** - 逐步收集信息,不要一次性询问太多 +- **提供示例** - 在询问时给出示例帮助用户理解 +- **确认重复** - 提醒用户先搜索是否有类似 Issue +- **语言选择** - 支持用户用中文或英文描述 +- **预览确认** - 创建前展示完整内容让用户确认 +- **返回链接** - 创建成功后返回 Issue URL +- **标题前缀** - Bug 使用 `[Bug]`,功能请求使用 `[Feature]`,也可按组件添加如 `[Frontend]` + +## 示例对话 + +**用户:** 我想给 HiMarket 提一个功能建议 + +**助手:** 好的,我来帮你创建一个 Feature Request。请问你为什么需要这个功能?遇到了什么问题或痛点? + +**用户:** 聊天页面停止按钮点击没反应 + +**助手:** 明白了,这是一个 Bug Report。请问具体是什么情况?在什么操作下发现的? + +**用户:** 模型推理过程中点击停止按钮无法取消 + +**助手:** 好的,我整理了以下 Issue 内容,请确认: + +> **Title:** [Frontend] HiChat 停止按钮无法取消模型推理 +> +> ## Issue Description +> HiChat 聊天页面在模型推理过程中,停止按钮点击无响应。 +> +> ## Expected behavior +> 点击停止按钮后应中断推理过程。 + +确认无误后我将创建这个 Issue。 diff --git a/.claude/skills/create-pr-himarket/SKILL.md b/.claude/skills/create-pr-himarket/SKILL.md new file mode 100644 index 000000000..f57d682f4 --- /dev/null +++ b/.claude/skills/create-pr-himarket/SKILL.md @@ -0,0 +1,278 @@ +--- +name: create-pr-himarket +description: "为 HiMarket 项目创建符合规范的 Pull Request。当用户需要提交代码、推送分支或创建 PR 时使用此 skill,确保 PR 标题和内容符合项目 CI 检查要求。" +--- + +# 创建 HiMarket Pull Request + +## 概述 + +帮助用户在 [higress-group/himarket](https://github.com/higress-group/himarket) 仓库创建符合规范的 Pull Request,确保通过 PR Title Check、PR Content Check 和 PR Size Check。 + +## PR 标题格式 + +### 必需格式 + +``` +type: 简短描述 +``` + +或带范围: + +``` +type(scope): 简短描述 +``` + +### 允许的 Type + +| Type | 说明 | 示例 | +|------|------|------| +| `feat` | 新功能 | `feat: add user authentication` | +| `fix` | Bug 修复 | `fix: resolve memory leak` | +| `docs` | 文档更新 | `docs: update API documentation` | +| `style` | 代码格式 | `style: format with prettier` | +| `refactor` | 重构 | `refactor: simplify service logic` | +| `perf` | 性能优化 | `perf: optimize queries` | +| `test` | 测试 | `test: add unit tests` | +| `build` | 构建系统 | 
`build: update dependencies` | +| `ci` | CI/CD | `ci: add workflow` | +| `chore` | 其他变更 | `chore: update gitignore` | +| `revert` | 回滚 | `revert: revert commit abc123` | + +### 标题规则 + +1. 必须包含 type 前缀 +2. type 后需要冒号和空格:`feat: ` 而不是 `feat:` +3. 描述必须以**小写字母**开头 +4. 保持简短清晰(建议 < 50 字符) + +## PR 内容格式(必填) + +### 必填部分 + +#### 1. Description(必填) + +必须包含 `## 📝 Description` 部分,且内容至少 10 个字符。 + +```markdown +## 📝 Description + +- 变更点 1 +- 变更点 2 +- 变更点 3 +``` + +#### 2. Type of Change(必填) + +必须至少勾选一项变更类型。 + +```markdown +## ✅ Type of Change + +- [x] Bug fix (non-breaking change) +- [ ] New feature (non-breaking change) +- [ ] Breaking change +- [ ] Documentation update +- [ ] Code refactoring +- [ ] Performance improvement +``` + +### 可选部分 + +#### 3. Related Issues(推荐) + +```markdown +## 🔗 Related Issues + +Fix #123 +Close #456 +``` + +#### 4. Testing(推荐) + +```markdown +## 🧪 Testing + +- [x] Unit tests added/updated +- [x] Manual testing completed +- 测试场景描述 +``` + +#### 5. Checklist(推荐) + +```markdown +## 📋 Checklist + +- [x] Code has been formatted (`mvn spotless:apply` for backend, `npm run lint:fix` for frontend) +- [x] Code is self-reviewed +- [x] No breaking changes +- [x] All CI checks pass +``` + +## 工作流程 + +### 步骤 1:检查当前状态 + +```bash +# 并行执行以下命令 +git status +git diff --stat +git log --oneline -5 +git branch -a +git remote -v +``` + +### 步骤 2:确认分支和远程 + +- 确认当前分支名称 +- 确认 origin 指向用户的 fork(如 `lexburner/himarket`) +- 确认 upstream 指向主仓库(`higress-group/himarket`) + +### 步骤 3:推送分支 + +```bash +git push -u origin <branch-name> +``` + +### 步骤 4:创建 PR + +使用 HEREDOC 格式确保内容正确: + +```bash +gh pr create --repo higress-group/himarket \ + --base main \ + --head <your-username>:<branch-name> \ + --title "feat: 简短描述" \ + --body "$(cat <<'EOF' +## 📝 Description + +- 变更点 1 +- 变更点 2 + +## 🔗 Related Issues + +Fix #123 + +## ✅ Type of Change + +- [ ] Bug fix (non-breaking change) +- [x] New feature (non-breaking change) +- [ ] Breaking change +- [ ] Documentation update +- [ ] Code refactoring +- [ ] Performance improvement + +## 🧪
Testing + +- [x] Unit tests pass locally +- [x] Manual testing completed + +## 📋 Checklist + +- [x] Code has been formatted (`mvn spotless:apply` for backend, `npm run lint:fix` for frontend) +- [x] Code is self-reviewed +- [x] No breaking changes +- [x] All CI checks pass +EOF +)" +``` + +### 步骤 5:验证检查状态 + +```bash +gh pr checks --repo higress-group/himarket +``` + +确保以下检查通过: +- PR Title Check +- PR Content Check +- PR Size Check +- PR Validation Summary + +## 关键原则 + +- **标题小写** - 描述部分必须以小写字母开头 +- **内容完整** - 必须包含 `## 📝 Description` 和 `## ✅ Type of Change` +- **勾选类型** - Type of Change 必须至少勾选一项 `[x]` +- **关联 Issue** - 推荐使用 `Fix #123` 格式关联 Issue +- **格式化代码** - 提交前运行 `mvn spotless:apply` 或 `npm run lint:fix` +- **不提交图片** - 避免将截图等二进制文件提交到仓库 + +## 常见错误 + +### 错误 1:标题首字母大写 + +``` +❌ feat: Add new feature +✅ feat: add new feature +``` + +### 错误 2:缺少 Description 标题 + +```markdown +❌ 直接写内容 +✅ ## 📝 Description + 内容 +``` + +### 错误 3:未勾选 Type of Change + +```markdown +❌ - [ ] New feature +✅ - [x] New feature +``` + +### 错误 4:Description 内容太短 + +```markdown +❌ ## 📝 Description + Fix bug + +✅ ## 📝 Description + Fix pagination bug in product list +``` + +## 完整示例 + +**标题:** +``` +feat(chat): add tool call support and stop generation feature +``` + +**内容:** +```markdown +## 📝 Description + +- 添加聊天工具调用(Tool Call)支持,工具执行状态按消息顺序内联展示 +- 添加停止生成过程功能,支持中断正在进行的 AI 回复 +- 优化模型推理时滚动条自由滑动体验 + +## 🔗 Related Issues + +Fix #163 +Fix #164 +Fix #165 + +## ✅ Type of Change + +- [x] Bug fix (non-breaking change) +- [x] New feature (non-breaking change) +- [ ] Breaking change +- [ ] Documentation update +- [ ] Code refactoring +- [ ] Performance improvement + +## 🧪 Testing + +- [x] Unit tests pass locally +- [x] Manual testing completed +- 测试停止按钮能否正常中断 SSE 流式请求 +- 测试模型推理时滚动条是否可自由滑动 + +## 📋 Checklist + +- [x] Code has been formatted (`mvn spotless:apply` for backend, `npm run lint:fix` for frontend) +- [x] Code is self-reviewed +- [x] No breaking changes +- [x] All CI checks pass +``` diff --git 
a/.claude/skills/java-coding-standards/SKILL.md b/.claude/skills/java-coding-standards/SKILL.md new file mode 100644 index 000000000..af9902559 --- /dev/null +++ b/.claude/skills/java-coding-standards/SKILL.md @@ -0,0 +1,147 @@ +--- +name: java-coding-standards +description: "Java coding standards for Spring Boot services: naming, immutability, Optional usage, streams, exceptions, generics, and project layout." +origin: ECC +--- + +# Java Coding Standards + +Standards for readable, maintainable Java (17+) code in Spring Boot services. + +## When to Activate + +- Writing or reviewing Java code in Spring Boot projects +- Enforcing naming, immutability, or exception handling conventions +- Working with records, sealed classes, or pattern matching (Java 17+) +- Reviewing use of Optional, streams, or generics +- Structuring packages and project layout + +## Core Principles + +- Prefer clarity over cleverness +- Immutable by default; minimize shared mutable state +- Fail fast with meaningful exceptions +- Consistent naming and package structure + +## Naming + +```java +// ✅ Classes/Records: PascalCase +public class MarketService {} +public record Money(BigDecimal amount, Currency currency) {} + +// ✅ Methods/fields: camelCase +private final MarketRepository marketRepository; +public Market findBySlug(String slug) {} + +// ✅ Constants: UPPER_SNAKE_CASE +private static final int MAX_PAGE_SIZE = 100; +``` + +## Immutability + +```java +// ✅ Favor records and final fields +public record MarketDto(Long id, String name, MarketStatus status) {} + +public class Market { + private final Long id; + private final String name; + // getters only, no setters +} +``` + +## Optional Usage + +```java +// ✅ Return Optional from find* methods +Optional<Market> market = marketRepository.findBySlug(slug); + +// ✅ Map/flatMap instead of get() +return market + .map(MarketResponse::from) + .orElseThrow(() -> new EntityNotFoundException("Market not found")); +``` + +## Streams Best Practices + +```java
+// ✅ Use streams for transformations, keep pipelines short +List<String> names = markets.stream() + .map(Market::name) + .filter(Objects::nonNull) + .toList(); + +// ❌ Avoid complex nested streams; prefer loops for clarity +``` + +## Exceptions + +- Use unchecked exceptions for domain errors; wrap technical exceptions with context +- Create domain-specific exceptions (e.g., `MarketNotFoundException`) +- Avoid broad `catch (Exception ex)` unless rethrowing/logging centrally + +```java +throw new MarketNotFoundException(slug); +``` + +## Generics and Type Safety + +- Avoid raw types; declare generic parameters +- Prefer bounded generics for reusable utilities + +```java +public <T extends Identifiable> Map<Long, T> indexById(Collection<T> items) { ... } +``` + +## Project Structure (Maven/Gradle) + +``` +src/main/java/com/example/app/ + config/ + controller/ + service/ + repository/ + domain/ + dto/ + util/ +src/main/resources/ + application.yml +src/test/java/... (mirrors main) +``` + +## Formatting and Style + +- Use 2 or 4 spaces consistently (project standard) +- One public top-level type per file +- Keep methods short and focused; extract helpers +- Order members: constants, fields, constructors, public methods, protected, private + +## Code Smells to Avoid + +- Long parameter lists → use DTO/builders +- Deep nesting → early returns +- Magic numbers → named constants +- Static mutable state → prefer dependency injection +- Silent catch blocks → log and act or rethrow + +## Logging + +```java +private static final Logger log = LoggerFactory.getLogger(MarketService.class); +log.info("fetch_market slug={}", slug); +log.error("failed_fetch_market slug={}", slug, ex); +``` + +## Null Handling + +- Accept `@Nullable` only when unavoidable; otherwise use `@NonNull` +- Use Bean Validation (`@NotNull`, `@NotBlank`) on inputs + +## Testing Expectations + +- JUnit 5 + AssertJ for fluent assertions +- Mockito for mocking; avoid partial mocks where possible +- Favor deterministic tests; no hidden sleeps + 
+**Remember**: Keep code intentional, typed, and observable. Optimize for maintainability over micro-optimizations unless proven necessary. diff --git a/.claude/skills/springboot-verification/SKILL.md b/.claude/skills/springboot-verification/SKILL.md new file mode 100644 index 000000000..c8f790aac --- /dev/null +++ b/.claude/skills/springboot-verification/SKILL.md @@ -0,0 +1,231 @@ +--- +name: springboot-verification +description: "Verification loop for Spring Boot projects: build, static analysis, tests with coverage, security scans, and diff review before release or PR." +origin: ECC +--- + +# Spring Boot Verification Loop + +Run before PRs, after major changes, and pre-deploy. + +## When to Activate + +- Before opening a pull request for a Spring Boot service +- After major refactoring or dependency upgrades +- Pre-deployment verification for staging or production +- Running full build → lint → test → security scan pipeline +- Validating test coverage meets thresholds + +## Phase 1: Build + +```bash +mvn -T 4 clean verify -DskipTests +# or +./gradlew clean assemble -x test +``` + +If build fails, stop and fix. 
+ +## Phase 2: Static Analysis + +Maven (common plugins): +```bash +mvn -T 4 spotbugs:check pmd:check checkstyle:check +``` + +Gradle (if configured): +```bash +./gradlew checkstyleMain pmdMain spotbugsMain +``` + +## Phase 3: Tests + Coverage + +```bash +mvn -T 4 test +mvn jacoco:report # verify 80%+ coverage +# or +./gradlew test jacocoTestReport +``` + +Report: +- Total tests, passed/failed +- Coverage % (lines/branches) + +### Unit Tests + +Test service logic in isolation with mocked dependencies: + +```java +@ExtendWith(MockitoExtension.class) +class UserServiceTest { + + @Mock private UserRepository userRepository; + @InjectMocks private UserService userService; + + @Test + void createUser_validInput_returnsUser() { + var dto = new CreateUserDto("Alice", "alice@example.com"); + var expected = new User(1L, "Alice", "alice@example.com"); + when(userRepository.save(any(User.class))).thenReturn(expected); + + var result = userService.create(dto); + + assertThat(result.name()).isEqualTo("Alice"); + verify(userRepository).save(any(User.class)); + } + + @Test + void createUser_duplicateEmail_throwsException() { + var dto = new CreateUserDto("Alice", "existing@example.com"); + when(userRepository.existsByEmail(dto.email())).thenReturn(true); + + assertThatThrownBy(() -> userService.create(dto)) + .isInstanceOf(DuplicateEmailException.class); + } +} +``` + +### Integration Tests with Testcontainers + +Test against a real database instead of H2: + +```java +@SpringBootTest +@Testcontainers +class UserRepositoryIntegrationTest { + + @Container + static PostgreSQLContainer postgres = new PostgreSQLContainer<>("postgres:16-alpine") + .withDatabaseName("testdb"); + + @DynamicPropertySource + static void configureProperties(DynamicPropertyRegistry registry) { + registry.add("spring.datasource.url", postgres::getJdbcUrl); + registry.add("spring.datasource.username", postgres::getUsername); + registry.add("spring.datasource.password", postgres::getPassword); + } + + 
@Autowired private UserRepository userRepository; + + @Test + void findByEmail_existingUser_returnsUser() { + userRepository.save(new User("Alice", "alice@example.com")); + + var found = userRepository.findByEmail("alice@example.com"); + + assertThat(found).isPresent(); + assertThat(found.get().getName()).isEqualTo("Alice"); + } +} +``` + +### API Tests with MockMvc + +Test controller layer with full Spring context: + +```java +@WebMvcTest(UserController.class) +class UserControllerTest { + + @Autowired private MockMvc mockMvc; + @MockBean private UserService userService; + + @Test + void createUser_validInput_returns201() throws Exception { + var user = new UserDto(1L, "Alice", "alice@example.com"); + when(userService.create(any())).thenReturn(user); + + mockMvc.perform(post("/api/users") + .contentType(MediaType.APPLICATION_JSON) + .content(""" + {"name": "Alice", "email": "alice@example.com"} + """)) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$.name").value("Alice")); + } + + @Test + void createUser_invalidEmail_returns400() throws Exception { + mockMvc.perform(post("/api/users") + .contentType(MediaType.APPLICATION_JSON) + .content(""" + {"name": "Alice", "email": "not-an-email"} + """)) + .andExpect(status().isBadRequest()); + } +} +``` + +## Phase 4: Security Scan + +```bash +# Dependency CVEs +mvn org.owasp:dependency-check-maven:check +# or +./gradlew dependencyCheckAnalyze + +# Secrets in source +grep -rn "password\s*=\s*\"" src/ --include="*.java" --include="*.yml" --include="*.properties" +grep -rn "sk-\|api_key\|secret" src/ --include="*.java" --include="*.yml" + +# Secrets (git history) +git secrets --scan # if configured +``` + +### Common Security Findings + +``` +# Check for System.out.println (use logger instead) +grep -rn "System\.out\.print" src/main/ --include="*.java" + +# Check for raw exception messages in responses +grep -rn "e\.getMessage()" src/main/ --include="*.java" + +# Check for wildcard CORS +grep -rn 
"allowedOrigins.*\*" src/main/ --include="*.java" +``` + +## Phase 5: Lint/Format (optional gate) + +```bash +mvn spotless:apply # if using Spotless plugin +./gradlew spotlessApply +``` + +## Phase 6: Diff Review + +```bash +git diff --stat +git diff +``` + +Checklist: +- No debugging logs left (`System.out`, `log.debug` without guards) +- Meaningful errors and HTTP statuses +- Transactions and validation present where needed +- Config changes documented + +## Output Template + +``` +VERIFICATION REPORT +=================== +Build: [PASS/FAIL] +Static: [PASS/FAIL] (spotbugs/pmd/checkstyle) +Tests: [PASS/FAIL] (X/Y passed, Z% coverage) +Security: [PASS/FAIL] (CVE findings: N) +Diff: [X files changed] + +Overall: [READY / NOT READY] + +Issues to Fix: +1. ... +2. ... +``` + +## Continuous Mode + +- Re-run phases on significant changes or every 30–60 minutes in long sessions +- Keep a short loop: `mvn -T 4 test` + spotbugs for quick feedback + +**Remember**: Fast feedback beats late surprises. Keep the gate strict—treat warnings as defects in production systems. diff --git a/.claude/skills/tmux/SKILL.md b/.claude/skills/tmux/SKILL.md new file mode 100644 index 000000000..fa589c0e2 --- /dev/null +++ b/.claude/skills/tmux/SKILL.md @@ -0,0 +1,153 @@ +--- +name: tmux +description: Remote-control tmux sessions for interactive CLIs by sending keystrokes and scraping pane output. +metadata: + { "openclaw": { "emoji": "🧵", "os": ["darwin", "linux"], "requires": { "bins": ["tmux"] } } } +--- + +# tmux Session Control + +Control tmux sessions by sending keystrokes and reading output. Essential for managing Claude Code sessions. 
+ +## When to Use + +✅ **USE this skill when:** + +- Monitoring Claude/Codex sessions in tmux +- Sending input to interactive terminal applications +- Scraping output from long-running processes in tmux +- Navigating tmux panes/windows programmatically +- Checking on background work in existing sessions + +## When NOT to Use + +❌ **DON'T use this skill when:** + +- Running one-off shell commands → use `exec` tool directly +- Starting new background processes → use `exec` with `background:true` +- Non-interactive scripts → use `exec` tool +- The process isn't in tmux +- You need to create a new tmux session → use `exec` with `tmux new-session` + +## Example Sessions + +| Session | Purpose | +| ----------------------- | --------------------------- | +| `shared` | Primary interactive session | +| `worker-2` - `worker-8` | Parallel worker sessions | + +## Common Commands + +### List Sessions + +```bash +tmux list-sessions +tmux ls +``` + +### Capture Output + +```bash +# Last 20 lines of pane +tmux capture-pane -t shared -p | tail -20 + +# Entire scrollback +tmux capture-pane -t shared -p -S - + +# Specific pane in window +tmux capture-pane -t shared:0.0 -p +``` + +### Send Keys + +```bash +# Send text (doesn't press Enter) +tmux send-keys -t shared "hello" + +# Send text + Enter +tmux send-keys -t shared "y" Enter + +# Send special keys +tmux send-keys -t shared Enter +tmux send-keys -t shared Escape +tmux send-keys -t shared C-c # Ctrl+C +tmux send-keys -t shared C-d # Ctrl+D (EOF) +tmux send-keys -t shared C-z # Ctrl+Z (suspend) +``` + +### Window/Pane Navigation + +```bash +# Select window +tmux select-window -t shared:0 + +# Select pane +tmux select-pane -t shared:0.1 + +# List windows +tmux list-windows -t shared +``` + +### Session Management + +```bash +# Create new session +tmux new-session -d -s newsession + +# Kill session +tmux kill-session -t sessionname + +# Rename session +tmux rename-session -t old new +``` + +## Sending Input Safely + +For interactive 
TUIs (Claude Code, Codex, etc.), split text and Enter into separate sends to avoid paste/multiline edge cases: + +```bash +tmux send-keys -t shared -l -- "Please apply the patch in src/foo.ts" +sleep 0.1 +tmux send-keys -t shared Enter +``` + +## Claude Code Session Patterns + +### Check if Session Needs Input + +```bash +# Look for prompts +tmux capture-pane -t worker-3 -p | tail -10 | grep -E "❯|Yes.*No|proceed|permission" +``` + +### Approve Claude Code Prompt + +```bash +# Send 'y' and Enter +tmux send-keys -t worker-3 'y' Enter + +# Or select numbered option +tmux send-keys -t worker-3 '2' Enter +``` + +### Check All Sessions Status + +```bash +for s in shared worker-2 worker-3 worker-4 worker-5 worker-6 worker-7 worker-8; do + echo "=== $s ===" + tmux capture-pane -t $s -p 2>/dev/null | tail -5 +done +``` + +### Send Task to Session + +```bash +tmux send-keys -t worker-4 "Fix the bug in auth.js" Enter +``` + +## Notes + +- Use `capture-pane -p` to print to stdout (essential for scripting) +- `-S -` captures entire scrollback history +- Target format: `session:window.pane` (e.g., `shared:0.0`) +- Sessions persist across SSH disconnects diff --git a/.claude/skills/tmux/scripts/find-sessions.sh b/.claude/skills/tmux/scripts/find-sessions.sh new file mode 100755 index 000000000..8387c1629 --- /dev/null +++ b/.claude/skills/tmux/scripts/find-sessions.sh @@ -0,0 +1,112 @@ +#!/usr/bin/env bash +set -euo pipefail + +usage() { + cat <<'USAGE' +Usage: find-sessions.sh [-L socket-name|-S socket-path|-A] [-q pattern] + +List tmux sessions on a socket (default tmux socket if none provided). 
+ +Options: + -L, --socket tmux socket name (passed to tmux -L) + -S, --socket-path tmux socket path (passed to tmux -S) + -A, --all scan all sockets under OPENCLAW_TMUX_SOCKET_DIR + -q, --query case-insensitive substring to filter session names + -h, --help show this help +USAGE +} + +socket_name="" +socket_path="" +query="" +scan_all=false +socket_dir="${OPENCLAW_TMUX_SOCKET_DIR:-${CLAWDBOT_TMUX_SOCKET_DIR:-${TMPDIR:-/tmp}/openclaw-tmux-sockets}}" + +while [[ $# -gt 0 ]]; do + case "$1" in + -L|--socket) socket_name="${2-}"; shift 2 ;; + -S|--socket-path) socket_path="${2-}"; shift 2 ;; + -A|--all) scan_all=true; shift ;; + -q|--query) query="${2-}"; shift 2 ;; + -h|--help) usage; exit 0 ;; + *) echo "Unknown option: $1" >&2; usage; exit 1 ;; + esac +done + +if [[ "$scan_all" == true && ( -n "$socket_name" || -n "$socket_path" ) ]]; then + echo "Cannot combine --all with -L or -S" >&2 + exit 1 +fi + +if [[ -n "$socket_name" && -n "$socket_path" ]]; then + echo "Use either -L or -S, not both" >&2 + exit 1 +fi + +if ! command -v tmux >/dev/null 2>&1; then + echo "tmux not found in PATH" >&2 + exit 1 +fi + +list_sessions() { + local label="$1"; shift + local tmux_cmd=(tmux "$@") + + if ! sessions="$("${tmux_cmd[@]}" list-sessions -F '#{session_name}\t#{session_attached}\t#{session_created_string}' 2>/dev/null)"; then + echo "No tmux server found on $label" >&2 + return 1 + fi + + if [[ -n "$query" ]]; then + sessions="$(printf '%s\n' "$sessions" | grep -i -- "$query" || true)" + fi + + if [[ -z "$sessions" ]]; then + echo "No sessions found on $label" + return 0 + fi + + echo "Sessions on $label:" + printf '%s\n' "$sessions" | while IFS=$'\t' read -r name attached created; do + attached_label=$([[ "$attached" == "1" ]] && echo "attached" || echo "detached") + printf ' - %s (%s, started %s)\n' "$name" "$attached_label" "$created" + done +} + +if [[ "$scan_all" == true ]]; then + if [[ ! 
-d "$socket_dir" ]]; then + echo "Socket directory not found: $socket_dir" >&2 + exit 1 + fi + + shopt -s nullglob + sockets=("$socket_dir"/*) + shopt -u nullglob + + if [[ "${#sockets[@]}" -eq 0 ]]; then + echo "No sockets found under $socket_dir" >&2 + exit 1 + fi + + exit_code=0 + for sock in "${sockets[@]}"; do + if [[ ! -S "$sock" ]]; then + continue + fi + list_sessions "socket path '$sock'" -S "$sock" || exit_code=$? + done + exit "$exit_code" +fi + +tmux_cmd=(tmux) +socket_label="default socket" + +if [[ -n "$socket_name" ]]; then + tmux_cmd+=(-L "$socket_name") + socket_label="socket name '$socket_name'" +elif [[ -n "$socket_path" ]]; then + tmux_cmd+=(-S "$socket_path") + socket_label="socket path '$socket_path'" +fi + +list_sessions "$socket_label" "${tmux_cmd[@]:1}" diff --git a/.claude/skills/tmux/scripts/wait-for-text.sh b/.claude/skills/tmux/scripts/wait-for-text.sh new file mode 100755 index 000000000..56354be83 --- /dev/null +++ b/.claude/skills/tmux/scripts/wait-for-text.sh @@ -0,0 +1,83 @@ +#!/usr/bin/env bash +set -euo pipefail + +usage() { + cat <<'USAGE' +Usage: wait-for-text.sh -t target -p pattern [options] + +Poll a tmux pane for text and exit when found. 
+ +Options: + -t, --target tmux target (session:window.pane), required + -p, --pattern regex pattern to look for, required + -F, --fixed treat pattern as a fixed string (grep -F) + -T, --timeout seconds to wait (integer, default: 15) + -i, --interval poll interval in seconds (default: 0.5) + -l, --lines number of history lines to inspect (integer, default: 1000) + -h, --help show this help +USAGE +} + +target="" +pattern="" +grep_flag="-E" +timeout=15 +interval=0.5 +lines=1000 + +while [[ $# -gt 0 ]]; do + case "$1" in + -t|--target) target="${2-}"; shift 2 ;; + -p|--pattern) pattern="${2-}"; shift 2 ;; + -F|--fixed) grep_flag="-F"; shift ;; + -T|--timeout) timeout="${2-}"; shift 2 ;; + -i|--interval) interval="${2-}"; shift 2 ;; + -l|--lines) lines="${2-}"; shift 2 ;; + -h|--help) usage; exit 0 ;; + *) echo "Unknown option: $1" >&2; usage; exit 1 ;; + esac +done + +if [[ -z "$target" || -z "$pattern" ]]; then + echo "target and pattern are required" >&2 + usage + exit 1 +fi + +if ! [[ "$timeout" =~ ^[0-9]+$ ]]; then + echo "timeout must be an integer number of seconds" >&2 + exit 1 +fi + +if ! [[ "$lines" =~ ^[0-9]+$ ]]; then + echo "lines must be an integer" >&2 + exit 1 +fi + +if ! 
command -v tmux >/dev/null 2>&1; then + echo "tmux not found in PATH" >&2 + exit 1 +fi + +# End time in epoch seconds (integer, good enough for polling) +start_epoch=$(date +%s) +deadline=$((start_epoch + timeout)) + +while true; do + # -J joins wrapped lines, -S uses negative index to read last N lines + pane_text="$(tmux capture-pane -p -J -t "$target" -S "-${lines}" 2>/dev/null || true)" + + if printf '%s\n' "$pane_text" | grep $grep_flag -- "$pattern" >/dev/null 2>&1; then + exit 0 + fi + + now=$(date +%s) + if (( now >= deadline )); then + echo "Timed out after ${timeout}s waiting for pattern: $pattern" >&2 + echo "Last ${lines} lines from $target:" >&2 + printf '%s\n' "$pane_text" >&2 + exit 1 + fi + + sleep "$interval" +done diff --git a/.github/workflows/backend-ci.yml b/.github/workflows/backend-ci.yml index 3c34cb818..e744784bf 100644 --- a/.github/workflows/backend-ci.yml +++ b/.github/workflows/backend-ci.yml @@ -143,6 +143,7 @@ jobs: - name: Generate test report if: always() && steps.check-reports.outputs.reports_exist == 'true' + continue-on-error: true uses: dorny/test-reporter@v1 with: name: Backend Test Report diff --git a/.gitignore b/.gitignore index ccaa7935a..36f3cd94a 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,9 @@ target/ !**/src/main/**/target/ !**/src/test/**/target/ +### jqwik ### +.jqwik-database + ### IntelliJ IDEA ### .idea *.iws @@ -31,10 +34,6 @@ build/ ### VS Code ### .vscode/ -### AI Coding Assistants ### -.cursor/ -.qoder/ - ### Mac OS ### .DS_Store @@ -42,8 +41,27 @@ build/ package-lock.json +### Node modules ### +node_modules/ + .clinerules/ # Local / deploy scripts (do not commit) .env push-himarket.sh + +/workspace/ +/workspaces/ + +### Local docs ### +/docs/acp +.app.pid + +### OpenSandbox (local clone, not committed) ### +/OpenSandbox/ + +### Nacos (local symlink/clone, not committed) ### +/nacos + +### Kiro (AWS Kira AI coding agent) ### +.kiro/ diff --git a/.husky/pre-commit b/.husky/pre-commit deleted file mode 
100755 index c34d5a4e8..000000000 --- a/.husky/pre-commit +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env sh - -# 检查前端子目录的 diff 文件 -echo "🔍 Linting frontend diff files..." -cd himarket-web/himarket-frontend && npx lint-staged - -# 检查管理后台子目录的 diff 文件 -echo "🔍 Linting admin diff files..." -cd ../himarket-admin && npx lint-staged - diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties new file mode 100644 index 000000000..15a6c6e87 --- /dev/null +++ b/.mvn/wrapper/maven-wrapper.properties @@ -0,0 +1,3 @@ +wrapperVersion=3.3.4 +distributionType=only-script +distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.3/apache-maven-3.8.3-bin.zip diff --git a/.qoder/skills/create-issue-himarket/SKILL.md b/.qoder/skills/create-issue-himarket/SKILL.md new file mode 100644 index 000000000..244461d22 --- /dev/null +++ b/.qoder/skills/create-issue-himarket/SKILL.md @@ -0,0 +1,163 @@ +--- +name: create-issue-himarket +description: "通过自然语言在 HiMarket 社区创建 Issue。支持 Feature Request(功能请求)和 Bug Report(问题报告)两种类型。当用户想要向 HiMarket 提交功能建议或报告问题时使用此 skill。" +--- + +# 创建 HiMarket Issue + +## 概述 + +帮助用户通过自然语言描述,在 [higress-group/himarket](https://github.com/higress-group/himarket) 仓库创建规范的 Issue。 + +## Issue 类型 + +HiMarket 支持两种 Issue 类型: + +### 1. Feature Request(功能请求) + +用于提交新功能建议或改进想法。 + +**必填信息:** +- **Why do you need it?** - 为什么需要这个功能?描述你遇到的问题或痛点 +- **How could it be?** - 期望的功能是什么样的?描述输入和输出 +- **Other related information** - 其他相关信息、截图或上下文(可选) + +### 2. 
Bug Report(问题报告) + +用于报告 Bug 或异常行为。 + +**必填信息:** +- **Issue Description** - 问题简述 +- **What happened** - 发生了什么?包括异常堆栈信息 +- **Expected behavior** - 期望的行为是什么 +- **Reproduction steps** - 最小化的复现步骤 +- **Environment** - 环境信息(可选) + +**可选信息:** +- **Anything else** - 其他补充信息 +- **Root Cause** - 根因分析(如已定位) +- **Proposed Solution** - 建议的解决方案 + +## 工作流程 + +### 步骤 1:确定 Issue 类型 + +首先询问用户要创建的 Issue 类型: +- 功能请求(Feature Request) +- 问题报告(Bug Report) + +### 步骤 2:收集必要信息 + +根据 Issue 类型,通过对话逐步收集必要信息: + +**对于功能请求:** +1. 询问为什么需要这个功能(遇到了什么问题) +2. 询问期望的功能实现方式 +3. 询问是否有其他补充信息 + +**对于问题报告:** +1. 询问问题的简要描述 +2. 询问具体发生了什么(包括错误信息) +3. 询问期望的正确行为 +4. 询问复现步骤 +5. 询问是否已定位根因或有解决方案建议 + +### 步骤 3:生成 Issue 内容 + +根据收集的信息,按模板格式生成 Issue 内容,展示给用户确认。 + +### 步骤 4:创建 Issue + +确认后,使用 GitHub CLI 创建 Issue: + +```bash +# Feature Request +gh issue create --repo higress-group/himarket \ + --title "[Feature] <标题>" \ + --body "$(cat <<'EOF' +## Why do you need it? +<用户描述的问题/痛点> + +## How could it be? +<期望的功能实现> + +## Other related information +<其他补充信息> + +--- +**Type**: Feature Request +EOF +)" + +# Bug Report +gh issue create --repo higress-group/himarket \ + --title "[Bug] <标题>" \ + --body "$(cat <<'EOF' +## Issue Description +<问题简述> + +## What happened +<发生了什么> + +``` +<异常堆栈或错误信息> +``` + +## Expected behavior +<期望行为> + +## Reproduction steps +1. <步骤1> +2. <步骤2> +3. <步骤3> + +## Root Cause (if known) +<根因分析> + +## Proposed Solution (if any) +<建议的解决方案> + +## Files Changed (if applicable) +- `path/to/file1` +- `path/to/file2` + +--- +**Type**: Bug Report +**Component**: +EOF +)" +``` + +## 关键原则 + +- **一次一个问题** - 逐步收集信息,不要一次性询问太多 +- **提供示例** - 在询问时给出示例帮助用户理解 +- **确认重复** - 提醒用户先搜索是否有类似 Issue +- **语言选择** - 支持用户用中文或英文描述 +- **预览确认** - 创建前展示完整内容让用户确认 +- **返回链接** - 创建成功后返回 Issue URL +- **标题前缀** - Bug 使用 `[Bug]`,功能请求使用 `[Feature]`,也可按组件添加如 `[Frontend]` + +## 示例对话 + +**用户:** 我想给 HiMarket 提一个功能建议 + +**助手:** 好的,我来帮你创建一个 Feature Request。请问你为什么需要这个功能?遇到了什么问题或痛点? 
+ +**用户:** 聊天页面停止按钮点击没反应 + +**助手:** 明白了,这是一个 Bug Report。请问具体是什么情况?在什么操作下发现的? + +**用户:** 模型推理过程中点击停止按钮无法取消 + +**助手:** 好的,我整理了以下 Issue 内容,请确认: + +> **Title:** [Frontend] HiChat 停止按钮无法取消模型推理 +> +> ## Issue Description +> HiChat 聊天页面在模型推理过程中,停止按钮点击无响应。 +> +> ## Expected behavior +> 点击停止按钮后应中断推理过程。 + +确认无误后我将创建这个 Issue。 diff --git a/.qoder/skills/create-pr-himarket/SKILL.md b/.qoder/skills/create-pr-himarket/SKILL.md new file mode 100644 index 000000000..f57d682f4 --- /dev/null +++ b/.qoder/skills/create-pr-himarket/SKILL.md @@ -0,0 +1,278 @@ +--- +name: create-pr-himarket +description: "为 HiMarket 项目创建符合规范的 Pull Request。当用户需要提交代码、推送分支或创建 PR 时使用此 skill,确保 PR 标题和内容符合项目 CI 检查要求。" +--- + +# 创建 HiMarket Pull Request + +## 概述 + +帮助用户在 [higress-group/himarket](https://github.com/higress-group/himarket) 仓库创建符合规范的 Pull Request,确保通过 PR Title Check、PR Content Check 和 PR Size Check。 + +## PR 标题格式 + +### 必需格式 + +``` +type: 简短描述 +``` + +或带范围: + +``` +type(scope): 简短描述 +``` + +### 允许的 Type + +| Type | 说明 | 示例 | +|------|------|------| +| `feat` | 新功能 | `feat: add user authentication` | +| `fix` | Bug 修复 | `fix: resolve memory leak` | +| `docs` | 文档更新 | `docs: update API documentation` | +| `style` | 代码格式 | `style: format with prettier` | +| `refactor` | 重构 | `refactor: simplify service logic` | +| `perf` | 性能优化 | `perf: optimize queries` | +| `test` | 测试 | `test: add unit tests` | +| `build` | 构建系统 | `build: update dependencies` | +| `ci` | CI/CD | `ci: add workflow` | +| `chore` | 其他变更 | `chore: update gitignore` | +| `revert` | 回滚 | `revert: revert commit abc123` | + +### 标题规则 + +1. 必须包含 type 前缀 +2. type 后需要冒号和空格:`feat: ` 而不是 `feat:` +3. 描述必须以**小写字母**开头 +4. 保持简短清晰(建议 < 50 字符) + +## PR 内容格式(必填) + +### 必填部分 + +#### 1. Description(必填) + +必须包含 `## 📝 Description` 部分,且内容至少 10 个字符。 + +```markdown +## 📝 Description + +- 变更点 1 +- 变更点 2 +- 变更点 3 +``` + +#### 2. 
Type of Change(必填) + +必须至少勾选一项变更类型。 + +```markdown +## ✅ Type of Change + +- [x] Bug fix (non-breaking change) +- [ ] New feature (non-breaking change) +- [ ] Breaking change +- [ ] Documentation update +- [ ] Code refactoring +- [ ] Performance improvement +``` + +### 可选部分 + +#### 3. Related Issues(推荐) + +```markdown +## 🔗 Related Issues + +Fix #123 +Close #456 +``` + +#### 4. Testing(推荐) + +```markdown +## 🧪 Testing + +- [x] Unit tests added/updated +- [x] Manual testing completed +- 测试场景描述 +``` + +#### 5. Checklist(推荐) + +```markdown +## 📋 Checklist + +- [x] Code has been formatted (`mvn spotless:apply` for backend, `npm run lint:fix` for frontend) +- [x] Code is self-reviewed +- [x] No breaking changes +- [x] All CI checks pass +``` + +## 工作流程 + +### 步骤 1:检查当前状态 + +```bash +# 并行执行以下命令 +git status +git diff --stat +git log --oneline -5 +git branch -a +git remote -v +``` + +### 步骤 2:确认分支和远程 + +- 确认当前分支名称 +- 确认 origin 指向用户的 fork(如 `lexburner/himarket`) +- 确认 upstream 指向主仓库(`higress-group/himarket`) + +### 步骤 3:推送分支 + +```bash +git push -u origin +``` + +### 步骤 4:创建 PR + +使用 HEREDOC 格式确保内容正确: + +```bash +gh pr create --repo higress-group/himarket \ + --base main \ + --head : \ + --title "feat: 简短描述" \ + --body "$(cat <<'EOF' +## 📝 Description + +- 变更点 1 +- 变更点 2 + +## 🔗 Related Issues + +Fix #123 + +## ✅ Type of Change + +- [ ] Bug fix (non-breaking change) +- [x] New feature (non-breaking change) +- [ ] Breaking change +- [ ] Documentation update +- [ ] Code refactoring +- [ ] Performance improvement + +## 🧪 Testing + +- [x] Unit tests pass locally +- [x] Manual testing completed + +## 📋 Checklist + +- [x] Code has been formatted (`mvn spotless:apply` for backend, `npm run lint:fix` for frontend) +- [x] Code is self-reviewed +- [x] No breaking changes +- [x] All CI checks pass +EOF +)" +``` + +### 步骤 5:验证检查状态 + +```bash +gh pr checks --repo higress-group/himarket +``` + +确保以下检查通过: +- PR Title Check +- PR Content Check +- PR Size Check +- PR Validation Summary + 
+## 关键原则 + +- **标题小写** - 描述部分必须以小写字母开头 +- **内容完整** - 必须包含 `## 📝 Description` 和 `## ✅ Type of Change` +- **勾选类型** - Type of Change 必须至少勾选一项 `[x]` +- **关联 Issue** - 推荐使用 `Fix #123` 格式关联 Issue +- **格式化代码** - 提交前运行 `mvn spotless:apply` 或 `npm run lint:fix` +- **不提交图片** - 避免将截图等二进制文件提交到仓库 + +## 常见错误 + +### 错误 1:标题首字母大写 + +``` +❌ feat: Add new feature +✅ feat: add new feature +``` + +### 错误 2:缺少 Description 标题 + +```markdown +❌ 直接写内容 +✅ ## 📝 Description + 内容 +``` + +### 错误 3:未勾选 Type of Change + +```markdown +❌ - [ ] New feature +✅ - [x] New feature +``` + +### 错误 4:Description 内容太短 + +```markdown +❌ ## 📝 Description + Fix bug + +✅ ## 📝 Description + Fix pagination bug in product list +``` + +## 完整示例 + +**标题:** +``` +feat(chat): add tool call support and stop generation feature +``` + +**内容:** +```markdown +## 📝 Description + +- 添加聊天工具调用(Tool Call)支持,工具执行状态按消息顺序内联展示 +- 添加停止生成过程功能,支持中断正在进行的 AI 回复 +- 优化模型推理时滚动条自由滑动体验 + +## 🔗 Related Issues + +Fix #163 +Fix #164 +Fix #165 + +## ✅ Type of Change + +- [x] Bug fix (non-breaking change) +- [x] New feature (non-breaking change) +- [ ] Breaking change +- [ ] Documentation update +- [ ] Code refactoring +- [ ] Performance improvement + +## 🧪 Testing + +- [x] Unit tests pass locally +- [x] Manual testing completed +- 测试停止按钮能否正常中断 SSE 流式请求 +- 测试模型推理时滚动条是否可自由滑动 + +## 📋 Checklist + +- [x] Code has been formatted (`mvn spotless:apply` for backend, `npm run lint:fix` for frontend) +- [x] Code is self-reviewed +- [x] No breaking changes +- [x] All CI checks pass +``` diff --git a/.qoder/skills/java-coding-standards/SKILL.md b/.qoder/skills/java-coding-standards/SKILL.md new file mode 100644 index 000000000..af9902559 --- /dev/null +++ b/.qoder/skills/java-coding-standards/SKILL.md @@ -0,0 +1,147 @@ +--- +name: java-coding-standards +description: "Java coding standards for Spring Boot services: naming, immutability, Optional usage, streams, exceptions, generics, and project layout." 
+origin: ECC +--- + +# Java Coding Standards + +Standards for readable, maintainable Java (17+) code in Spring Boot services. + +## When to Activate + +- Writing or reviewing Java code in Spring Boot projects +- Enforcing naming, immutability, or exception handling conventions +- Working with records, sealed classes, or pattern matching (Java 17+) +- Reviewing use of Optional, streams, or generics +- Structuring packages and project layout + +## Core Principles + +- Prefer clarity over cleverness +- Immutable by default; minimize shared mutable state +- Fail fast with meaningful exceptions +- Consistent naming and package structure + +## Naming + +```java +// ✅ Classes/Records: PascalCase +public class MarketService {} +public record Money(BigDecimal amount, Currency currency) {} + +// ✅ Methods/fields: camelCase +private final MarketRepository marketRepository; +public Market findBySlug(String slug) {} + +// ✅ Constants: UPPER_SNAKE_CASE +private static final int MAX_PAGE_SIZE = 100; +``` + +## Immutability + +```java +// ✅ Favor records and final fields +public record MarketDto(Long id, String name, MarketStatus status) {} + +public class Market { + private final Long id; + private final String name; + // getters only, no setters +} +``` + +## Optional Usage + +```java +// ✅ Return Optional from find* methods +Optional market = marketRepository.findBySlug(slug); + +// ✅ Map/flatMap instead of get() +return market + .map(MarketResponse::from) + .orElseThrow(() -> new EntityNotFoundException("Market not found")); +``` + +## Streams Best Practices + +```java +// ✅ Use streams for transformations, keep pipelines short +List names = markets.stream() + .map(Market::name) + .filter(Objects::nonNull) + .toList(); + +// ❌ Avoid complex nested streams; prefer loops for clarity +``` + +## Exceptions + +- Use unchecked exceptions for domain errors; wrap technical exceptions with context +- Create domain-specific exceptions (e.g., `MarketNotFoundException`) +- Avoid broad 
`catch (Exception ex)` unless rethrowing/logging centrally + +```java +throw new MarketNotFoundException(slug); +``` + +## Generics and Type Safety + +- Avoid raw types; declare generic parameters +- Prefer bounded generics for reusable utilities + +```java +public Map indexById(Collection items) { ... } +``` + +## Project Structure (Maven/Gradle) + +``` +src/main/java/com/example/app/ + config/ + controller/ + service/ + repository/ + domain/ + dto/ + util/ +src/main/resources/ + application.yml +src/test/java/... (mirrors main) +``` + +## Formatting and Style + +- Use 2 or 4 spaces consistently (project standard) +- One public top-level type per file +- Keep methods short and focused; extract helpers +- Order members: constants, fields, constructors, public methods, protected, private + +## Code Smells to Avoid + +- Long parameter lists → use DTO/builders +- Deep nesting → early returns +- Magic numbers → named constants +- Static mutable state → prefer dependency injection +- Silent catch blocks → log and act or rethrow + +## Logging + +```java +private static final Logger log = LoggerFactory.getLogger(MarketService.class); +log.info("fetch_market slug={}", slug); +log.error("failed_fetch_market slug={}", slug, ex); +``` + +## Null Handling + +- Accept `@Nullable` only when unavoidable; otherwise use `@NonNull` +- Use Bean Validation (`@NotNull`, `@NotBlank`) on inputs + +## Testing Expectations + +- JUnit 5 + AssertJ for fluent assertions +- Mockito for mocking; avoid partial mocks where possible +- Favor deterministic tests; no hidden sleeps + +**Remember**: Keep code intentional, typed, and observable. Optimize for maintainability over micro-optimizations unless proven necessary. 
diff --git a/.qoder/skills/publish-skills/SKILL.md b/.qoder/skills/publish-skills/SKILL.md new file mode 100644 index 000000000..d1c81f9ce --- /dev/null +++ b/.qoder/skills/publish-skills/SKILL.md @@ -0,0 +1,216 @@ +--- +name: publish-skills +description: Publish local Agent Skills to a HiMarket backend instance. Use when the user wants to upload, publish, deploy, or sync skills to HiMarket. Supports batch publishing all skills in a directory with automatic category selection, tag generation, and conflict avoidance. +--- + +# Publish Skills to HiMarket + +将本地 Skill 目录批量发布到 HiMarket 后台,自动处理分类选择、Tag 生成、冲突检测和门户发布。 + +## Configuration + +从 `~/.env` 读取以下变量(也可通过 shell 环境变量覆盖): + +``` +HIMARKET_PUBLISH_URL=http://localhost:8080 # HiMarket 后台地址 +HIMARKET_PUBLISH_USERNAME=admin # 管理员用户名 +HIMARKET_PUBLISH_PASSWORD=admin # 管理员密码 +``` + +如果 `~/.env` 中没有这些变量,提示用户配置后再执行。 + +## Invocation + +用户调用方式: +- `publish-skills` — 发布默认目录 `~/Downloads/skills` 下所有 skill +- `publish-skills /path/to/skills` — 发布指定目录下所有 skill +- `publish-skills /path/to/skills/pdf` — 仅发布单个 skill + +## Workflow + +### Step 1: Load Config & Authenticate + +```bash +# 加载环境变量 +source ~/.env 2>/dev/null +HM_URL="${HIMARKET_PUBLISH_URL:-http://localhost:8080}" +HM_USER="${HIMARKET_PUBLISH_USERNAME:-admin}" +HM_PASS="${HIMARKET_PUBLISH_PASSWORD:-admin}" + +# 获取 admin token +TOKEN=$(curl -s --connect-timeout 10 --max-time 15 -X POST "$HM_URL/admins/login" \ + -H "Content-Type: application/json" \ + -d "{\"username\":\"$HM_USER\",\"password\":\"$HM_PASS\"}" | jq -r '.data.access_token') +``` + +验证 TOKEN 非空且非 `null`,否则报错退出。 + +### Step 2: Get Portal & Existing Data + +```bash +# 获取默认门户 ID +PORTAL_ID=$(curl -s --max-time 15 -H "Authorization: Bearer $TOKEN" \ + "$HM_URL/portals?size=1" | jq -r '.data.content[0].portalId // empty') + +# 获取已有 AGENT_SKILL 产品列表(避免重复) +curl -s -H "Authorization: Bearer $TOKEN" \ + "$HM_URL/products?type=AGENT_SKILL&size=200" | jq '.data.content[] | {name, productId}' + +# 获取已有分类列表 +curl -s -H 
"Authorization: Bearer $TOKEN" \ + "$HM_URL/product-categories?size=200" | jq '.data.content[] | {categoryId, name}' +``` + +记录已有产品名称和分类信息,用于后续冲突检测和分类匹配。 + +### Step 3: Scan Skills Directory + +遍历目标目录下每个子目录,检查是否包含 `SKILL.md`: +- 有 `SKILL.md` → 有效 skill,继续处理 +- 无 `SKILL.md` → 跳过,记录日志 + +从 `SKILL.md` front matter 中解析: +- `name` — skill 名称(必填) +- `description` — 描述(截断到 256 字符) + +**冲突检测**:如果该 name 已存在于远端产品列表中,记录其 productId,后续走更新流程(仅重新上传 package),不重新创建产品。 + +### Step 4: Determine Category + +根据 skill 的 `name`、`description` 和 `SKILL.md` 内容,为每个 skill 选择最合适的分类。 + +**预定义分类映射**(优先匹配): + +| Skill 关键词 | 分类名称 | 说明 | +|---|---|---| +| pdf, docx, pptx, xlsx | 文档处理 | 文档读写、格式转换 | +| frontend-design, notion-infographic, remotion | 设计创意 | UI 设计、图形生成、视频制作 | +| vite | 开发工具 | 开发框架、构建工具 | +| crawl, extract, search, tavily-best-practices, discord | 自动化 | 爬虫、搜索、集成 | +| find-skill, find-skills | 技能发现 | Skill 搜索和安装 | +| research | 效率提升 | 调研、信息整合 | + +**如果 skill 不在预定义映射中**,阅读其 `SKILL.md` 内容后自行判断最佳分类。 + +**分类匹配逻辑**: +1. 先在已有分类列表中按名称模糊匹配 +2. 匹配到 → 使用已有 categoryId +3. 
未匹配到 → 创建新分类: + +```bash +curl -s -X POST "$HM_URL/product-categories" \ + -H "Authorization: Bearer $TOKEN" \ + -H "Content-Type: application/json" \ + -d '{"name":"分类名称","description":"分类描述"}' +``` + +### Step 5: Generate Tags + +为每个 skill 生成 3-6 个有意义的 tag。Tag 要求: +- 英文小写,用连字符分隔(如 `pdf-parsing`、`web-scraping`) +- 反映 skill 的核心能力和使用场景 +- 不要过于宽泛(避免 `tool`、`utility` 这类无意义 tag) + +**Tag 生成参考**: + +| Skill | 推荐 Tags | +|---|---| +| pdf | `pdf-parsing`, `document-generation`, `form-filling`, `text-extraction`, `page-manipulation` | +| docx | `word-document`, `docx-generation`, `content-extraction`, `template-processing` | +| pptx | `presentation`, `slide-generation`, `powerpoint`, `content-modification` | +| xlsx | `spreadsheet`, `excel`, `data-manipulation`, `formula-processing` | +| vite | `web-development`, `react`, `frontend-scaffold`, `build-tool` | +| frontend-design | `ui-design`, `frontend`, `visual-design`, `web-components` | +| notion-infographic | `infographic`, `visual-content`, `notion-style`, `social-media` | +| remotion | `video-creation`, `react-video`, `animation`, `media-production` | +| crawl | `web-crawling`, `content-download`, `site-archival`, `markdown-conversion` | +| search | `web-search`, `tavily`, `content-discovery`, `real-time-data` | +| extract | `content-extraction`, `url-parsing`, `web-content`, `markdown` | +| research | `ai-research`, `topic-synthesis`, `citations`, `knowledge-base` | +| find-skills | `skill-discovery`, `skill-installation`, `capability-search` | +| discord | `discord-bot`, `messaging`, `community-management`, `automation` | +| tavily-best-practices | `tavily-integration`, `search-api`, `rag-pipeline`, `agentic-workflow` | + +**如果 skill 不在上表中**,阅读 `SKILL.md` 内容后自行生成合理的 tag。 + +### Step 6: Create or Update Product + +**新建产品**(远端不存在同名产品时): + +```bash +# document 是 SKILL.md 的全文内容 +PAYLOAD=$(jq -n \ + --arg name "$SKILL_NAME" \ + --arg desc "$DESCRIPTION" \ + --arg doc "$DOCUMENT" \ + --argjson cats '["categoryId1"]' \ + 
--argjson tags '["tag1","tag2","tag3"]' \ + '{ + name: $name, + description: $desc, + type: "AGENT_SKILL", + document: $doc, + autoApprove: true, + categories: $cats, + feature: { skillConfig: { skillTags: $tags } } + }') + +curl -s -X POST "$HM_URL/products" \ + -H "Authorization: Bearer $TOKEN" \ + -H "Content-Type: application/json" \ + -d "$PAYLOAD" | jq '.data.productId' +``` + +**已存在产品**:直接使用已有 productId,跳到 Step 7 上传 package。 + +### Step 7: Zip & Upload Package + +```bash +# 打包 skill 目录为 zip(排除 .DS_Store) +TMPZIP=$(mktemp /tmp/skill-XXXXXX.zip) +(cd "$SKILL_DIR" && zip -qry "$TMPZIP" . --exclude "*.DS_Store") + +# 上传到 HiMarket(支持重试,Nacos 写入可能较慢) +curl -s --max-time 120 -X POST "$HM_URL/skills/$PRODUCT_ID/package" \ + -H "Authorization: Bearer $TOKEN" \ + -F "file=@$TMPZIP;type=application/zip" + +rm -f "$TMPZIP" +``` + +上传失败时最多重试 3 次,每次间隔递增(1s、2s、3s)。 + +### Step 8: Publish to Portal + +```bash +curl -s -X POST "$HM_URL/products/$PRODUCT_ID/publications" \ + -H "Authorization: Bearer $TOKEN" \ + -H "Content-Type: application/json" \ + -d "{\"portalId\":\"$PORTAL_ID\"}" +``` + +如果产品已发布(返回错误),忽略该错误继续。 + +### Step 9: Summary + +处理完所有 skill 后,输出汇总表格: + +| Skill | 状态 | 分类 | Tags | 备注 | +|---|---|---|---|---| +| pdf | 新建并发布 | 文档处理 | pdf-parsing, ... | | +| vite | 已存在,更新包 | 开发工具 | web-development, ... 
| | + +## Error Handling + +- **Token 获取失败**:检查 URL 和凭据,提示用户检查 `~/.env` 配置 +- **Portal 不存在**:警告用户需先在 HiMarket 后台创建门户 +- **上传超时**:自动重试,最终失败记录到汇总 +- **分类创建失败**:跳过分类关联,继续其他步骤 +- **产品创建失败**:记录错误,继续处理下一个 skill + +## Important Notes + +- 执行前始终先列出将要处理的 skill 清单,等用户确认后再开始 +- 每处理完一个 skill 立即输出进度 +- 使用 TodoWrite 跟踪每个 skill 的处理状态 +- description 截断到 256 字符以符合 API 限制 diff --git a/.qoder/skills/springboot-verification/SKILL.md b/.qoder/skills/springboot-verification/SKILL.md new file mode 100644 index 000000000..c8f790aac --- /dev/null +++ b/.qoder/skills/springboot-verification/SKILL.md @@ -0,0 +1,231 @@ +--- +name: springboot-verification +description: "Verification loop for Spring Boot projects: build, static analysis, tests with coverage, security scans, and diff review before release or PR." +origin: ECC +--- + +# Spring Boot Verification Loop + +Run before PRs, after major changes, and pre-deploy. + +## When to Activate + +- Before opening a pull request for a Spring Boot service +- After major refactoring or dependency upgrades +- Pre-deployment verification for staging or production +- Running full build → lint → test → security scan pipeline +- Validating test coverage meets thresholds + +## Phase 1: Build + +```bash +mvn -T 4 clean verify -DskipTests +# or +./gradlew clean assemble -x test +``` + +If build fails, stop and fix. 
+ +## Phase 2: Static Analysis + +Maven (common plugins): +```bash +mvn -T 4 spotbugs:check pmd:check checkstyle:check +``` + +Gradle (if configured): +```bash +./gradlew checkstyleMain pmdMain spotbugsMain +``` + +## Phase 3: Tests + Coverage + +```bash +mvn -T 4 test +mvn jacoco:report # verify 80%+ coverage +# or +./gradlew test jacocoTestReport +``` + +Report: +- Total tests, passed/failed +- Coverage % (lines/branches) + +### Unit Tests + +Test service logic in isolation with mocked dependencies: + +```java +@ExtendWith(MockitoExtension.class) +class UserServiceTest { + + @Mock private UserRepository userRepository; + @InjectMocks private UserService userService; + + @Test + void createUser_validInput_returnsUser() { + var dto = new CreateUserDto("Alice", "alice@example.com"); + var expected = new User(1L, "Alice", "alice@example.com"); + when(userRepository.save(any(User.class))).thenReturn(expected); + + var result = userService.create(dto); + + assertThat(result.name()).isEqualTo("Alice"); + verify(userRepository).save(any(User.class)); + } + + @Test + void createUser_duplicateEmail_throwsException() { + var dto = new CreateUserDto("Alice", "existing@example.com"); + when(userRepository.existsByEmail(dto.email())).thenReturn(true); + + assertThatThrownBy(() -> userService.create(dto)) + .isInstanceOf(DuplicateEmailException.class); + } +} +``` + +### Integration Tests with Testcontainers + +Test against a real database instead of H2: + +```java +@SpringBootTest +@Testcontainers +class UserRepositoryIntegrationTest { + + @Container + static PostgreSQLContainer postgres = new PostgreSQLContainer<>("postgres:16-alpine") + .withDatabaseName("testdb"); + + @DynamicPropertySource + static void configureProperties(DynamicPropertyRegistry registry) { + registry.add("spring.datasource.url", postgres::getJdbcUrl); + registry.add("spring.datasource.username", postgres::getUsername); + registry.add("spring.datasource.password", postgres::getPassword); + } + + 
@Autowired private UserRepository userRepository; + + @Test + void findByEmail_existingUser_returnsUser() { + userRepository.save(new User("Alice", "alice@example.com")); + + var found = userRepository.findByEmail("alice@example.com"); + + assertThat(found).isPresent(); + assertThat(found.get().getName()).isEqualTo("Alice"); + } +} +``` + +### API Tests with MockMvc + +Test controller layer with full Spring context: + +```java +@WebMvcTest(UserController.class) +class UserControllerTest { + + @Autowired private MockMvc mockMvc; + @MockBean private UserService userService; + + @Test + void createUser_validInput_returns201() throws Exception { + var user = new UserDto(1L, "Alice", "alice@example.com"); + when(userService.create(any())).thenReturn(user); + + mockMvc.perform(post("/api/users") + .contentType(MediaType.APPLICATION_JSON) + .content(""" + {"name": "Alice", "email": "alice@example.com"} + """)) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$.name").value("Alice")); + } + + @Test + void createUser_invalidEmail_returns400() throws Exception { + mockMvc.perform(post("/api/users") + .contentType(MediaType.APPLICATION_JSON) + .content(""" + {"name": "Alice", "email": "not-an-email"} + """)) + .andExpect(status().isBadRequest()); + } +} +``` + +## Phase 4: Security Scan + +```bash +# Dependency CVEs +mvn org.owasp:dependency-check-maven:check +# or +./gradlew dependencyCheckAnalyze + +# Secrets in source +grep -rn "password\s*=\s*\"" src/ --include="*.java" --include="*.yml" --include="*.properties" +grep -rn "sk-\|api_key\|secret" src/ --include="*.java" --include="*.yml" + +# Secrets (git history) +git secrets --scan # if configured +``` + +### Common Security Findings + +``` +# Check for System.out.println (use logger instead) +grep -rn "System\.out\.print" src/main/ --include="*.java" + +# Check for raw exception messages in responses +grep -rn "e\.getMessage()" src/main/ --include="*.java" + +# Check for wildcard CORS +grep -rn 
"allowedOrigins.*\*" src/main/ --include="*.java" +``` + +## Phase 5: Lint/Format (optional gate) + +```bash +mvn spotless:apply # if using Spotless plugin +./gradlew spotlessApply +``` + +## Phase 6: Diff Review + +```bash +git diff --stat +git diff +``` + +Checklist: +- No debugging logs left (`System.out`, `log.debug` without guards) +- Meaningful errors and HTTP statuses +- Transactions and validation present where needed +- Config changes documented + +## Output Template + +``` +VERIFICATION REPORT +=================== +Build: [PASS/FAIL] +Static: [PASS/FAIL] (spotbugs/pmd/checkstyle) +Tests: [PASS/FAIL] (X/Y passed, Z% coverage) +Security: [PASS/FAIL] (CVE findings: N) +Diff: [X files changed] + +Overall: [READY / NOT READY] + +Issues to Fix: +1. ... +2. ... +``` + +## Continuous Mode + +- Re-run phases on significant changes or every 30–60 minutes in long sessions +- Keep a short loop: `mvn -T 4 test` + spotbugs for quick feedback + +**Remember**: Fast feedback beats late surprises. Keep the gate strict—treat warnings as defects in production systems. diff --git a/.qoder/skills/tmux/SKILL.md b/.qoder/skills/tmux/SKILL.md new file mode 100644 index 000000000..fa589c0e2 --- /dev/null +++ b/.qoder/skills/tmux/SKILL.md @@ -0,0 +1,153 @@ +--- +name: tmux +description: Remote-control tmux sessions for interactive CLIs by sending keystrokes and scraping pane output. +metadata: + { "openclaw": { "emoji": "🧵", "os": ["darwin", "linux"], "requires": { "bins": ["tmux"] } } } +--- + +# tmux Session Control + +Control tmux sessions by sending keystrokes and reading output. Essential for managing Claude Code sessions. 
+ +## When to Use + +✅ **USE this skill when:** + +- Monitoring Claude/Codex sessions in tmux +- Sending input to interactive terminal applications +- Scraping output from long-running processes in tmux +- Navigating tmux panes/windows programmatically +- Checking on background work in existing sessions + +## When NOT to Use + +❌ **DON'T use this skill when:** + +- Running one-off shell commands → use `exec` tool directly +- Starting new background processes → use `exec` with `background:true` +- Non-interactive scripts → use `exec` tool +- The process isn't in tmux +- You need to create a new tmux session → use `exec` with `tmux new-session` + +## Example Sessions + +| Session | Purpose | +| ----------------------- | --------------------------- | +| `shared` | Primary interactive session | +| `worker-2` - `worker-8` | Parallel worker sessions | + +## Common Commands + +### List Sessions + +```bash +tmux list-sessions +tmux ls +``` + +### Capture Output + +```bash +# Last 20 lines of pane +tmux capture-pane -t shared -p | tail -20 + +# Entire scrollback +tmux capture-pane -t shared -p -S - + +# Specific pane in window +tmux capture-pane -t shared:0.0 -p +``` + +### Send Keys + +```bash +# Send text (doesn't press Enter) +tmux send-keys -t shared "hello" + +# Send text + Enter +tmux send-keys -t shared "y" Enter + +# Send special keys +tmux send-keys -t shared Enter +tmux send-keys -t shared Escape +tmux send-keys -t shared C-c # Ctrl+C +tmux send-keys -t shared C-d # Ctrl+D (EOF) +tmux send-keys -t shared C-z # Ctrl+Z (suspend) +``` + +### Window/Pane Navigation + +```bash +# Select window +tmux select-window -t shared:0 + +# Select pane +tmux select-pane -t shared:0.1 + +# List windows +tmux list-windows -t shared +``` + +### Session Management + +```bash +# Create new session +tmux new-session -d -s newsession + +# Kill session +tmux kill-session -t sessionname + +# Rename session +tmux rename-session -t old new +``` + +## Sending Input Safely + +For interactive 
TUIs (Claude Code, Codex, etc.), split text and Enter into separate sends to avoid paste/multiline edge cases: + +```bash +tmux send-keys -t shared -l -- "Please apply the patch in src/foo.ts" +sleep 0.1 +tmux send-keys -t shared Enter +``` + +## Claude Code Session Patterns + +### Check if Session Needs Input + +```bash +# Look for prompts +tmux capture-pane -t worker-3 -p | tail -10 | grep -E "❯|Yes.*No|proceed|permission" +``` + +### Approve Claude Code Prompt + +```bash +# Send 'y' and Enter +tmux send-keys -t worker-3 'y' Enter + +# Or select numbered option +tmux send-keys -t worker-3 '2' Enter +``` + +### Check All Sessions Status + +```bash +for s in shared worker-2 worker-3 worker-4 worker-5 worker-6 worker-7 worker-8; do + echo "=== $s ===" + tmux capture-pane -t $s -p 2>/dev/null | tail -5 +done +``` + +### Send Task to Session + +```bash +tmux send-keys -t worker-4 "Fix the bug in auth.js" Enter +``` + +## Notes + +- Use `capture-pane -p` to print to stdout (essential for scripting) +- `-S -` captures entire scrollback history +- Target format: `session:window.pane` (e.g., `shared:0.0`) +- Sessions persist across SSH disconnects diff --git a/.qoder/skills/tmux/scripts/find-sessions.sh b/.qoder/skills/tmux/scripts/find-sessions.sh new file mode 100755 index 000000000..8387c1629 --- /dev/null +++ b/.qoder/skills/tmux/scripts/find-sessions.sh @@ -0,0 +1,112 @@ +#!/usr/bin/env bash +set -euo pipefail + +usage() { + cat <<'USAGE' +Usage: find-sessions.sh [-L socket-name|-S socket-path|-A] [-q pattern] + +List tmux sessions on a socket (default tmux socket if none provided). 
+ +Options: + -L, --socket tmux socket name (passed to tmux -L) + -S, --socket-path tmux socket path (passed to tmux -S) + -A, --all scan all sockets under OPENCLAW_TMUX_SOCKET_DIR + -q, --query case-insensitive substring to filter session names + -h, --help show this help +USAGE +} + +socket_name="" +socket_path="" +query="" +scan_all=false +socket_dir="${OPENCLAW_TMUX_SOCKET_DIR:-${CLAWDBOT_TMUX_SOCKET_DIR:-${TMPDIR:-/tmp}/openclaw-tmux-sockets}}" + +while [[ $# -gt 0 ]]; do + case "$1" in + -L|--socket) socket_name="${2-}"; shift 2 ;; + -S|--socket-path) socket_path="${2-}"; shift 2 ;; + -A|--all) scan_all=true; shift ;; + -q|--query) query="${2-}"; shift 2 ;; + -h|--help) usage; exit 0 ;; + *) echo "Unknown option: $1" >&2; usage; exit 1 ;; + esac +done + +if [[ "$scan_all" == true && ( -n "$socket_name" || -n "$socket_path" ) ]]; then + echo "Cannot combine --all with -L or -S" >&2 + exit 1 +fi + +if [[ -n "$socket_name" && -n "$socket_path" ]]; then + echo "Use either -L or -S, not both" >&2 + exit 1 +fi + +if ! command -v tmux >/dev/null 2>&1; then + echo "tmux not found in PATH" >&2 + exit 1 +fi + +list_sessions() { + local label="$1"; shift + local tmux_cmd=(tmux "$@") + + if ! sessions="$("${tmux_cmd[@]}" list-sessions -F '#{session_name}\t#{session_attached}\t#{session_created_string}' 2>/dev/null)"; then + echo "No tmux server found on $label" >&2 + return 1 + fi + + if [[ -n "$query" ]]; then + sessions="$(printf '%s\n' "$sessions" | grep -i -- "$query" || true)" + fi + + if [[ -z "$sessions" ]]; then + echo "No sessions found on $label" + return 0 + fi + + echo "Sessions on $label:" + printf '%s\n' "$sessions" | while IFS=$'\t' read -r name attached created; do + attached_label=$([[ "$attached" == "1" ]] && echo "attached" || echo "detached") + printf ' - %s (%s, started %s)\n' "$name" "$attached_label" "$created" + done +} + +if [[ "$scan_all" == true ]]; then + if [[ ! 
-d "$socket_dir" ]]; then + echo "Socket directory not found: $socket_dir" >&2 + exit 1 + fi + + shopt -s nullglob + sockets=("$socket_dir"/*) + shopt -u nullglob + + if [[ "${#sockets[@]}" -eq 0 ]]; then + echo "No sockets found under $socket_dir" >&2 + exit 1 + fi + + exit_code=0 + for sock in "${sockets[@]}"; do + if [[ ! -S "$sock" ]]; then + continue + fi + list_sessions "socket path '$sock'" -S "$sock" || exit_code=$? + done + exit "$exit_code" +fi + +tmux_cmd=(tmux) +socket_label="default socket" + +if [[ -n "$socket_name" ]]; then + tmux_cmd+=(-L "$socket_name") + socket_label="socket name '$socket_name'" +elif [[ -n "$socket_path" ]]; then + tmux_cmd+=(-S "$socket_path") + socket_label="socket path '$socket_path'" +fi + +list_sessions "$socket_label" "${tmux_cmd[@]:1}" diff --git a/.qoder/skills/tmux/scripts/wait-for-text.sh b/.qoder/skills/tmux/scripts/wait-for-text.sh new file mode 100755 index 000000000..56354be83 --- /dev/null +++ b/.qoder/skills/tmux/scripts/wait-for-text.sh @@ -0,0 +1,83 @@ +#!/usr/bin/env bash +set -euo pipefail + +usage() { + cat <<'USAGE' +Usage: wait-for-text.sh -t target -p pattern [options] + +Poll a tmux pane for text and exit when found. 
+ +Options: + -t, --target tmux target (session:window.pane), required + -p, --pattern regex pattern to look for, required + -F, --fixed treat pattern as a fixed string (grep -F) + -T, --timeout seconds to wait (integer, default: 15) + -i, --interval poll interval in seconds (default: 0.5) + -l, --lines number of history lines to inspect (integer, default: 1000) + -h, --help show this help +USAGE +} + +target="" +pattern="" +grep_flag="-E" +timeout=15 +interval=0.5 +lines=1000 + +while [[ $# -gt 0 ]]; do + case "$1" in + -t|--target) target="${2-}"; shift 2 ;; + -p|--pattern) pattern="${2-}"; shift 2 ;; + -F|--fixed) grep_flag="-F"; shift ;; + -T|--timeout) timeout="${2-}"; shift 2 ;; + -i|--interval) interval="${2-}"; shift 2 ;; + -l|--lines) lines="${2-}"; shift 2 ;; + -h|--help) usage; exit 0 ;; + *) echo "Unknown option: $1" >&2; usage; exit 1 ;; + esac +done + +if [[ -z "$target" || -z "$pattern" ]]; then + echo "target and pattern are required" >&2 + usage + exit 1 +fi + +if ! [[ "$timeout" =~ ^[0-9]+$ ]]; then + echo "timeout must be an integer number of seconds" >&2 + exit 1 +fi + +if ! [[ "$lines" =~ ^[0-9]+$ ]]; then + echo "lines must be an integer" >&2 + exit 1 +fi + +if ! 
command -v tmux >/dev/null 2>&1; then + echo "tmux not found in PATH" >&2 + exit 1 +fi + +# End time in epoch seconds (integer, good enough for polling) +start_epoch=$(date +%s) +deadline=$((start_epoch + timeout)) + +while true; do + # -J joins wrapped lines, -S uses negative index to read last N lines + pane_text="$(tmux capture-pane -p -J -t "$target" -S "-${lines}" 2>/dev/null || true)" + + if printf '%s\n' "$pane_text" | grep $grep_flag -- "$pattern" >/dev/null 2>&1; then + exit 0 + fi + + now=$(date +%s) + if (( now >= deadline )); then + echo "Timed out after ${timeout}s waiting for pattern: $pattern" >&2 + echo "Last ${lines} lines from $target:" >&2 + printf '%s\n' "$pane_text" >&2 + exit 1 + fi + + sleep "$interval" +done diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 000000000..6dbfd409a --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,202 @@ +**ALWAYS RESPOND IN CHINESE-SIMPLIFIED** + +## 本地开发环境 + +### 数据库访问 + +本地开发时,数据库连接信息可以通过以下任意方式提供(优先级从高到低): +- shell 环境变量(直接 export 或写入 `~/.zshrc` / `~/.bashrc`) +- `~/.env` 文件(`scripts/run.sh` 启动时会自动 source) + +需要包含以下变量: +- `DB_HOST`:数据库地址 +- `DB_PORT`:端口(默认 3306) +- `DB_NAME`:数据库名 +- `DB_USERNAME`:用户名 +- `DB_PASSWORD`:密码 + +查询数据库时,使用 mysql CLI(环境变量已在 shell 中或通过 `~/.env` 加载): + +```bash +mysql -h "$DB_HOST" -P "$DB_PORT" -u "$DB_USERNAME" -p"$DB_PASSWORD" "$DB_NAME" -e "YOUR_SQL_HERE" +``` + +注意事项: +- 只执行 SELECT 查询,除非用户明确要求修改数据 +- 不要在回复中展示完整的密码、密钥等敏感字段 +- 数据库 schema 由 Flyway 管理,迁移文件在 `himarket-bootstrap/src/main/resources/db/migration/` + +### 启动后端服务 + +使用 `scripts/run.sh` 脚本编译并启动 Java 后端: + +```bash +./scripts/run.sh +``` + +脚本会自动完成:加载环境变量 → 优雅关闭旧进程 → 编译打包 → 后台启动 jar → 轮询等待就绪。 +脚本退出码为 0 表示启动成功,非 0 表示失败(编译错误或启动超时)。 + +### 修改代码后的验证 + +以下场景建议主动进行"重启 → 接口验证"闭环,而不是只改代码就结束: +- 用户明确要求调试某个 bug 或修复接口问题 +- 新增或修改了 REST/WebSocket 接口 +- 用户要求端到端验证 +- 完成 spec 任务的代码开发后,进行端到端功能验证 + +#### 验证流程 + +1. `./scripts/run.sh` 重启,确认退出码为 0 +2. 用 curl 调用相关接口,检查返回结果 +3. 如果涉及数据变更,用 mysql CLI 查询确认 +4. 
验证失败时读取 `~/himarket.log` 排查,修复后重试 + +### API 接口测试 + +后端运行在 `http://localhost:8080`,接口路径不带 `/portal` 前缀。使用 JWT Bearer Token 认证。 + +接口返回格式为 `{"code":"SUCCESS","data":{...}}`,token 在 `data.access_token` 中。 + +#### 获取管理员 Token(后台管理) + +```bash +TOKEN=$(curl -s -X POST http://localhost:8080/admins/login \ + -H "Content-Type: application/json" \ + -d '{"username":"admin","password":"admin"}' | jq -r '.data.access_token') +``` + +#### 获取开发者 Token(前台门户) + +```bash +TOKEN=$(curl -s -X POST http://localhost:8080/developers/login \ + -H "Content-Type: application/json" \ + -d '{"username":"user","password":"123456"}' | jq -r '.data.access_token') +``` + +#### 带认证请求示例 + +```bash +curl -s -H "Authorization: Bearer $TOKEN" http://localhost:8080/your-endpoint | jq . +``` + +#### WebSocket 接口验证 + +对于 WebSocket 接口,使用 `websocat` 工具: + +```bash +websocat -H "Authorization: Bearer $TOKEN" ws://localhost:8080/your-ws-endpoint +``` + +#### 认证注解说明 + +接口上的注解决定了需要哪种角色的 token: +- `@AdminAuth`:需要管理员 token +- `@DeveloperAuth`:需要开发者 token +- `@AdminOrDeveloperAuth`:两种都可以 +- 无注解:无需认证 + +Token 有效期为 7 天。Swagger 文档:`http://localhost:8080/portal/swagger-ui.html` + +### 应用日志 + +本地运行时日志文件位于 `~/himarket.log`。排查后端问题时应主动读取该日志。 + +## OpenSandbox 集成 + +HiMarket 集成了阿里巴巴开源的 OpenSandbox 项目,用于提供安全的代码执行沙箱环境。 + +### 项目位置 + +OpenSandbox 仓库位于 `OpenSandbox/` 目录(本地 clone,不提交到 git)。 + +**首次设置:** +```bash +cd /Users/xujingfeng/IdeaProjects/himarket +git clone https://github.com/alibaba/OpenSandbox.git +``` + +该目录已在 `.gitignore` 中配置,不会被提交到版本控制,但 AI Agent 可以正常访问和探索其中的源码和文档。 + +### 渐进性探索指南 + +当需要对接或调试 OpenSandbox 相关功能时,按以下顺序探索: + +1. **快速了解**:阅读 `OpenSandbox/README.md` 了解项目概述、核心功能和基本用法 +2. **开发指导**: + - `OpenSandbox/CLAUDE.md` - Claude Code 的开发指导(中文) + - `OpenSandbox/AGENTS.md` - AI Agent 的仓库指南 +3. **架构文档**:`OpenSandbox/docs/architecture.md` - 整体架构和设计理念 +4. 
**关键目录**: + - `OpenSandbox/server/` - Python FastAPI 沙箱生命周期管理服务 + - `OpenSandbox/sdks/` - 多语言 SDK(Python、Java/Kotlin、TypeScript、C#) + - `OpenSandbox/components/execd/` - Go 执行守护进程 + - `OpenSandbox/examples/` - 集成示例(包括 claude-code、kimi-cli 等) + - `OpenSandbox/specs/` - OpenAPI 规范文档 + - `OpenSandbox/kubernetes/` - Kubernetes 部署和 Operator + +### 何时探索 OpenSandbox + +仅在以下场景需要深入探索 OpenSandbox 源码和文档: +- 实现或调试沙箱创建、生命周期管理功能 +- 集成代码执行、命令执行、文件操作等沙箱能力 +- 排查沙箱相关的错误或性能问题 +- 扩展或定制沙箱运行时行为 +- 对接 OpenSandbox 的 API 或 SDK + +对于其他 HiMarket 功能开发,无需关注 OpenSandbox 目录。 + +## Nacos 集成 + +HiMarket 使用阿里巴巴开源的 Nacos 作为服务发现和配置管理基础设施。本地通过符号链接引入了 Nacos 源码仓库,方便 AI Agent 理解 Nacos 内部实现。 + +### 项目位置 + +Nacos 源码位于 `nacos/` 目录(本地符号链接,指向 `/Users/xujingfeng/AIProjects/nacos`,不提交到 git)。 + +**首次设置:** +```bash +cd /Users/xujingfeng/IdeaProjects/himarket +ln -s /Users/xujingfeng/AIProjects/nacos nacos +``` + +该目录已在 `.gitignore` 中配置,不会被提交到版本控制,但 AI Agent 可以正常访问和探索其中的源码和文档。 + +### 渐进性探索指南 + +当需要对接或调试 Nacos 相关功能时,按以下顺序探索: + +1. **快速了解**:阅读 `nacos/README.md` 了解项目概述(动态服务发现、配置管理、DNS 服务) +2. **架构文档**:`nacos/doc/` 目录下的设计文档 +3. **关键模块**: + - `nacos/api/` - Nacos 公共 API 定义(SPI 接口、模型类) + - `nacos/client/` - Java 客户端 SDK(服务注册/发现、配置监听) + - `nacos/naming/` - 服务注册与发现核心实现 + - `nacos/config/` - 配置管理核心实现 + - `nacos/server/` - Nacos Server 启动入口 + - `nacos/console/` - 管理控制台后端 + - `nacos/console-ui/` - 管理控制台前端 + - `nacos/core/` - 核心通用模块(集群、鉴权、分布式协议) + - `nacos/consistency/` - 一致性协议(Raft/Distro) + - `nacos/auth/` - 认证鉴权模块 + - `nacos/plugin/` - 插件体系(鉴权、配置加密、数据源等) + - `nacos/persistence/` - 持久化层 + - `nacos/distribution/` - 打包和发布配置 +4. 
**高级主题**(特定场景): + - `nacos/mcp-registry-adaptor/` - MCP 注册适配器 + - `nacos/istio/` - Istio 集成(MCP/xDS 协议) + - `nacos/k8s-sync/` - Kubernetes 服务同步 + - `nacos/ai/` - AI 相关能力 + - `nacos/skills/` - Skill 市场能力 + +### 何时探索 Nacos + +仅在以下场景需要深入探索 Nacos 源码和文档: +- 实现或调试 HiMarket 与 Nacos 的服务注册/发现集成 +- 对接 Nacos 配置管理能力(动态配置推送、监听) +- 排查 Nacos 客户端连接、心跳、同步等问题 +- 理解 Nacos 的一致性协议(Raft/Distro)实现细节 +- 扩展 Nacos 插件(鉴权、数据源、配置加密等) +- 对接 Nacos 的 Open API 或使用其 Java SDK + +对于其他 HiMarket 功能开发,无需关注 Nacos 目录。 diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 000000000..652cc511e --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,366 @@ +# CLAUDE.md + +本文件为 Claude Code (claude.ai/code) 在处理本仓库代码时提供指导。 + +**ALWAYS RESPOND IN CHINESE-SIMPLIFIED** + +## 项目概述 + +HiMarket 是基于 Higress AI 网关构建的企业级 AI 开放平台,帮助企业构建私有 AI 能力市场,管理和分发 AI 资源(LLM、MCP Server、Agent、Agent Skill)。 + +**仓库结构:** +- `himarket-bootstrap/` - Spring Boot 应用入口和配置 +- `himarket-server/` - 业务逻辑、REST 控制器、服务 +- `himarket-dal/` - 数据访问层(JPA 实体、仓储) +- `himarket-web/himarket-admin/` - 管理门户前端(React + Vite) +- `himarket-web/himarket-frontend/` - 开发者门户前端(React + TypeScript) +- `deploy/` - Docker Compose 和 Helm 部署配置 + +## 开发命令 + +### 后端(Java 17 + Maven) + +```bash +# 构建所有模块,跳过测试 +mvn clean package -DskipTests + +# 运行测试(默认排除集成测试) +mvn test + +# 运行所有测试,包括集成测试 +mvn test -Pintegration + +# 检查代码格式(Spotless + Google Java Format) +mvn spotless:check + +# 应用代码格式化 +mvn spotless:apply + +# 本地启动后端(加载 ~/.env,杀掉 8080 端口旧进程,编译,启动 Spring Boot) +./scripts/run.sh +``` + +**后端运行地址:** `http://localhost:8080` +**Swagger UI:** `http://localhost:8080/portal/swagger-ui.html` +**日志文件:** `~/himarket.log` + +### 前端 - 管理门户(`himarket-web/himarket-admin/`) + +```bash +npm run dev # 开发服务器(端口 5174) +npm run build # 生产环境构建 +npm run lint # ESLint 检查 +npm run serve # 构建并启动生产服务器 +``` + +### 前端 - 开发者门户(`himarket-web/himarket-frontend/`) + +```bash +npm run dev # 开发服务器(端口 5173) +npm run build # 类型检查和生产环境构建 +npm run lint # ESLint 检查 +npm run type-check # 仅进行 TypeScript 类型检查 +npm run test # 
运行 Vitest 测试 +npm run preview # 预览生产环境构建 +``` + +## 本地开发环境设置 + +### 数据库配置 + +数据库连接可通过以下方式配置(优先级从高到低): +1. Shell 环境变量(直接 export 或写入 `~/.zshrc` / `~/.bashrc`) +2. `~/.env` 文件(`scripts/run.sh` 自动加载) + +必需的环境变量: +```bash +DB_HOST=your_db_host +DB_PORT=3306 +DB_NAME=himarket +DB_USERNAME=your_username +DB_PASSWORD=your_password +``` + +**查询数据库:** +```bash +mysql -h "$DB_HOST" -P "$DB_PORT" -u "$DB_USERNAME" -p"$DB_PASSWORD" "$DB_NAME" -e "YOUR_SQL_HERE" +``` + +注意事项: +- 只执行 SELECT 查询,除非用户明确要求修改数据 +- 不要在回复中展示完整的密码、密钥等敏感字段 +- 数据库 schema 由 Flyway 管理,迁移文件在 `himarket-bootstrap/src/main/resources/db/migration/` + +### API 认证 + +所有 API 端点都需要 JWT Bearer Token 认证(登录/注册端点除外)。 + +**接口返回格式:** +```json +{"code":"SUCCESS","data":{...}} +``` +Token 在 `data.access_token` 中,有效期为 7 天。 + +**获取管理员 Token:** +```bash +# 基础方式 +curl -X POST http://localhost:8080/admins/login \ + -H "Content-Type: application/json" \ + -d '{"username":"admin","password":"admin"}' + +# 自动提取 token(推荐) +TOKEN=$(curl -s -X POST http://localhost:8080/admins/login \ + -H "Content-Type: application/json" \ + -d '{"username":"admin","password":"admin"}' | jq -r '.data.access_token') +``` + +**获取开发者 Token:** +```bash +# 基础方式 +curl -X POST http://localhost:8080/developers/login \ + -H "Content-Type: application/json" \ + -d '{"username":"user","password":"123456"}' + +# 自动提取 token(推荐) +TOKEN=$(curl -s -X POST http://localhost:8080/developers/login \ + -H "Content-Type: application/json" \ + -d '{"username":"user","password":"123456"}' | jq -r '.data.access_token') +``` + +**使用 Token:** +```bash +curl -s -H "Authorization: Bearer $TOKEN" http://localhost:8080/your-endpoint | jq . 
+``` + +**WebSocket 接口验证:** +```bash +websocat -H "Authorization: Bearer $TOKEN" ws://localhost:8080/your-ws-endpoint +``` + +### 认证注解 + +控制器方法使用注解来强制认证: +- `@AdminAuth` - 需要管理员 token +- `@DeveloperAuth` - 需要开发者 token +- `@AdminOrDeveloperAuth` - 接受任意一种 token +- 无注解 - 公开端点 + +### 启动后端服务 + +使用 `scripts/run.sh` 脚本编译并启动 Java 后端: + +```bash +./scripts/run.sh +``` + +脚本会自动完成:加载环境变量 → 优雅关闭旧进程 → 编译打包 → 后台启动 jar → 轮询等待就绪。 +脚本退出码为 0 表示启动成功,非 0 表示失败(编译错误或启动超时)。 + +### 应用日志 + +本地运行时日志文件位于 `~/himarket.log`。排查后端问题时应主动读取该日志。 + +## 高层架构 + +### 模块依赖 + +``` +himarket-bootstrap (入口点) + ├── himarket-server (业务逻辑) + │ └── himarket-dal (数据访问) + └── himarket-dal +``` + +### 核心领域实体 + +**产品管理:** +- `Product` - 代表 API 产品的核心实体(REST_API、MCP_SERVER、AGENT_API、MODEL_API) +- `ProductPublication` - 产品的已发布版本及路由配置 +- `ProductSubscription` - 开发者对产品的订阅 +- `ProductCategory` / `ProductCategoryRelation` - 产品分类 + +**身份与访问:** +- `Administrator` - 门户管理的管理员用户 +- `Developer` - 订阅 API 的开发者用户 +- `DeveloperExternalIdentity` - OIDC/OAuth 关联身份 +- `Consumer` / `ConsumerCredential` - 应用及其 API 凭证 + +**基础设施:** +- `Portal` / `PortalDomain` - 门户配置和自定义域名 +- `Gateway` - 网关配置(Higress、APIG 等) +- `NacosInstance` - Nacos 服务发现配置 +- `K8sCluster` - Kubernetes 集群配置 + +**对话式 AI:** +- `ChatSession` / `Chat` / `ChatAttachment` - AI 聊天会话和消息 + +### 关键服务模式 + +**网关集成(`service/gateway/`):** +- `AIGWOperator` - AI 网关操作的抽象接口 +- `client/GatewayClient` - 网关客户端实现 +- 不同网关实现:Higress、阿里云 APIG、MSE + +**事件驱动清理:** +- `DeveloperDeletingEvent` / `PortalDeletingEvent` / `ProductDeletingEvent` +- 事件监听器处理级联删除(凭证、订阅等) + +### 产品类型 + +产品有一个 `type` 字段,类型特定的配置以 JSON 格式存储: + +1. **REST_API** - 传统 REST API + - 配置:`apiConfig.spec`(OpenAPI/Swagger 规范) + +2. **MCP_SERVER** - Model Context Protocol 服务器 + - 配置:`mcpConfig`,包含服务器名称、域名、工具 + +3. **AGENT_API** - AI 智能体 API + - 配置:`agentConfig.agentAPIConfig`,包含路由和协议 + +4. 
**MODEL_API** - AI 模型 API + - 配置:`modelConfig.modelAPIConfig`,包含模型类别和路由 + +### 数据库迁移 + +Flyway 管理 `himarket-bootstrap/src/main/resources/db/migration/` 中的模式迁移: +- `V1__init.sql` - 初始模式 +- `V2__*.sql` - 后续迁移 + +## 代码风格 + +**Java:** +- Google Java Format(AOSP 风格)通过 Spotless +- 自动导入排序和删除未使用的导入 +- 在 `mvn compile` 时自动运行 + +**前端:** +- ESLint 配合 React hooks 和 refresh 插件 +- TypeScript 严格模式 +- Prettier 格式化 + +## OpenSandbox 集成 + +HiMarket 集成了阿里巴巴开源的 OpenSandbox 项目,为 AI 应用提供安全的沙箱执行环境。 + +### 项目位置 + +OpenSandbox 仓库位于 `OpenSandbox/` 目录(本地 clone,不提交到 git)。 + +**首次设置:** +```bash +cd /Users/xujingfeng/IdeaProjects/himarket +git clone https://github.com/alibaba/OpenSandbox.git +``` + +该目录已在 `.gitignore` 中配置,不会被提交到版本控制,但 Claude Code 可以正常访问和探索其中的源码和文档。 + +### 渐进性探索指南 + +**仅在需要对接或调试 OpenSandbox 功能时才探索此目录。** 按以下顺序渐进式学习: + +#### 第一层:快速了解 +- `OpenSandbox/README.md` - 项目概述、核心功能、快速开始 + +#### 第二层:开发指导 +- `OpenSandbox/CLAUDE.md` - Claude Code 的详细开发指导(中文) +- `OpenSandbox/AGENTS.md` - AI Agent 的仓库指南 +- `OpenSandbox/docs/architecture.md` - 整体架构和设计理念 + +#### 第三层:核心组件(按需探索) +- **Server**:`OpenSandbox/server/` - Python FastAPI 沙箱生命周期管理服务 + - 配置:`~/.sandbox.toml`(从 `server/example.config.toml` 复制) + - 启动:`opensandbox-server` 或 `cd server && uv run python -m src.main` +- **SDKs**:`OpenSandbox/sdks/` - 多语言客户端库 + - `sdks/sandbox/` - 基础沙箱 SDK(生命周期、命令、文件) + - `sdks/code-interpreter/` - 代码解释器 SDK + - 支持语言:Python、Java/Kotlin、JavaScript/TypeScript、C#/.NET +- **execd**:`OpenSandbox/components/execd/` - Go 执行守护进程 + - 注入到沙箱容器中,提供代码执行、命令和文件操作 +- **Examples**:`OpenSandbox/examples/` - 集成示例 + - `examples/claude-code/` - Claude Code 集成示例 + - `examples/code-interpreter/` - 代码解释器示例 + - `examples/kimi-cli/`、`examples/gemini-cli/` 等 - 其他 AI CLI 集成 + +#### 第四层:高级主题(特定场景) +- **Kubernetes**:`OpenSandbox/kubernetes/` - K8s 部署和自定义 Operator +- **Specs**:`OpenSandbox/specs/` - OpenAPI 规范(沙箱生命周期 API、执行 API) +- **Components**:`OpenSandbox/components/` - Ingress 网关、Egress 控制 +- **OSEPs**:`OpenSandbox/oseps/` - 架构提案和设计文档 + 
+### 何时探索 OpenSandbox + +仅在以下场景需要深入探索: +- 实现沙箱创建、启动、停止等生命周期管理 +- 集成代码执行、命令执行、文件操作等沙箱能力 +- 调试沙箱相关的错误或性能问题 +- 扩展或定制沙箱运行时行为 +- 对接 OpenSandbox 的 REST API 或使用其 SDK + +**对于其他 HiMarket 功能开发(产品管理、用户认证、网关配置等),无需关注 OpenSandbox 目录。** + +## Nacos 集成 + +HiMarket 使用阿里巴巴开源的 Nacos 作为服务发现和配置管理基础设施。本地通过符号链接引入了 Nacos 源码仓库,方便理解 Nacos 内部实现。 + +### 项目位置 + +Nacos 源码位于 `nacos/` 目录(本地符号链接,指向 `/Users/xujingfeng/AIProjects/nacos`,不提交到 git)。 + +**首次设置:** +```bash +cd /Users/xujingfeng/IdeaProjects/himarket +ln -s /Users/xujingfeng/AIProjects/nacos nacos +``` + +该目录已在 `.gitignore` 中配置,不会被提交到版本控制,但 Claude Code 可以正常访问和探索其中的源码和文档。 + +### 渐进性探索指南 + +**仅在需要对接或调试 Nacos 功能时才探索此目录。** 按以下顺序渐进式学习: + +#### 第一层:快速了解 +- `nacos/README.md` - 项目概述、核心功能(动态服务发现、配置管理、DNS 服务) + +#### 第二层:架构与设计 +- `nacos/doc/` - 设计文档和架构说明 + +#### 第三层:核心模块(按需探索) +- **API**:`nacos/api/` - 公共 API 定义(SPI 接口、模型类、异常定义) +- **Client**:`nacos/client/` - Java 客户端 SDK + - 服务注册/发现、配置监听、长连接管理 +- **Naming**:`nacos/naming/` - 服务注册与发现核心实现 + - 服务实例管理、健康检查、路由策略 +- **Config**:`nacos/config/` - 配置管理核心实现 + - 配置发布/订阅、灰度发布、历史版本 +- **Server**:`nacos/server/` - Nacos Server 启动入口 +- **Console**:`nacos/console/` + `nacos/console-ui/` - 管理控制台(后端 + 前端) +- **Core**:`nacos/core/` - 核心通用模块(集群管理、鉴权、分布式协议) +- **Consistency**:`nacos/consistency/` - 一致性协议实现(Raft/Distro) +- **Auth**:`nacos/auth/` - 认证鉴权模块 +- **Plugin**:`nacos/plugin/` - 插件体系(鉴权、配置加密、数据源等) +- **Persistence**:`nacos/persistence/` - 持久化层 + +#### 第四层:高级主题(特定场景) +- **MCP 适配**:`nacos/mcp-registry-adaptor/` - MCP 注册适配器 +- **Istio 集成**:`nacos/istio/` - Istio MCP/xDS 协议对接 +- **K8s 同步**:`nacos/k8s-sync/` - Kubernetes 服务同步 +- **AI 能力**:`nacos/ai/` - AI 相关能力 +- **Skill 市场**:`nacos/skills/` - Skill 市场能力 +- **Distribution**:`nacos/distribution/` - 打包和发布配置 + +### 何时探索 Nacos + +仅在以下场景需要深入探索: +- 实现或调试 HiMarket 与 Nacos 的服务注册/发现集成 +- 对接 Nacos 配置管理能力(动态配置推送、监听) +- 排查 Nacos 客户端连接、心跳、同步等问题 +- 理解 Nacos 的一致性协议(Raft/Distro)实现细节 +- 扩展 Nacos 插件(鉴权、数据源、配置加密等) +- 对接 Nacos 的 Open API 或使用其 Java SDK + +**对于其他 HiMarket 
功能开发(产品管理、用户认证、网关配置等),无需关注 Nacos 目录。** + +## 其他文档 + +- 用户指南:`USER_GUIDE.md` diff --git a/README.md b/README.md index a8fbf8b2a..e03cee90e 100644 --- a/README.md +++ b/README.md @@ -37,7 +37,7 @@ ## What is HiMarket? -HiMarket is an enterprise-grade AI open platform built on Higress AI Gateway, helping enterprises build private AI capability marketplace to uniformly manage and distribute AI resources such as LLM, MCP Server, and Agent. The platform encapsulates distributed AI capabilities into standardized API products, supports multi-version management and gray-scale release, provides self-service developer portal, and features comprehensive enterprise-level operation capabilities including security control, observability analysis, metering and billing, making AI resource sharing and reuse efficient and convenient. +HiMarket is an enterprise-grade AI open platform built on Higress AI Gateway, helping enterprises build private AI capability marketplace to uniformly manage and distribute AI resources such as LLM, MCP Server, Agent, and Agent Skill. The platform encapsulates distributed AI capabilities into standardized API products, supports multi-version management and gray-scale release, provides self-service developer portal, and features comprehensive enterprise-level operation capabilities including security control, observability analysis, metering and billing, making AI resource sharing and reuse efficient and convenient.
HiMarket 核心能力 @@ -56,7 +56,7 @@ HiMarket is an enterprise-grade AI open platform built on Higress AI Gateway, he HiMarket system architecture consists of three layers: 1. **Infrastructure**: Composed of AI Gateway, API Gateway, Higress and Nacos. HiMarket abstracts and encapsulates underlying AI resources based on these components to form standard API products for external use. -2. **AI Open Platform Admin**: Management platform for administrators to create and customize portals, manage AI resources such as MCP Server, Model, and Agent, including setting authentication policies and subscription approval workflows. The admin portal also provides observability dashboard to help administrators monitor AI resource usage and operational status in real-time. +2. **AI Open Platform Admin**: Management platform for administrators to create and customize portals, manage AI resources such as MCP Server, Model, Agent, and Agent Skill, including setting authentication policies and subscription approval workflows. The admin portal also provides observability dashboard to help administrators monitor AI resource usage and operational status in real-time. 3. **AI Open Platform Portal**: Developer-facing portal site, also known as AI Marketplace or AI Hub, providing one-stop self-service where developers can complete identity registration, credential application, product browsing and subscription, online debugging, and more. diff --git a/README_zh.md b/README_zh.md index 78c8b073b..57bec3271 100644 --- a/README_zh.md +++ b/README_zh.md @@ -37,7 +37,7 @@ ## HiMarket 是什么? -HiMarket 是基于 Higress AI 网关构建的企业级 AI 开放平台,帮助企业构建私有 AI 能力市场,统一管理和分发 LLM、MCP Server、Agent 等 AI 资源。平台将分散的 AI 能力封装为标准化的 API 产品,支持多版本管理和灰度发布,提供自助式开发者门户,并具备安全管控、观测分析、计量计费等完整的企业级运营能力,让 AI 资源的共享和复用变得高效便捷。 +HiMarket 是基于 Higress AI 网关构建的企业级 AI 开放平台,帮助企业构建私有 AI 能力市场,统一管理和分发 LLM、MCP Server、Agent、Agent Skill 等 AI 资源。平台将分散的 AI 能力封装为标准化的 API 产品,支持多版本管理和灰度发布,提供自助式开发者门户,并具备安全管控、观测分析、计量计费等完整的企业级运营能力,让 AI 资源的共享和复用变得高效便捷。
HiMarket 核心能力 @@ -56,7 +56,7 @@ HiMarket 是基于 Higress AI 网关构建的企业级 AI 开放平台,帮助 HiMarket 系统架构分为三层: 1. **基础设施**:由 AI 网关、API 网关、Higress 和 Nacos 组成。HiMarket 基于这些组件对底层 AI 资源进行抽象封装,形成可对外开放的标准 API 产品。 -2. **AI 开放平台后台**:面向管理员的管理平台,管理员可以创建和定制门户,管理 MCP Server、Model、Agent 等 AI 资源,例如设置鉴权策略、订阅审批流程等。后台还提供可观测大盘,帮助管理员实时了解 AI 资源的使用和运行状态。 +2. **AI 开放平台后台**:面向管理员的管理平台,管理员可以创建和定制门户,管理 MCP Server、Model、Agent、Agent Skill 等 AI 资源,例如设置鉴权策略、订阅审批流程等。后台还提供可观测大盘,帮助管理员实时了解 AI 资源的使用和运行状态。 3. **AI 开放平台前台**:面向外部开发者的门户站点,也称为 AI 市场或 AI 中台,提供一站式自助服务,开发者可以完成身份注册、凭证申请、浏览订阅产品、在线调试等操作。
diff --git "a/deploy/docker/Docker\351\203\250\347\275\262\350\204\232\346\234\254\350\257\264\346\230\216.md" "b/deploy/docker/Docker\351\203\250\347\275\262\350\204\232\346\234\254\350\257\264\346\230\216.md" index 1447fd3af..e0838a224 100644 --- "a/deploy/docker/Docker\351\203\250\347\275\262\350\204\232\346\234\254\350\257\264\346\230\216.md" +++ "b/deploy/docker/Docker\351\203\250\347\275\262\350\204\232\346\234\254\350\257\264\346\230\216.md" @@ -58,7 +58,7 @@ vi .env #### 1. 镜像配置 ```bash # Nacos -NACOS_IMAGE=nacos-registry.cn-hangzhou.cr.aliyuncs.com/nacos/nacos-server:v3.1.1-slim +NACOS_IMAGE=nacos-registry.cn-hangzhou.cr.aliyuncs.com/nacos/nacos-server:v3.2.0-BETA-slim # Higress HIGRESS_IMAGE=higress-registry.cn-hangzhou.cr.aliyuncs.com/higress/all-in-one:latest diff --git a/deploy/docker/docker-compose.yml b/deploy/docker/docker-compose.yml index e6791dda0..07f0437ba 100644 --- a/deploy/docker/docker-compose.yml +++ b/deploy/docker/docker-compose.yml @@ -31,7 +31,7 @@ services: # Nacos Server - 仅在未启用商业化 Nacos 时部署 nacos: - image: ${NACOS_IMAGE:-nacos-registry.cn-hangzhou.cr.aliyuncs.com/nacos/nacos-server:v3.1.1} + image: ${NACOS_IMAGE:-nacos-registry.cn-hangzhou.cr.aliyuncs.com/nacos/nacos-server:v3.2.0-BETA} container_name: nacos-standalone environment: - PREFER_HOST_MODE=hostname @@ -91,6 +91,18 @@ services: profiles: - higress-gateway + # Sandbox Shared (远程沙箱) + sandbox-shared: + image: ${SANDBOX_IMAGE:-opensource-registry.cn-hangzhou.cr.aliyuncs.com/higress-group/sandbox:latest} + container_name: sandbox-shared + environment: + - ALLOWED_COMMANDS=${SANDBOX_ALLOWED_COMMANDS:-qodercli,qwen,claude-agent-acp,opencode} + volumes: + - ./data/sandbox-workspace:/workspace + networks: + - himarket-network + restart: always + # Himarket Server himarket-server: image: ${HIMARKET_SERVER_IMAGE:-opensource-registry.cn-hangzhou.cr.aliyuncs.com/higress-group/himarket-server:latest} @@ -101,8 +113,13 @@ services: - DB_NAME=${DB_NAME:-portal_db} - 
DB_USERNAME=${DB_USERNAME:-portal_user} - DB_PASSWORD=${DB_PASSWORD:-portal_pass} + - ACP_REMOTE_HOST=${ACP_REMOTE_HOST:-sandbox-shared} + - ACP_REMOTE_PORT=${ACP_REMOTE_PORT:-8080} + - ACP_DEFAULT_RUNTIME=${ACP_DEFAULT_RUNTIME:-remote} ports: - "8081:8080" + depends_on: + - sandbox-shared networks: - himarket-network restart: always @@ -133,4 +150,6 @@ services: - himarket-server networks: - himarket-network - restart: always \ No newline at end of file + restart: always + + diff --git a/deploy/docker/scripts/data/.env b/deploy/docker/scripts/data/.env index 011610ceb..1fc9a20cd 100644 --- a/deploy/docker/scripts/data/.env +++ b/deploy/docker/scripts/data/.env @@ -1,6 +1,6 @@ # Nacos Configuration # 对于使用 Arm 芯片(如 M1/M2/M3 系列)的 Mac 用户,需要在支持 arm arch 的镜像版本后添加 -slim。 -NACOS_IMAGE=nacos-registry.cn-hangzhou.cr.aliyuncs.com/nacos/nacos-server:v3.1.1 +NACOS_IMAGE=nacos-registry.cn-hangzhou.cr.aliyuncs.com/nacos/nacos-server:v3.2.0-BETA NACOS_AUTH_IDENTITY_KEY=serverIdentity NACOS_AUTH_IDENTITY_VALUE=security NACOS_AUTH_TOKEN=VGhpc0lzTXlDdXN0b21TZWNyZXRLZXkwMTIzNDU2Nzg= diff --git "a/deploy/helm/Helm\351\203\250\347\275\262\350\204\232\346\234\254\350\257\264\346\230\216.md" "b/deploy/helm/Helm\351\203\250\347\275\262\350\204\232\346\234\254\350\257\264\346\230\216.md" index 09659e2e2..f015260a8 100644 --- "a/deploy/helm/Helm\351\203\250\347\275\262\350\204\232\346\234\254\350\257\264\346\230\216.md" +++ "b/deploy/helm/Helm\351\203\250\347\275\262\350\204\232\346\234\254\350\257\264\346\230\216.md" @@ -100,7 +100,7 @@ USE_COMMERCIAL_NACOS=false #### 5. 
Nacos 配置 ```bash # 开源 Nacos 版本 -NACOS_VERSION=v3.1.1 +NACOS_VERSION=v3.2.0-BETA NACOS_ADMIN_PASSWORD=nacos # 商业化 Nacos 配置(当 USE_COMMERCIAL_NACOS=true 时必填) diff --git a/deploy/helm/charts/nacos/.helmignore b/deploy/helm/charts/nacos/.helmignore new file mode 100644 index 000000000..eb4df70b6 --- /dev/null +++ b/deploy/helm/charts/nacos/.helmignore @@ -0,0 +1,4 @@ +nacos-setup/ +nacos-installer.sh +init-db.sh +sql/ diff --git a/deploy/helm/charts/nacos/Chart.yaml b/deploy/helm/charts/nacos/Chart.yaml new file mode 100644 index 000000000..7d5bf5109 --- /dev/null +++ b/deploy/helm/charts/nacos/Chart.yaml @@ -0,0 +1,6 @@ +apiVersion: v2 +name: nacos +description: Nacos Server - 动态服务发现、配置管理平台 +type: application +version: 0.1.0 +appVersion: "3.2.0-BETA" diff --git a/deploy/helm/charts/nacos/init-db.sh b/deploy/helm/charts/nacos/init-db.sh new file mode 100755 index 000000000..c673617b5 --- /dev/null +++ b/deploy/helm/charts/nacos/init-db.sh @@ -0,0 +1,206 @@ +#!/usr/bin/env bash +# +# Nacos 数据库初始化脚本 +# 用法: ./init-db.sh -h -P -u -p -d +# 依赖: mysql client +# +# 功能: +# 1. 创建数据库(如不存在) +# 2. 执行 mysql-schema.sql 建表(幂等,CREATE TABLE IF NOT EXISTS) +# 3. 
执行 sql/upgrade/*.sql 升级脚本(如有,按文件名排序) + +set -euo pipefail + +# ============================================================================ +# 默认值(可通过命令行参数或环境变量覆盖) +# ============================================================================ + +DB_HOST="${DB_HOST:-}" +DB_PORT="${DB_PORT:-3306}" +DB_USERNAME="${DB_USERNAME:-}" +DB_PASSWORD="${DB_PASSWORD:-}" +DB_NAME="${DB_NAME:-nacos}" + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +SQL_DIR="${SCRIPT_DIR}/sql" +SCHEMA_FILE="${SQL_DIR}/mysql-schema.sql" +UPGRADE_DIR="${SQL_DIR}/upgrade" + +MAX_WAIT=60 +INTERVAL=3 + +# ============================================================================ +# 颜色输出 +# ============================================================================ + +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +NC='\033[0m' + +info() { echo -e "${GREEN}[INFO]${NC} $1"; } +warn() { echo -e "${YELLOW}[WARN]${NC} $1"; } +error() { echo -e "${RED}[ERROR]${NC} $1"; } + +# ============================================================================ +# 参数解析 +# ============================================================================ + +usage() { + cat <&1 +} + +wait_for_mysql() { + info "等待 MySQL 连接 ${DB_HOST}:${DB_PORT} ..." + local elapsed=0 + + while [[ $elapsed -lt $MAX_WAIT ]]; do + if mysql_cmd -e "SELECT 1" >/dev/null 2>&1; then + info "MySQL 连接成功" + return 0 + fi + sleep "$INTERVAL" + elapsed=$((elapsed + INTERVAL)) + echo -n "." + done + + echo "" + error "MySQL 连接超时(${MAX_WAIT}s): ${DB_HOST}:${DB_PORT}" + exit 1 +} + +# ============================================================================ +# 主逻辑 +# ============================================================================ + +create_database() { + info "创建数据库(如不存在): ${DB_NAME}" + mysql_cmd -e "CREATE DATABASE IF NOT EXISTS \`${DB_NAME}\` DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;" + info "数据库就绪: ${DB_NAME}" +} + +execute_schema() { + if [[ ! 
-f "$SCHEMA_FILE" ]]; then + error "Schema 文件不存在: ${SCHEMA_FILE}" + exit 1 + fi + + info "执行 Schema: $(basename "$SCHEMA_FILE")" + mysql_cmd "$DB_NAME" < "$SCHEMA_FILE" + info "Schema 执行完成" +} + +execute_upgrades() { + if [[ ! -d "$UPGRADE_DIR" ]]; then + return + fi + + local sql_files + sql_files=$(find "$UPGRADE_DIR" -maxdepth 1 -name '*.sql' -type f 2>/dev/null | sort) + + if [[ -z "$sql_files" ]]; then + info "无升级 SQL 需要执行" + return + fi + + info "执行升级 SQL ..." + while IFS= read -r sql_file; do + info " -> $(basename "$sql_file")" + mysql_cmd "$DB_NAME" < "$sql_file" + done <<< "$sql_files" + info "升级 SQL 执行完成" +} + +show_summary() { + local table_count + table_count=$(mysql_cmd "$DB_NAME" -N -e "SELECT COUNT(*) FROM information_schema.tables WHERE table_schema='${DB_NAME}';") + + echo "" + info "============================================" + info "Nacos 数据库初始化完成" + info "============================================" + info " 地址: ${DB_HOST}:${DB_PORT}" + info " 库名: ${DB_NAME}" + info " 表数: ${table_count}" + info "============================================" + echo "" +} + +main() { + parse_args "$@" + validate_args + + # 检查 mysql client + if ! 
command -v mysql &>/dev/null; then + error "未找到 mysql 命令行客户端,请先安装" + error " macOS: brew install mysql-client" + error " Ubuntu: sudo apt-get install mysql-client" + exit 1 + fi + + echo "" + info "Nacos 数据库初始化" + info " 目标: ${DB_HOST}:${DB_PORT}/${DB_NAME}" + echo "" + + wait_for_mysql + create_database + execute_schema + execute_upgrades + show_summary +} + +main "$@" diff --git a/deploy/helm/charts/nacos/nacos-installer.sh b/deploy/helm/charts/nacos/nacos-installer.sh new file mode 100644 index 000000000..b17ace6e4 --- /dev/null +++ b/deploy/helm/charts/nacos/nacos-installer.sh @@ -0,0 +1,854 @@ +#!/bin/bash + +# Nacos Setup Installation Script +# This script downloads and installs nacos-setup from remote repository + +set -e + +# Colors +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +NC='\033[0m' # No Color + +print_info() { + echo -e "${BLUE}[INFO]${NC} $1" +} + +print_success() { + echo -e "${GREEN}[SUCCESS]${NC} $1" +} + +print_warn() { + echo -e "${YELLOW}[WARN]${NC} $1" +} + +print_error() { + echo -e "${RED}[ERROR]${NC} $1" +} + +# ============================================================================ +# Configuration +# ============================================================================ + +DOWNLOAD_BASE_URL="https://download.nacos.io" +# nacos-setup version configuration +NACOS_SETUP_VERSION="${NACOS_SETUP_VERSION:-0.0.1}" +# nacos-cli configuration +NACOS_CLI_VERSION="${NACOS_CLI_VERSION:-0.0.8}" +INSTALL_BASE_DIR="/usr/local" +CURRENT_LINK="nacos-setup" +BIN_DIR="/usr/local/bin" +SCRIPT_NAME="nacos-setup" +TEMP_DIR="/tmp/nacos-setup-install-$$" +CACHE_DIR="${HOME}/.nacos/cache" # 缓存目录 + +# ============================================================================ +# Check Requirements +# ============================================================================ + +check_requirements() { + print_info "Checking system requirements..." 
+ + # Check if running on macOS or Linux + if [[ "$OSTYPE" != "darwin"* ]] && [[ "$OSTYPE" != "linux-gnu"* ]]; then + print_error "Unsupported OS: $OSTYPE" + print_error "This script only supports macOS and Linux" + exit 1 + fi + + # Check for required commands + local missing_commands=() + + if ! command -v curl >/dev/null 2>&1 && ! command -v wget >/dev/null 2>&1; then + missing_commands+=("curl or wget") + fi + + if ! command -v unzip >/dev/null 2>&1; then + missing_commands+=("unzip") + fi + + if [ ${#missing_commands[@]} -gt 0 ]; then + print_error "Missing required commands: ${missing_commands[*]}" + echo "" + print_info "Please install missing commands:" + if [[ "$OSTYPE" == "darwin"* ]]; then + echo " brew install curl unzip" + else + echo " sudo apt-get install curl unzip # Debian/Ubuntu" + echo " sudo yum install curl unzip # CentOS/RHEL" + fi + return 1 + fi + + # Check if we have write permission to install directory + local mode="${1:-full}" + if [[ "$mode" == "onlycli" ]]; then + if [ ! -w "$BIN_DIR" ]; then + print_warn "No write permission to $BIN_DIR" + print_warn "You may need to run with sudo" + return 1 + fi + else + if [ ! -w "$INSTALL_BASE_DIR" ]; then + print_warn "No write permission to $INSTALL_BASE_DIR" + print_warn "You may need to run with sudo" + return 1 + fi + fi + + return 0 +} + +# ============================================================================ +# Download +# ============================================================================ + +download_file() { + local url=$1 + local output=$2 + + print_info "Downloading from $url..." 
>&2 + + # Try curl first, then wget + if command -v curl >/dev/null 2>&1; then + if curl -fSL --progress-bar "$url" -o "$output"; then + return 0 + else + print_error "Download failed with curl" >&2 + return 1 + fi + elif command -v wget >/dev/null 2>&1; then + if wget -q --show-progress "$url" -O "$output"; then + return 0 + else + print_error "Download failed with wget" >&2 + return 1 + fi + else + print_error "Neither curl nor wget is available" >&2 + return 1 + fi +} + +# Download nacos-setup package with caching support +# Parameters: version +# Returns: path to zip file (in cache or temp) or empty on error +download_nacos_setup() { + local version=$1 + local zip_filename="nacos-setup-${version}.zip" + local download_url="${DOWNLOAD_BASE_URL}/nacos-setup-${version}.zip" + local cached_file="$CACHE_DIR/$zip_filename" + + # Create cache directory + mkdir -p "$CACHE_DIR" 2>/dev/null + + # Check if cached file exists and is valid + if [ -f "$cached_file" ] && [ -s "$cached_file" ]; then + # Verify the cached zip file is valid + if unzip -t "$cached_file" >/dev/null 2>&1; then + print_info "Found cached package: $cached_file" >&2 + print_info "Skipping download, using cached file" >&2 + echo "" >&2 + echo "$cached_file" + return 0 + else + print_warn "Cached file is corrupted, re-downloading..." >&2 + rm -f "$cached_file" + fi + fi + + # Download the file to cache + print_info "Downloading nacos-setup version: $version" >&2 + echo "" >&2 + + if ! download_file "$download_url" "$cached_file"; then + print_error "Failed to download nacos-setup" >&2 + rm -f "$cached_file" + return 1 + fi + + echo "" >&2 + + # Verify downloaded file is a valid zip + if ! 
unzip -t "$cached_file" >/dev/null 2>&1; then + print_error "Downloaded file is corrupted or invalid" >&2 + rm -f "$cached_file" + return 1 + fi + + print_info "Download completed: $zip_filename" >&2 + echo "$cached_file" + return 0 +} + +# Download nacos-cli package with caching support +# Parameters: version, os, arch +# Returns: path to zip file (in cache) or empty on error +download_nacos_cli() { + local version=$1 + local os=$2 + local arch=$3 + local zip_filename="nacos-cli-${version}-${os}-${arch}.zip" + local download_url="${DOWNLOAD_BASE_URL}/${zip_filename}" + local cached_file="$CACHE_DIR/$zip_filename" + + # Create cache directory + mkdir -p "$CACHE_DIR" 2>/dev/null + + # Check if cached file exists and is valid + if [ -f "$cached_file" ] && [ -s "$cached_file" ]; then + # Verify the cached zip file is valid + if unzip -t "$cached_file" >/dev/null 2>&1; then + print_info "Found cached package: $cached_file" >&2 + print_info "Skipping download, using cached file" >&2 + echo "" >&2 + echo "$cached_file" + return 0 + else + print_warn "Cached file is corrupted, re-downloading..." >&2 + rm -f "$cached_file" + fi + fi + + # Download the file to cache + print_info "Downloading nacos-cli version: $version" >&2 + echo "" >&2 + + if ! download_file "$download_url" "$cached_file"; then + print_error "Failed to download nacos-cli" >&2 + rm -f "$cached_file" + return 1 + fi + + echo "" >&2 + + # Verify downloaded file is a valid zip + if ! unzip -t "$cached_file" >/dev/null 2>&1; then + print_error "Downloaded file is corrupted or invalid" >&2 + rm -f "$cached_file" + return 1 + fi + + print_info "Download completed: $zip_filename" >&2 + echo "$cached_file" + return 0 +} + +# ============================================================================ +# Installation +# ============================================================================ + +install_nacos_setup() { + print_info "Installing nacos-setup..." 
+ echo "" + + # Get version from environment variable or use default + local setup_version="${NACOS_SETUP_VERSION}" + + print_info "Target version: $setup_version" + + # Download nacos-setup (with caching) + # If cached version exists, it will be used directly + # If not, download from remote and save to cache + local zip_file=$(download_nacos_setup "$setup_version") + + if [ -z "$zip_file" ]; then + print_error "Failed to download nacos-setup" + exit 1 + fi + + print_success "Package ready: $zip_file" + echo "" + + # Create temporary directory for extraction + mkdir -p "$TEMP_DIR" + + # Extract zip file + print_info "Extracting nacos-setup..." + if ! unzip -q "$zip_file" -d "$TEMP_DIR"; then + print_error "Failed to extract zip file" + rm -rf "$TEMP_DIR" + exit 1 + fi + + # Find extracted directory (should be nacos-setup-VERSION or similar) + local extracted_dir=$(find "$TEMP_DIR" -maxdepth 1 -type d ! -path "$TEMP_DIR" | head -1) + + if [ -z "$extracted_dir" ] || [ ! -d "$extracted_dir" ]; then + print_error "Failed to find extracted directory" + rm -rf "$TEMP_DIR" + exit 1 + fi + + # Verify required files + if [ ! -f "$extracted_dir/nacos-setup.sh" ]; then + print_error "nacos-setup.sh not found in package" + rm -rf "$TEMP_DIR" + exit 1 + fi + + if [ ! -d "$extracted_dir/lib" ]; then + print_error "lib directory not found in package" + rm -rf "$TEMP_DIR" + exit 1 + fi + + # Prepare versioned installation directory + local INSTALL_DIR="$INSTALL_BASE_DIR/${CURRENT_LINK}-$setup_version" + + # Remove old installation for this version if exists + if [ -d "$INSTALL_DIR" ]; then + print_info "Removing old installation..." + rm -rf "$INSTALL_DIR" + fi + + # Create installation directory + print_info "Creating installation directory: $INSTALL_DIR" + mkdir -p "$INSTALL_DIR" + + # Copy nacos-setup.sh to bin directory + print_info "Installing nacos-setup command..." 
+ mkdir -p "$INSTALL_DIR/bin" + cp "$extracted_dir/nacos-setup.sh" "$INSTALL_DIR/bin/$SCRIPT_NAME" + chmod +x "$INSTALL_DIR/bin/$SCRIPT_NAME" + + # Copy lib directory + print_info "Installing libraries..." + cp -r "$extracted_dir/lib" "$INSTALL_DIR/" + + # Make all lib scripts executable + chmod +x "$INSTALL_DIR/lib"/*.sh + + # Create or update current symlink and global command + print_info "Updating active version symlink: $INSTALL_BASE_DIR/$CURRENT_LINK -> nacos-setup-$setup_version" + if [ -L "$INSTALL_BASE_DIR/$CURRENT_LINK" ] || [ -e "$INSTALL_BASE_DIR/$CURRENT_LINK" ]; then + rm -f "$INSTALL_BASE_DIR/$CURRENT_LINK" + fi + ln -s "nacos-setup-$setup_version" "$INSTALL_BASE_DIR/$CURRENT_LINK" + + print_info "Creating global command..." + # Ensure bin directory exists + mkdir -p "$BIN_DIR" + + # Remove old symlink if exists + if [ -L "$BIN_DIR/$SCRIPT_NAME" ] || [ -f "$BIN_DIR/$SCRIPT_NAME" ]; then + rm -f "$BIN_DIR/$SCRIPT_NAME" + fi + + # Create symlink with absolute path + local target_script="$INSTALL_BASE_DIR/$CURRENT_LINK/bin/$SCRIPT_NAME" + + # Verify target exists before creating symlink + if [ ! -f "$target_script" ]; then + print_error "Target script not found: $target_script" + rm -rf "$TEMP_DIR" + exit 1 + fi + + ln -s "$target_script" "$BIN_DIR/$SCRIPT_NAME" + + # Verify symlink was created successfully + if [ ! -L "$BIN_DIR/$SCRIPT_NAME" ]; then + print_error "Failed to create symlink at $BIN_DIR/$SCRIPT_NAME" + rm -rf "$TEMP_DIR" + exit 1 + fi + + print_info "Global command created: $BIN_DIR/$SCRIPT_NAME -> $target_script" + + # Cleanup temporary directory + rm -rf "$TEMP_DIR" + + # Store version info + echo "$setup_version" > "$INSTALL_DIR/.version" + + # Fix permissions for Nacos installation directory + # Allow current user to manage Nacos instances without sudo + local nacos_base_dir="${HOME}/ai-infra/nacos" + if [ -d "$nacos_base_dir" ]; then + print_info "Setting ownership of Nacos directory to current user..." + if ! 
sudo chown -R "$USER:$(id -gn)" "$nacos_base_dir" 2>/dev/null; then + print_warn "Failed to change ownership of $nacos_base_dir" + print_info "You can fix this manually with: sudo chown -R \$USER:\$(id -gn) $nacos_base_dir" + fi + fi + + print_success "Installation completed!" + echo "" + + # Export version for later use + INSTALLED_VERSION="$setup_version" +} + +# ============================================================================ +# nacos-cli Installation +# ============================================================================ + +install_nacos_cli() { + local version="${NACOS_CLI_VERSION}" + + print_info "Preparing to install nacos-cli version $version..." + + # Detect OS + local os="" + if [[ "$OSTYPE" == "darwin"* ]]; then + os="darwin" + elif [[ "$OSTYPE" == "linux-gnu"* ]] || [[ "$OSTYPE" == "linux"* ]]; then + os="linux" + elif [[ "$OSTYPE" == "msys"* ]] || [[ "$OSTYPE" == "cygwin"* ]] || [[ "$OSTYPE" == "win32"* ]]; then + os="windows" + else + # Try using uname as fallback + local uname_os + uname_os=$(uname -s 2>/dev/null || echo "") + if [[ "$uname_os" == "Darwin" ]]; then + os="darwin" + elif [[ "$uname_os" == "Linux" ]]; then + os="linux" + elif [[ "$uname_os" == MINGW* ]] || [[ "$uname_os" == MSYS* ]] || [[ "$uname_os" == CYGWIN* ]]; then + os="windows" + else + print_warn "Unsupported OS for nacos-cli: $OSTYPE (uname: $uname_os)" + return 1 + fi + fi + + # Detect architecture + local arch="" + local uname_arch + uname_arch=$(uname -m) + case "$uname_arch" in + x86_64|amd64) + arch="amd64" + ;; + arm64|aarch64) + arch="arm64" + ;; + *) + print_warn "Unsupported architecture for nacos-cli: $uname_arch" + return 1 + ;; + esac + local url="${DOWNLOAD_BASE_URL}/nacos-cli-${version}-${os}-${arch}.zip" + local zip_filename="nacos-cli-${version}-${os}-${arch}.zip" + + # Download nacos-cli (with caching) + local zip_file=$(download_nacos_cli "$version" "$os" "$arch") + + if [ -z "$zip_file" ]; then + print_error "Failed to download nacos-cli" 
+ return 1 + fi + + print_success "Package ready: $zip_file" + echo "" + + # Create temporary directory for extraction + local tmp_dir + tmp_dir=$(mktemp -d "/tmp/nacos-cli-extract-$$.XXXXXX") || { + print_error "Failed to create temp directory for nacos-cli extraction" + return 1 + } + + # Extract zip file + print_info "Extracting nacos-cli..." + if ! unzip -q "$zip_file" -d "$tmp_dir"; then + print_error "Failed to extract zip file" + rm -rf "$tmp_dir" + return 1 + fi + + # Expected binary filename: nacos-cli-{version}-{os}-{arch} or nacos-cli-{version}-{os}-{arch}.exe + local expected_binary_name="nacos-cli-${version}-${os}-${arch}" + local expected_binary_name_exe="${expected_binary_name}.exe" + local binary_path + + # For Windows, prioritize .exe files; for others, prioritize files without extension + if [[ "$os" == "windows" ]]; then + # Try .exe first for Windows + binary_path=$(find "$tmp_dir" -name "$expected_binary_name_exe" -type f | head -1) + # Fallback to non-.exe (shouldn't happen, but just in case) + if [ -z "$binary_path" ]; then + binary_path=$(find "$tmp_dir" -name "$expected_binary_name" -type f | head -1) + fi + else + # For non-Windows, try without .exe first + binary_path=$(find "$tmp_dir" -name "$expected_binary_name" -type f | head -1) + # Fallback to .exe (shouldn't happen, but just in case) + if [ -z "$binary_path" ]; then + binary_path=$(find "$tmp_dir" -name "$expected_binary_name_exe" -type f | head -1) + fi + fi + + if [ -z "$binary_path" ] || [ ! -f "$binary_path" ]; then + local expected_names="$expected_binary_name" + if [[ "$os" == "windows" ]]; then + expected_names="$expected_binary_name_exe (or $expected_binary_name)" + else + expected_names="$expected_binary_name (or $expected_binary_name_exe)" + fi + print_error "Binary file not found in package. 
Expected: $expected_names" + print_info "Available files in package:" + find "$tmp_dir" -type f | sed 's|^| |' + rm -rf "$tmp_dir" + return 1 + fi + + # Ensure bin dir exists + mkdir -p "$BIN_DIR" + + # Determine target binary name (add .exe for Windows) + local target_binary_name="nacos-cli" + if [[ "$os" == "windows" ]]; then + target_binary_name="nacos-cli.exe" + fi + + # Install binary + if ! cp "$binary_path" "$BIN_DIR/$target_binary_name"; then + print_error "Failed to copy nacos-cli to $BIN_DIR (permission denied?)" + rm -rf "$tmp_dir" + return 1 + fi + + # Set executable permission (not needed on Windows, but harmless) + if ! chmod +x "$BIN_DIR/$target_binary_name" 2>/dev/null; then + # On Windows, chmod might fail, which is fine + if [[ "$os" != "windows" ]]; then + print_warn "Failed to mark nacos-cli as executable: $BIN_DIR/$target_binary_name" + fi + fi + + # On macOS, add ad-hoc signature to avoid Gatekeeper killing the binary + if [[ "$os" == "darwin" ]]; then + if command -v codesign >/dev/null 2>&1; then + if ! codesign --force --deep --sign - "$BIN_DIR/$target_binary_name" >/dev/null 2>&1; then + print_warn "Failed to codesign nacos-cli (may be blocked by Gatekeeper): $BIN_DIR/$target_binary_name" + fi + else + print_warn "codesign not found; nacos-cli may be blocked by Gatekeeper" + fi + fi + + # Cleanup + rm -rf "$tmp_dir" + + print_success "nacos-cli $version installed to $BIN_DIR/$target_binary_name" +} + +# ============================================================================ +# Verification +# ============================================================================ + +verify_installation() { + print_info "Verifying installation..." + + # Check if the symlink or file exists (use -e for both files and symlinks) + if [ ! 
-e "$BIN_DIR/$SCRIPT_NAME" ]; then + print_error "Installation failed: $BIN_DIR/$SCRIPT_NAME not found" + return 1 + fi + + # Check if the symlink target exists (resolve and check the actual target) + if [ -L "$BIN_DIR/$SCRIPT_NAME" ]; then + local link_path="$BIN_DIR/$SCRIPT_NAME" + # Follow the symlink to check if target is accessible + if [ ! -e "$link_path" ]; then + local target=$(readlink "$link_path") + print_error "Installation failed: Broken symlink at $link_path" + print_error "Target does not exist: $target" + return 1 + fi + fi + + if ! command -v $SCRIPT_NAME >/dev/null 2>&1; then + print_info "Configuring PATH automatically..." + + # Detect shell configuration file + local shell_config="" + if [ -n "$SHELL" ]; then + case "$SHELL" in + */zsh) + shell_config="$HOME/.zshrc" + ;; + */bash) + shell_config="$HOME/.bashrc" + ;; + esac + fi + + # Fallback: detect by checking which file exists + if [ -z "$shell_config" ]; then + if [ -f "$HOME/.zshrc" ]; then + shell_config="$HOME/.zshrc" + elif [ -f "$HOME/.bashrc" ]; then + shell_config="$HOME/.bashrc" + else + # Create .bashrc if nothing exists + shell_config="$HOME/.bashrc" + fi + fi + + # Check if PATH is already configured + local path_export="export PATH=\"$BIN_DIR:\$PATH\"" + if ! grep -qF "$BIN_DIR" "$shell_config" 2>/dev/null; then + echo "" >> "$shell_config" + echo "# Added by nacos-setup installer" >> "$shell_config" + echo "$path_export" >> "$shell_config" + print_success "PATH configured in $shell_config" + else + print_info "PATH already configured in $shell_config" + fi + + # Note: We cannot automatically source in the current shell due to shell limitations + # The script runs in a subshell, sourcing only affects the subshell, not the parent shell + # But we can use the command directly via absolute path + print_info "PATH will be available in new terminal sessions" + echo "" + return 0 + fi + + print_success "Installation verified successfully!" 
+ echo "" + + return 0 +} + + +# ============================================================================ +# Post-installation Info +# ============================================================================ + +print_usage_info() { + local version="${INSTALLED_VERSION:-unknown}" + local install_location="unknown" + if [ -L "$INSTALL_BASE_DIR/$CURRENT_LINK" ]; then + install_location="$INSTALL_BASE_DIR/$(readlink "$INSTALL_BASE_DIR/$CURRENT_LINK")" + fi + + echo "========================================" + echo " Nacos Setup Installation Complete" + echo "========================================" + echo "" + echo "Version: $version" + echo "Installation location: $install_location" + echo "Global command: $SCRIPT_NAME" + echo "" + echo "Quick Start:" + echo "" + echo " # Show help" + echo " $SCRIPT_NAME --help" + echo "" + echo " # Install Nacos standalone" + echo " $SCRIPT_NAME -v 3.1.1" + echo "" + echo " # Install Nacos cluster" + echo " $SCRIPT_NAME -c prod -n 3" + echo "" + echo " # Configure datasource" + echo " $SCRIPT_NAME --datasource-conf" + echo "" + echo "Documentation: https://nacos.io" + echo "" + echo "========================================" +} + +# ============================================================================ +# Version Check +# ============================================================================ + +check_installed_version() { + # Read active version from current symlink + if [ -L "$INSTALL_BASE_DIR/$CURRENT_LINK" ]; then + local target=$(readlink "$INSTALL_BASE_DIR/$CURRENT_LINK") + local active_dir="$INSTALL_BASE_DIR/$target" + if [ -f "$active_dir/.version" ]; then + local version=$(cat "$active_dir/.version") + print_info "Installed nacos-setup version: $version" + print_info "Installation location: $active_dir" + return 0 + fi + fi + + print_warn "nacos-setup is not installed or version information not found" + return 1 +} + +# ============================================================================ +# 
Uninstallation +# ============================================================================ + +uninstall_nacos_setup() { + print_info "Uninstalling nacos-setup (active version)..." + + # If current symlink exists, remove the target directory + if [ -L "$INSTALL_BASE_DIR/$CURRENT_LINK" ]; then + local target=$(readlink "$INSTALL_BASE_DIR/$CURRENT_LINK") + local target_dir="$INSTALL_BASE_DIR/$target" + if [ -d "$target_dir" ]; then + rm -rf "$target_dir" + print_success "Removed $target_dir" + fi + + # Remove current symlink + rm -f "$INSTALL_BASE_DIR/$CURRENT_LINK" + print_success "Removed $INSTALL_BASE_DIR/$CURRENT_LINK" + else + print_warn "No active installation found at $INSTALL_BASE_DIR/$CURRENT_LINK" + fi + + # Remove global command + if [ -L "$BIN_DIR/$SCRIPT_NAME" ] || [ -f "$BIN_DIR/$SCRIPT_NAME" ]; then + rm -f "$BIN_DIR/$SCRIPT_NAME" + print_success "Removed $BIN_DIR/$SCRIPT_NAME" + fi + + print_success "Uninstallation completed!" + echo "" +} + +# ============================================================================ +# Main +# ============================================================================ + +main() { + echo "" + echo "========================================" + echo " Nacos Setup Installer" + echo "========================================" + echo "" + + # Parse arguments + local install_cli=false + local only_cli=false + local cli_version="" + + while [[ $# -gt 0 ]]; do + case "$1" in + version|--version) + check_installed_version + exit $? 
+                ;;
+            uninstall|--uninstall|-u)
+                uninstall_nacos_setup
+                exit 0
+                ;;
+            --cli)
+                install_cli=true
+                only_cli=true
+                shift
+                ;;
+            -v|--version-cli)
+                if [ -z "$2" ] || [[ "$2" == -* ]]; then
+                    print_error "Option $1 requires a version number"
+                    echo ""
+                    print_info "Usage: ./nacos-installer.sh --cli -v <version>"
+                    exit 1
+                fi
+                if [[ "$install_cli" != true ]]; then
+                    print_error "Option $1 can only be used with --cli"
+                    echo ""
+                    print_info "Usage: ./nacos-installer.sh --cli -v <version>"
+                    exit 1
+                fi
+                cli_version="$2"
+                shift 2
+                ;;
+            --help|-h)
+                echo "Usage: curl -fsSL https://nacos.io/installer.sh | sudo bash"
+                echo ""
+                echo "Install nacos-setup and nacos-cli tools for managing Nacos instances."
+                echo ""
+                echo "Options:"
+                echo "  (none)              Install nacos-setup"
+                echo "  --cli               Install nacos-cli only"
+                echo "  -v, --version-cli   Specify nacos-cli version (with --cli)"
+                echo "  version             Show installed version"
+                echo "  uninstall, -u       Uninstall nacos-setup"
+                echo "  --help, -h          Show this help message"
+                echo ""
+                echo "Examples:"
+                echo "  ./nacos-installer.sh --cli              Install latest nacos-cli"
+                echo "  ./nacos-installer.sh --cli -v 0.0.3     Install nacos-cli v0.0.3"
+                echo ""
+                echo "After installation, use 'nacos-setup' command to manage Nacos:"
+                echo "  nacos-setup --help                  Show nacos-setup help"
+                echo "  nacos-setup -v 3.1.1                Install Nacos standalone"
+                echo "  nacos-setup -c prod -n 3            Install Nacos cluster"
+                echo ""
+                exit 0
+                ;;
+            *)
+                print_error "Unknown option: $1"
+                echo ""
+                print_info "Use --help for usage information"
+                exit 1
+                ;;
+        esac
+    done
+
+    # Apply CLI version if specified
+    if [ -n "$cli_version" ]; then
+        NACOS_CLI_VERSION="$cli_version"
+    fi
+
+    # Check requirements
+    # NOTE(review): "${only_cli:+onlycli}" always expanded to "onlycli" because
+    # only_cli is initialized to the non-empty string "false"; test the value instead.
+    if ! check_requirements "$([[ "$only_cli" == true ]] && echo onlycli || echo full)"; then
+        print_error "Requirements check failed"
+        print_info "Try running with sudo: curl -fsSL https://nacos.io/installer.sh | sudo bash"
+        exit 1
+    fi
+
+    if [[ "$only_cli" == true ]]; then
+        echo ""
+        install_nacos_cli
+        exit $?
+ fi + + # Install + install_nacos_setup + + # Verify + if verify_installation; then + print_usage_info + + # Install nacos-cli if --cli flag is provided + if [[ "$install_cli" == true ]]; then + echo "" + install_nacos_cli + fi + + # After installation, offer to install Nacos (default version) + echo "" + # Try to detect default Nacos version from installed script + detected_default_version="3.1.1" + installed_script="$INSTALL_BASE_DIR/$CURRENT_LINK/bin/$SCRIPT_NAME" + if [ -f "$installed_script" ]; then + v=$(sed -n 's/^DEFAULT_VERSION="\(.*\)"/\1/p' "$installed_script" || true) + if [ -n "$v" ]; then + detected_default_version="$v" + fi + fi + + read -p "Do you want to install Nacos $detected_default_version now? (Y/n): " -r REPLY + echo "" + if [[ "$REPLY" =~ ^[Yy]?$ ]] || [[ -z "$REPLY" ]]; then + print_info "Installing Nacos $detected_default_version..." + # Always use absolute path to ensure it works even if PATH is not yet loaded + "$BIN_DIR/$SCRIPT_NAME" -v "$detected_default_version" + else + print_info "Skipping Nacos installation." + print_info "To install later, run: $SCRIPT_NAME -v $detected_default_version" + print_info "Or use absolute path: $BIN_DIR/$SCRIPT_NAME -v $detected_default_version" + fi + + exit 0 + else + print_error "Installation verification failed" + exit 1 + fi +} + +# Run main +main "$@" diff --git a/deploy/helm/charts/nacos/sql/mysql-schema.sql b/deploy/helm/charts/nacos/sql/mysql-schema.sql new file mode 100644 index 000000000..9e09dfe1e --- /dev/null +++ b/deploy/helm/charts/nacos/sql/mysql-schema.sql @@ -0,0 +1,179 @@ +/* + * Copyright 1999-2018 Alibaba Group Holding Ltd. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/******************************************/ +/* 表名称 = config_info */ +/******************************************/ +CREATE TABLE IF NOT EXISTS `config_info` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'id', + `data_id` varchar(255) NOT NULL COMMENT 'data_id', + `group_id` varchar(128) DEFAULT NULL COMMENT 'group_id', + `content` longtext NOT NULL COMMENT 'content', + `md5` varchar(32) DEFAULT NULL COMMENT 'md5', + `gmt_create` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', + `gmt_modified` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '修改时间', + `src_user` text COMMENT 'source user', + `src_ip` varchar(50) DEFAULT NULL COMMENT 'source ip', + `app_name` varchar(128) DEFAULT NULL COMMENT 'app_name', + `tenant_id` varchar(128) DEFAULT '' COMMENT '租户字段', + `c_desc` varchar(256) DEFAULT NULL COMMENT 'configuration description', + `c_use` varchar(64) DEFAULT NULL COMMENT 'configuration usage', + `effect` varchar(64) DEFAULT NULL COMMENT '配置生效的描述', + `type` varchar(64) DEFAULT NULL COMMENT '配置的类型', + `c_schema` text COMMENT '配置的模式', + `encrypted_data_key` varchar(1024) NOT NULL DEFAULT '' COMMENT '密钥', + PRIMARY KEY (`id`), + UNIQUE KEY `uk_configinfo_datagrouptenant` (`data_id`,`group_id`,`tenant_id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin COMMENT='config_info'; + +/******************************************/ +/* 表名称 = config_info since 2.5.0 */ +/******************************************/ +CREATE TABLE IF NOT EXISTS `config_info_gray` ( + `id` bigint unsigned NOT NULL AUTO_INCREMENT COMMENT 'id', + 
`data_id` varchar(255) NOT NULL COMMENT 'data_id', + `group_id` varchar(128) NOT NULL COMMENT 'group_id', + `content` longtext NOT NULL COMMENT 'content', + `md5` varchar(32) DEFAULT NULL COMMENT 'md5', + `src_user` text COMMENT 'src_user', + `src_ip` varchar(100) DEFAULT NULL COMMENT 'src_ip', + `gmt_create` datetime(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'gmt_create', + `gmt_modified` datetime(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'gmt_modified', + `app_name` varchar(128) DEFAULT NULL COMMENT 'app_name', + `tenant_id` varchar(128) DEFAULT '' COMMENT 'tenant_id', + `gray_name` varchar(128) NOT NULL COMMENT 'gray_name', + `gray_rule` text NOT NULL COMMENT 'gray_rule', + `encrypted_data_key` varchar(256) NOT NULL DEFAULT '' COMMENT 'encrypted_data_key', + PRIMARY KEY (`id`), + UNIQUE KEY `uk_configinfogray_datagrouptenantgray` (`data_id`,`group_id`,`tenant_id`,`gray_name`), + KEY `idx_dataid_gmt_modified` (`data_id`,`gmt_modified`), + KEY `idx_gmt_modified` (`gmt_modified`) +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COMMENT='config_info_gray'; + +/******************************************/ +/* 表名称 = config_tags_relation */ +/******************************************/ +CREATE TABLE IF NOT EXISTS `config_tags_relation` ( + `id` bigint(20) NOT NULL COMMENT 'id', + `tag_name` varchar(128) NOT NULL COMMENT 'tag_name', + `tag_type` varchar(64) DEFAULT NULL COMMENT 'tag_type', + `data_id` varchar(255) NOT NULL COMMENT 'data_id', + `group_id` varchar(128) NOT NULL COMMENT 'group_id', + `tenant_id` varchar(128) DEFAULT '' COMMENT 'tenant_id', + `nid` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'nid, 自增长标识', + PRIMARY KEY (`nid`), + UNIQUE KEY `uk_configtagrelation_configidtag` (`id`,`tag_name`,`tag_type`), + KEY `idx_tenant_id` (`tenant_id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin COMMENT='config_tag_relation'; + +/******************************************/ +/* 表名称 = group_capacity */ 
+/******************************************/ +CREATE TABLE IF NOT EXISTS `group_capacity` ( + `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '主键ID', + `group_id` varchar(128) NOT NULL DEFAULT '' COMMENT 'Group ID,空字符表示整个集群', + `quota` int(10) unsigned NOT NULL DEFAULT '0' COMMENT '配额,0表示使用默认值', + `usage` int(10) unsigned NOT NULL DEFAULT '0' COMMENT '使用量', + `max_size` int(10) unsigned NOT NULL DEFAULT '0' COMMENT '单个配置大小上限,单位为字节,0表示使用默认值', + `max_aggr_count` int(10) unsigned NOT NULL DEFAULT '0' COMMENT '聚合子配置最大个数,,0表示使用默认值', + `max_aggr_size` int(10) unsigned NOT NULL DEFAULT '0' COMMENT '单个聚合数据的子配置大小上限,单位为字节,0表示使用默认值', + `max_history_count` int(10) unsigned NOT NULL DEFAULT '0' COMMENT '最大变更历史数量', + `gmt_create` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', + `gmt_modified` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '修改时间', + PRIMARY KEY (`id`), + UNIQUE KEY `uk_group_id` (`group_id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin COMMENT='集群、各Group容量信息表'; + +/******************************************/ +/* 表名称 = his_config_info */ +/******************************************/ +CREATE TABLE IF NOT EXISTS `his_config_info` ( + `id` bigint(20) unsigned NOT NULL COMMENT 'id', + `nid` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT 'nid, 自增标识', + `data_id` varchar(255) NOT NULL COMMENT 'data_id', + `group_id` varchar(128) NOT NULL COMMENT 'group_id', + `app_name` varchar(128) DEFAULT NULL COMMENT 'app_name', + `content` longtext NOT NULL COMMENT 'content', + `md5` varchar(32) DEFAULT NULL COMMENT 'md5', + `gmt_create` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', + `gmt_modified` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '修改时间', + `src_user` text COMMENT 'source user', + `src_ip` varchar(50) DEFAULT NULL COMMENT 'source ip', + `op_type` char(10) DEFAULT NULL COMMENT 'operation type', + `tenant_id` varchar(128) DEFAULT '' COMMENT '租户字段', + `encrypted_data_key` varchar(1024) NOT NULL DEFAULT '' 
COMMENT '密钥', + `publish_type` varchar(50) DEFAULT 'formal' COMMENT 'publish type gray or formal', + `gray_name` varchar(50) DEFAULT NULL COMMENT 'gray name', + `ext_info` longtext DEFAULT NULL COMMENT 'ext info', + PRIMARY KEY (`nid`), + KEY `idx_gmt_create` (`gmt_create`), + KEY `idx_gmt_modified` (`gmt_modified`), + KEY `idx_did` (`data_id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin COMMENT='多租户改造'; + + +/******************************************/ +/* 表名称 = tenant_capacity */ +/******************************************/ +CREATE TABLE IF NOT EXISTS `tenant_capacity` ( + `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '主键ID', + `tenant_id` varchar(128) NOT NULL DEFAULT '' COMMENT 'Tenant ID', + `quota` int(10) unsigned NOT NULL DEFAULT '0' COMMENT '配额,0表示使用默认值', + `usage` int(10) unsigned NOT NULL DEFAULT '0' COMMENT '使用量', + `max_size` int(10) unsigned NOT NULL DEFAULT '0' COMMENT '单个配置大小上限,单位为字节,0表示使用默认值', + `max_aggr_count` int(10) unsigned NOT NULL DEFAULT '0' COMMENT '聚合子配置最大个数', + `max_aggr_size` int(10) unsigned NOT NULL DEFAULT '0' COMMENT '单个聚合数据的子配置大小上限,单位为字节,0表示使用默认值', + `max_history_count` int(10) unsigned NOT NULL DEFAULT '0' COMMENT '最大变更历史数量', + `gmt_create` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', + `gmt_modified` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '修改时间', + PRIMARY KEY (`id`), + UNIQUE KEY `uk_tenant_id` (`tenant_id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin COMMENT='租户容量信息表'; + + +CREATE TABLE IF NOT EXISTS `tenant_info` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'id', + `kp` varchar(128) NOT NULL COMMENT 'kp', + `tenant_id` varchar(128) default '' COMMENT 'tenant_id', + `tenant_name` varchar(128) default '' COMMENT 'tenant_name', + `tenant_desc` varchar(256) DEFAULT NULL COMMENT 'tenant_desc', + `create_source` varchar(32) DEFAULT NULL COMMENT 'create_source', + `gmt_create` bigint(20) NOT NULL COMMENT '创建时间', + `gmt_modified` bigint(20) NOT NULL COMMENT '修改时间', + 
PRIMARY KEY (`id`), + UNIQUE KEY `uk_tenant_info_kptenantid` (`kp`,`tenant_id`), + KEY `idx_tenant_id` (`tenant_id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin COMMENT='tenant_info'; + +CREATE TABLE IF NOT EXISTS `users` ( + `username` varchar(50) NOT NULL PRIMARY KEY COMMENT 'username', + `password` varchar(500) NOT NULL COMMENT 'password', + `enabled` boolean NOT NULL COMMENT 'enabled' +); + +CREATE TABLE IF NOT EXISTS `roles` ( + `username` varchar(50) NOT NULL COMMENT 'username', + `role` varchar(50) NOT NULL COMMENT 'role', + UNIQUE INDEX `idx_user_role` (`username` ASC, `role` ASC) USING BTREE +); + +CREATE TABLE IF NOT EXISTS `permissions` ( + `role` varchar(50) NOT NULL COMMENT 'role', + `resource` varchar(128) NOT NULL COMMENT 'resource', + `action` varchar(8) NOT NULL COMMENT 'action', + UNIQUE INDEX `uk_role_permission` (`role`,`resource`,`action`) USING BTREE +); + diff --git a/deploy/helm/charts/nacos/sql/pg-schema.sql b/deploy/helm/charts/nacos/sql/pg-schema.sql new file mode 100644 index 000000000..e6987ae7b --- /dev/null +++ b/deploy/helm/charts/nacos/sql/pg-schema.sql @@ -0,0 +1,410 @@ +/* + * Copyright 1999-2018 Alibaba Group Holding Ltd. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +-- ---------------------------- +-- Table structure for config_info +-- ---------------------------- +DROP TABLE IF EXISTS "config_info"; +CREATE TABLE "config_info" ( + "id" bigserial NOT NULL, + "data_id" varchar(255) NOT NULL, + "group_id" varchar(255) , + "content" text NOT NULL, + "md5" varchar(32) , + "gmt_create" timestamp(6) NOT NULL, + "gmt_modified" timestamp(6) NOT NULL, + "src_user" text , + "src_ip" varchar(20) , + "app_name" varchar(128) , + "tenant_id" varchar(128) , + "c_desc" varchar(256) , + "c_use" varchar(64) , + "effect" varchar(64) , + "type" varchar(64) , + "c_schema" text , + "encrypted_data_key" text NOT NULL +) +; + +COMMENT ON COLUMN "config_info"."id" IS 'id'; +COMMENT ON COLUMN "config_info"."data_id" IS 'data_id'; +COMMENT ON COLUMN "config_info"."content" IS 'content'; +COMMENT ON COLUMN "config_info"."md5" IS 'md5'; +COMMENT ON COLUMN "config_info"."gmt_create" IS '创建时间'; +COMMENT ON COLUMN "config_info"."gmt_modified" IS '修改时间'; +COMMENT ON COLUMN "config_info"."src_user" IS 'source user'; +COMMENT ON COLUMN "config_info"."src_ip" IS 'source ip'; +COMMENT ON COLUMN "config_info"."tenant_id" IS '租户字段'; +COMMENT ON COLUMN "config_info"."encrypted_data_key" IS '秘钥'; +COMMENT ON TABLE "config_info" IS 'config_info'; + +-- ---------------------------- +-- Table structure for config_info_gray +-- ---------------------------- +DROP TABLE IF EXISTS "config_info_gray"; +CREATE TABLE "config_info_gray" ( + "id" bigserial NOT NULL, + "data_id" varchar(255) NOT NULL, + "group_id" varchar(128) NOT NULL, + "content" text NOT NULL, + "md5" varchar(32), + "src_user" text, + "src_ip" varchar(100), + "gmt_create" timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "gmt_modified" timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "app_name" varchar(128), + "tenant_id" varchar(128) DEFAULT '', + "gray_name" varchar(128) NOT NULL, + "gray_rule" text NOT NULL, + "encrypted_data_key" varchar(256) NOT NULL DEFAULT '' +); + +COMMENT ON COLUMN 
"config_info_gray"."id" IS 'id'; +COMMENT ON COLUMN "config_info_gray"."data_id" IS 'data_id'; +COMMENT ON COLUMN "config_info_gray"."group_id" IS 'group_id'; +COMMENT ON COLUMN "config_info_gray"."content" IS 'content'; +COMMENT ON COLUMN "config_info_gray"."md5" IS 'md5'; +COMMENT ON COLUMN "config_info_gray"."src_user" IS 'source user'; +COMMENT ON COLUMN "config_info_gray"."src_ip" IS 'source ip'; +COMMENT ON COLUMN "config_info_gray"."gmt_create" IS '创建时间'; +COMMENT ON COLUMN "config_info_gray"."gmt_modified" IS '修改时间'; +COMMENT ON COLUMN "config_info_gray"."app_name" IS 'app_name'; +COMMENT ON COLUMN "config_info_gray"."tenant_id" IS '租户字段'; +COMMENT ON COLUMN "config_info_gray"."gray_name" IS '灰度名称'; +COMMENT ON COLUMN "config_info_gray"."gray_rule" IS '灰度规则'; +COMMENT ON COLUMN "config_info_gray"."encrypted_data_key" IS '秘钥'; +COMMENT ON TABLE "config_info_gray" IS 'config_info_gray'; + +-- 创建索引 +CREATE UNIQUE INDEX "uk_configinfogray_datagrouptenantgray" ON "config_info_gray" USING btree ("data_id", "group_id", "tenant_id", "gray_name"); +CREATE INDEX "idx_dataid_gmt_modified_gray" ON "config_info_gray" USING btree ("data_id", "gmt_modified"); +CREATE INDEX "idx_gmt_modified_gray" ON "config_info_gray" USING btree ("gmt_modified"); + +ALTER TABLE "config_info_gray" ADD CONSTRAINT "config_info_gray_pkey" PRIMARY KEY ("id"); + +-- ---------------------------- +-- Records of config_info_gray +-- ---------------------------- +BEGIN; +COMMIT; + + +-- ---------------------------- +-- Table structure for config_tags_relation +-- ---------------------------- +DROP TABLE IF EXISTS "config_tags_relation"; +CREATE TABLE "config_tags_relation" ( + "id" bigint NOT NULL, + "tag_name" varchar(128) NOT NULL, + "tag_type" varchar(64) , + "data_id" varchar(255) NOT NULL, + "group_id" varchar(128) NOT NULL, + "tenant_id" varchar(128) , + "nid" bigserial NOT NULL +) +; +COMMENT ON COLUMN "config_tags_relation"."id" IS 'id'; +COMMENT ON COLUMN "config_tags_relation"."tag_name" 
IS 'tag_name'; +COMMENT ON COLUMN "config_tags_relation"."tag_type" IS 'tag_type'; +COMMENT ON COLUMN "config_tags_relation"."data_id" IS 'data_id'; +COMMENT ON COLUMN "config_tags_relation"."group_id" IS 'group_id'; +COMMENT ON COLUMN "config_tags_relation"."tenant_id" IS 'tenant_id'; +COMMENT ON TABLE "config_tags_relation" IS 'config_tag_relation'; + +-- ---------------------------- +-- Records of config_tags_relation +-- ---------------------------- +BEGIN; +COMMIT; + +-- ---------------------------- +-- Table structure for group_capacity +-- ---------------------------- +DROP TABLE IF EXISTS "group_capacity"; +CREATE TABLE "group_capacity" ( + "id" bigserial NOT NULL, + "group_id" varchar(128) NOT NULL, + "quota" int4 NOT NULL, + "usage" int4 NOT NULL, + "max_size" int4 NOT NULL, + "max_aggr_count" int4 NOT NULL, + "max_aggr_size" int4 NOT NULL, + "max_history_count" int4 NOT NULL, + "gmt_create" timestamp(6) NOT NULL, + "gmt_modified" timestamp(6) NOT NULL +) +; +COMMENT ON COLUMN "group_capacity"."id" IS '主键ID'; +COMMENT ON COLUMN "group_capacity"."group_id" IS 'Group ID,空字符表示整个集群'; +COMMENT ON COLUMN "group_capacity"."quota" IS '配额,0表示使用默认值'; +COMMENT ON COLUMN "group_capacity"."usage" IS '使用量'; +COMMENT ON COLUMN "group_capacity"."max_size" IS '单个配置大小上限,单位为字节,0表示使用默认值'; +COMMENT ON COLUMN "group_capacity"."max_aggr_count" IS '聚合子配置最大个数,,0表示使用默认值'; +COMMENT ON COLUMN "group_capacity"."max_aggr_size" IS '单个聚合数据的子配置大小上限,单位为字节,0表示使用默认值'; +COMMENT ON COLUMN "group_capacity"."max_history_count" IS '最大变更历史数量'; +COMMENT ON COLUMN "group_capacity"."gmt_create" IS '创建时间'; +COMMENT ON COLUMN "group_capacity"."gmt_modified" IS '修改时间'; +COMMENT ON TABLE "group_capacity" IS '集群、各Group容量信息表'; + +-- ---------------------------- +-- Records of group_capacity +-- ---------------------------- +BEGIN; +COMMIT; + +-- ---------------------------- +-- Table structure for his_config_info +-- ---------------------------- +DROP TABLE IF EXISTS "his_config_info"; +CREATE TABLE 
"his_config_info" ( + "id" int8 NOT NULL, + "nid" bigserial NOT NULL, + "data_id" varchar(255) NOT NULL, + "group_id" varchar(128) NOT NULL, + "app_name" varchar(128) , + "content" text NOT NULL, + "md5" varchar(32) , + "gmt_create" timestamp(6) NOT NULL DEFAULT '2010-05-05 00:00:00', + "gmt_modified" timestamp(6) NOT NULL, + "src_user" text , + "src_ip" varchar(20) , + "op_type" char(10) , + "tenant_id" varchar(128) , + "encrypted_data_key" text NOT NULL, + "publish_type" varchar(50) DEFAULT 'formal', + "gray_name" varchar(50), + "ext_info" text +) +; +COMMENT ON COLUMN "his_config_info"."app_name" IS 'app_name'; +COMMENT ON COLUMN "his_config_info"."tenant_id" IS '租户字段'; +COMMENT ON COLUMN "his_config_info"."encrypted_data_key" IS '秘钥'; +COMMENT ON COLUMN "his_config_info"."publish_type" IS 'publish type gray or formal'; +COMMENT ON COLUMN "his_config_info"."gray_name" IS 'gray name'; +COMMENT ON COLUMN "his_config_info"."ext_info" IS 'ext info'; +COMMENT ON TABLE "his_config_info" IS '多租户改造'; + + +-- ---------------------------- +-- Table structure for permissions +-- ---------------------------- +DROP TABLE IF EXISTS "permissions"; +CREATE TABLE "permissions" ( + "role" varchar(50) NOT NULL, + "resource" varchar(512) NOT NULL, + "action" varchar(8) NOT NULL +) +; + +-- ---------------------------- +-- Records of permissions +-- ---------------------------- +BEGIN; +COMMIT; + +-- ---------------------------- +-- Table structure for roles +-- ---------------------------- +DROP TABLE IF EXISTS "roles"; +CREATE TABLE "roles" ( + "username" varchar(50) NOT NULL, + "role" varchar(50) NOT NULL +) +; + +-- ---------------------------- +-- Records of roles +-- ---------------------------- +BEGIN; +INSERT INTO "roles" VALUES ('nacos', 'ROLE_ADMIN'); +COMMIT; + +-- ---------------------------- +-- Table structure for tenant_capacity +-- ---------------------------- +DROP TABLE IF EXISTS "tenant_capacity"; +CREATE TABLE "tenant_capacity" ( + "id" bigserial NOT NULL, + 
"tenant_id" varchar(128) NOT NULL, + "quota" int4 NOT NULL, + "usage" int4 NOT NULL, + "max_size" int4 NOT NULL, + "max_aggr_count" int4 NOT NULL, + "max_aggr_size" int4 NOT NULL, + "max_history_count" int4 NOT NULL, + "gmt_create" timestamp(6) NOT NULL, + "gmt_modified" timestamp(6) NOT NULL +) +; +COMMENT ON COLUMN "tenant_capacity"."id" IS '主键ID'; +COMMENT ON COLUMN "tenant_capacity"."tenant_id" IS 'Tenant ID'; +COMMENT ON COLUMN "tenant_capacity"."quota" IS '配额,0表示使用默认值'; +COMMENT ON COLUMN "tenant_capacity"."usage" IS '使用量'; +COMMENT ON COLUMN "tenant_capacity"."max_size" IS '单个配置大小上限,单位为字节,0表示使用默认值'; +COMMENT ON COLUMN "tenant_capacity"."max_aggr_count" IS '聚合子配置最大个数'; +COMMENT ON COLUMN "tenant_capacity"."max_aggr_size" IS '单个聚合数据的子配置大小上限,单位为字节,0表示使用默认值'; +COMMENT ON COLUMN "tenant_capacity"."max_history_count" IS '最大变更历史数量'; +COMMENT ON COLUMN "tenant_capacity"."gmt_create" IS '创建时间'; +COMMENT ON COLUMN "tenant_capacity"."gmt_modified" IS '修改时间'; +COMMENT ON TABLE "tenant_capacity" IS '租户容量信息表'; + +-- ---------------------------- +-- Records of tenant_capacity +-- ---------------------------- +BEGIN; +COMMIT; + +-- ---------------------------- +-- Table structure for tenant_info +-- ---------------------------- +DROP TABLE IF EXISTS "tenant_info"; +CREATE TABLE "tenant_info" ( + "id" bigserial NOT NULL, + "kp" varchar(128) NOT NULL, + "tenant_id" varchar(128) , + "tenant_name" varchar(128) , + "tenant_desc" varchar(256) , + "create_source" varchar(32) , + "gmt_create" int8 NOT NULL, + "gmt_modified" int8 NOT NULL +) +; +COMMENT ON COLUMN "tenant_info"."id" IS 'id'; +COMMENT ON COLUMN "tenant_info"."kp" IS 'kp'; +COMMENT ON COLUMN "tenant_info"."tenant_id" IS 'tenant_id'; +COMMENT ON COLUMN "tenant_info"."tenant_name" IS 'tenant_name'; +COMMENT ON COLUMN "tenant_info"."tenant_desc" IS 'tenant_desc'; +COMMENT ON COLUMN "tenant_info"."create_source" IS 'create_source'; +COMMENT ON COLUMN "tenant_info"."gmt_create" IS '创建时间'; +COMMENT ON COLUMN 
"tenant_info"."gmt_modified" IS '修改时间'; +COMMENT ON TABLE "tenant_info" IS 'tenant_info'; + +-- ---------------------------- +-- Records of tenant_info +-- ---------------------------- +BEGIN; +COMMIT; + +-- ---------------------------- +-- Table structure for users +-- ---------------------------- +DROP TABLE IF EXISTS "users"; +CREATE TABLE "users" ( + "username" varchar(50) NOT NULL, + "password" varchar(500) NOT NULL, + "enabled" boolean NOT NULL +) +; + +-- ---------------------------- +-- Records of users +-- ---------------------------- +BEGIN; +COMMIT; + +-- ---------------------------- +-- Indexes structure for table config_info +-- ---------------------------- +CREATE UNIQUE INDEX "uk_configinfo_datagrouptenant" ON "config_info" ("data_id","group_id","tenant_id"); + +-- ---------------------------- +-- Primary Key structure for table config_info +-- ---------------------------- +ALTER TABLE "config_info" ADD CONSTRAINT "config_info_pkey" PRIMARY KEY ("id"); + +-- ---------------------------- +-- Indexes structure for table config_tags_relation +-- ---------------------------- +CREATE INDEX "idx_tenant_id" ON "config_tags_relation" USING btree ( + "tenant_id" +); +CREATE UNIQUE INDEX "uk_configtagrelation_configidtag" ON "config_tags_relation" USING btree ( + "id", + "tag_name", + "tag_type" +); + +-- ---------------------------- +-- Primary Key structure for table config_tags_relation +-- ---------------------------- +ALTER TABLE "config_tags_relation" ADD CONSTRAINT "config_tags_relation_pkey" PRIMARY KEY ("nid"); + +-- ---------------------------- +-- Indexes structure for table group_capacity +-- ---------------------------- +CREATE UNIQUE INDEX "uk_group_id" ON "group_capacity" USING btree ( + "group_id" +); + +-- ---------------------------- +-- Primary Key structure for table group_capacity +-- ---------------------------- +ALTER TABLE "group_capacity" ADD CONSTRAINT "group_capacity_pkey" PRIMARY KEY ("id"); + +-- ---------------------------- +-- 
Indexes structure for table his_config_info +-- ---------------------------- +CREATE INDEX "idx_did" ON "his_config_info" USING btree ( + "data_id" +); +CREATE INDEX "idx_gmt_create" ON "his_config_info" USING btree ( + "gmt_create" +); +CREATE INDEX "idx_gmt_modified" ON "his_config_info" USING btree ( + "gmt_modified" +); + +-- ---------------------------- +-- Primary Key structure for table his_config_info +-- ---------------------------- +ALTER TABLE "his_config_info" ADD CONSTRAINT "his_config_info_pkey" PRIMARY KEY ("nid"); + +-- ---------------------------- +-- Indexes structure for table permissions +-- ---------------------------- +CREATE UNIQUE INDEX "uk_role_permission" ON "permissions" USING btree ( + "role", + "resource", + "action" +); + +-- ---------------------------- +-- Indexes structure for table roles +-- ---------------------------- +CREATE UNIQUE INDEX "uk_username_role" ON "roles" USING btree ( + "username", + "role" +); + +-- ---------------------------- +-- Indexes structure for table tenant_capacity +-- ---------------------------- +CREATE UNIQUE INDEX "uk_tenant_id" ON "tenant_capacity" USING btree ( + "tenant_id" +); + +-- ---------------------------- +-- Primary Key structure for table tenant_capacity +-- ---------------------------- +ALTER TABLE "tenant_capacity" ADD CONSTRAINT "tenant_capacity_pkey" PRIMARY KEY ("id"); + +-- ---------------------------- +-- Indexes structure for table tenant_info +-- ---------------------------- +CREATE UNIQUE INDEX "uk_tenant_info_kptenantid" ON "tenant_info" USING btree ( + "kp", + "tenant_id" +); diff --git a/deploy/helm/charts/nacos/sql/upgrade/.gitkeep b/deploy/helm/charts/nacos/sql/upgrade/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/deploy/helm/charts/nacos/templates/deployment.yaml b/deploy/helm/charts/nacos/templates/deployment.yaml new file mode 100644 index 000000000..760a0b052 --- /dev/null +++ b/deploy/helm/charts/nacos/templates/deployment.yaml @@ -0,0 +1,83 @@ 
+apiVersion: apps/v1 +kind: Deployment +metadata: + name: nacos + labels: + app: nacos +spec: + replicas: 1 + selector: + matchLabels: + app: nacos + template: + metadata: + labels: + app: nacos + spec: + {{- with .Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml . | nindent 8 }} + {{- end }} + containers: + - name: nacos + image: "{{ .Values.image.registry }}/{{ .Values.image.repository }}:{{ .Values.image.tag }}" + imagePullPolicy: {{ .Values.image.pullPolicy }} + command: + - /bin/sh + - -c + - | + export JAVA_OPT="${JAVA_OPT} --add-opens java.base/java.util=ALL-UNNAMED --add-opens java.base/java.lang=ALL-UNNAMED --add-opens java.base/java.lang.reflect=ALL-UNNAMED" + exec /home/nacos/bin/docker-startup.sh + ports: + - name: console + containerPort: 8080 + protocol: TCP + - name: api + containerPort: 8848 + protocol: TCP + - name: grpc + containerPort: 9848 + protocol: TCP + env: + - name: MODE + value: "standalone" + - name: PREFER_HOST_MODE + value: "hostname" + - name: SPRING_DATASOURCE_PLATFORM + value: {{ .Values.database.platform | quote }} + - name: MYSQL_SERVICE_HOST + value: {{ .Values.database.host | quote }} + - name: MYSQL_SERVICE_PORT + value: {{ .Values.database.port | quote }} + - name: MYSQL_SERVICE_DB_NAME + value: {{ .Values.database.name | quote }} + - name: MYSQL_SERVICE_USER + value: {{ .Values.database.username | quote }} + - name: MYSQL_SERVICE_PASSWORD + value: {{ .Values.database.password | quote }} + {{- if .Values.auth.identityKey }} + - name: NACOS_AUTH_IDENTITY_KEY + value: {{ .Values.auth.identityKey | quote }} + {{- end }} + {{- if .Values.auth.identityValue }} + - name: NACOS_AUTH_IDENTITY_VALUE + value: {{ .Values.auth.identityValue | quote }} + {{- end }} + {{- if .Values.auth.token }} + - name: NACOS_AUTH_TOKEN + value: {{ .Values.auth.token | quote }} + {{- end }} + {{- with .Values.resources }} + resources: + {{- toYaml . 
| nindent 12 }} + {{- end }} + readinessProbe: + tcpSocket: + port: 8848 + initialDelaySeconds: 60 + periodSeconds: 10 + livenessProbe: + tcpSocket: + port: 8848 + initialDelaySeconds: 120 + periodSeconds: 10 diff --git a/deploy/helm/charts/nacos/templates/service.yaml b/deploy/helm/charts/nacos/templates/service.yaml new file mode 100644 index 000000000..a7ee83093 --- /dev/null +++ b/deploy/helm/charts/nacos/templates/service.yaml @@ -0,0 +1,23 @@ +apiVersion: v1 +kind: Service +metadata: + name: nacos + labels: + app: nacos +spec: + type: {{ .Values.service.type }} + ports: + - port: 8080 + targetPort: 8080 + protocol: TCP + name: console + - port: 8848 + targetPort: 8848 + protocol: TCP + name: api + - port: 9848 + targetPort: 9848 + protocol: TCP + name: grpc + selector: + app: nacos diff --git a/deploy/helm/charts/nacos/values.yaml b/deploy/helm/charts/nacos/values.yaml new file mode 100644 index 000000000..872a2eaae --- /dev/null +++ b/deploy/helm/charts/nacos/values.yaml @@ -0,0 +1,33 @@ +image: + registry: nacos-registry.cn-hangzhou.cr.aliyuncs.com + repository: nacos/nacos-server + tag: "v3.2.0-BETA" + pullPolicy: Always + +imagePullSecrets: [] + +service: + type: LoadBalancer + +# 数据库连接(由 deploy.sh 通过 --set 注入,这里仅定义结构) +database: + platform: "mysql" + host: "" + port: "3306" + name: "nacos" + username: "" + password: "" + +# Auth 凭证(提供开箱即用的默认值,生产环境务必通过 --set 覆盖) +auth: + identityKey: "serverIdentity" + identityValue: "security" + token: "VGhpc0lzTXlDdXN0b21TZWNyZXRLZXkwMTIzNDU2Nzg=" + +resources: + requests: + cpu: "500m" + memory: "1Gi" + limits: + cpu: "2" + memory: "4Gi" diff --git a/deploy/helm/scripts/data/.env b/deploy/helm/scripts/data/.env index 5bc21892d..c2fb15649 100644 --- a/deploy/helm/scripts/data/.env +++ b/deploy/helm/scripts/data/.env @@ -9,10 +9,6 @@ HIGRESS_REPO_NAME=higress.io HIGRESS_REPO_URL=https://higress.cn/helm-charts HIGRESS_CHART_REF=higress.io/higress -NACOS_REPO_NAME=ygqygq2 -NACOS_REPO_URL=https://ygqygq2.github.io/charts/ 
-NACOS_CHART_REF=ygqygq2/nacos - # Himarket 镜像配置 HIMARKET_HUB=opensource-registry.cn-hangzhou.cr.aliyuncs.com/higress-group HIMARKET_IMAGE_TAG=latest @@ -52,7 +48,7 @@ AI_GATEWAY_ACCESS_KEY= AI_GATEWAY_SECRET_KEY= # Nacos 镜像配置 -NACOS_VERSION=v3.1.1 +NACOS_VERSION=v3.2.0-BETA NACOS_IMAGE_REGISTRY=nacos-registry.cn-hangzhou.cr.aliyuncs.com NACOS_IMAGE_REPOSITORY=nacos/nacos-server diff --git a/deploy/helm/scripts/deploy.sh b/deploy/helm/scripts/deploy.sh index 181433f38..2a74dfc69 100755 --- a/deploy/helm/scripts/deploy.sh +++ b/deploy/helm/scripts/deploy.sh @@ -72,9 +72,12 @@ HIGRESS_REPO_NAME="${HIGRESS_REPO_NAME:-higress.io}" HIGRESS_REPO_URL="${HIGRESS_REPO_URL:-https://higress.cn/helm-charts}" HIGRESS_CHART_REF="${HIGRESS_CHART_REF:-higress.io/higress}" -NACOS_REPO_NAME="${NACOS_REPO_NAME:-ygqygq2}" -NACOS_REPO_URL="${NACOS_REPO_URL:-https://ygqygq2.github.io/charts/}" -NACOS_CHART_REF="${NACOS_CHART_REF:-ygqygq2/nacos}" +# Nacos 本地 Chart 路径 +NACOS_CHART_PATH="${PROJECT_ROOT}/charts/nacos" +NACOS_INIT_DB_SCRIPT="${NACOS_CHART_PATH}/init-db.sh" + +# Nacos 数据库名(可在 .env 中覆盖,默认 nacos) +NACOS_DB_NAME="${NACOS_DB_NAME:-nacos}" # Nacos 版本号(可在 .env 中覆盖) NACOS_VERSION="${NACOS_VERSION}" @@ -82,17 +85,11 @@ NACOS_VERSION="${NACOS_VERSION}" # Nacos 镜像配置(可在 .env 中覆盖) NACOS_IMAGE_REGISTRY="${NACOS_IMAGE_REGISTRY:-}" NACOS_IMAGE_REPOSITORY="${NACOS_IMAGE_REPOSITORY:-}" -NACOS_PLUGIN_IMAGE_REGISTRY="${NACOS_PLUGIN_IMAGE_REGISTRY:-}" -NACOS_PLUGIN_IMAGE_REPOSITORY="${NACOS_PLUGIN_IMAGE_REPOSITORY:-}" -NACOS_PLUGIN_IMAGE_TAG="${NACOS_PLUGIN_IMAGE_TAG:-}" -NACOS_INITDB_IMAGE_REGISTRY="${NACOS_INITDB_IMAGE_REGISTRY:-}" -NACOS_INITDB_IMAGE_REPOSITORY="${NACOS_INITDB_IMAGE_REPOSITORY:-}" -NACOS_INITDB_IMAGE_TAG="${NACOS_INITDB_IMAGE_TAG:-}" - -# Nacos MySQL 镜像配置(可在 .env 中覆盖) -NACOS_MYSQL_IMAGE_REGISTRY="${NACOS_MYSQL_IMAGE_REGISTRY:-}" -NACOS_MYSQL_IMAGE_REPOSITORY="${NACOS_MYSQL_IMAGE_REPOSITORY:-}" -NACOS_MYSQL_IMAGE_TAG="${NACOS_MYSQL_IMAGE_TAG:-}" + +# Nacos Auth 配置(可在 .env 
中覆盖,不设置则使用 Helm values.yaml 默认值) +NACOS_AUTH_IDENTITY_KEY="${NACOS_AUTH_IDENTITY_KEY:-}" +NACOS_AUTH_IDENTITY_VALUE="${NACOS_AUTH_IDENTITY_VALUE:-}" +NACOS_AUTH_TOKEN="${NACOS_AUTH_TOKEN:-}" # Higress Console 用户名密码(可在 .env 中覆盖) HIGRESS_USERNAME="${HIGRESS_USERNAME:-admin}" @@ -325,9 +322,7 @@ cluster_preflight() { add_repos() { helm repo add "$HIGRESS_REPO_NAME" "$HIGRESS_REPO_URL" - helm repo add "$NACOS_REPO_NAME" "$NACOS_REPO_URL" helm repo update "$HIGRESS_REPO_NAME" - helm repo update "$NACOS_REPO_NAME" } deploy_all() { @@ -380,25 +375,49 @@ deploy_all() { log "商业化 Nacos 将在 post_ready 阶段进行初始化" else log "部署开源 Nacos..." + + # 复用 HiMarket 数据库连接信息,仅库名不同 + local nacos_db_host nacos_db_port nacos_db_user nacos_db_pass + if [[ "${HIMARKET_MYSQL_ENABLED}" == "true" ]]; then + nacos_db_host="mysql-headless-svc" + nacos_db_port="3306" + nacos_db_user="root" + nacos_db_pass=$(kubectl get secret mysql-secret -n "$NS" -o jsonpath='{.data.MYSQL_ROOT_PASSWORD}' 2>/dev/null | base64 -d 2>/dev/null || echo "") + else + nacos_db_host="${EXTERNAL_DB_HOST}" + nacos_db_port="${EXTERNAL_DB_PORT}" + nacos_db_user="${EXTERNAL_DB_USERNAME}" + nacos_db_pass="${EXTERNAL_DB_PASSWORD}" + fi + + # 初始化 Nacos 数据库(建库 + 建表,幂等可重复执行) + log "初始化 Nacos 数据库: ${NACOS_DB_NAME}..." 
+ if [[ -x "$NACOS_INIT_DB_SCRIPT" ]]; then + bash "$NACOS_INIT_DB_SCRIPT" \ + -h "$nacos_db_host" \ + -P "$nacos_db_port" \ + -u "$nacos_db_user" \ + -p "$nacos_db_pass" \ + -d "$NACOS_DB_NAME" + else + log "警告:init-db.sh 不存在或不可执行,跳过数据库初始化: ${NACOS_INIT_DB_SCRIPT}" + fi + local nacos_args=() - nacos_args+=("--set" "service.type=LoadBalancer") - nacos_args+=("--set" "mysql.enabled=true") - - [[ -n "${NACOS_MYSQL_IMAGE_REGISTRY}" ]] && nacos_args+=("--set" "mysql.image.registry=${NACOS_MYSQL_IMAGE_REGISTRY}") - [[ -n "${NACOS_MYSQL_IMAGE_REPOSITORY}" ]] && nacos_args+=("--set" "mysql.image.repository=${NACOS_MYSQL_IMAGE_REPOSITORY}") - [[ -n "${NACOS_MYSQL_IMAGE_TAG}" ]] && nacos_args+=("--set" "mysql.image.tag=${NACOS_MYSQL_IMAGE_TAG}") + nacos_args+=("--set" "database.host=${nacos_db_host}") + nacos_args+=("--set" "database.port=${nacos_db_port}") + nacos_args+=("--set" "database.name=${NACOS_DB_NAME}") + nacos_args+=("--set" "database.username=${nacos_db_user}") + nacos_args+=("--set" "database.password=${nacos_db_pass}") [[ -n "${NACOS_IMAGE_REGISTRY}" ]] && nacos_args+=("--set" "image.registry=${NACOS_IMAGE_REGISTRY}") [[ -n "${NACOS_IMAGE_REPOSITORY}" ]] && nacos_args+=("--set" "image.repository=${NACOS_IMAGE_REPOSITORY}") [[ -n "${NACOS_VERSION}" ]] && nacos_args+=("--set" "image.tag=${NACOS_VERSION}") - [[ -n "${NACOS_PLUGIN_IMAGE_REGISTRY}" ]] && nacos_args+=("--set" "plugin.image.registry=${NACOS_PLUGIN_IMAGE_REGISTRY}") - [[ -n "${NACOS_PLUGIN_IMAGE_REPOSITORY}" ]] && nacos_args+=("--set" "plugin.image.repository=${NACOS_PLUGIN_IMAGE_REPOSITORY}") - [[ -n "${NACOS_PLUGIN_IMAGE_TAG}" ]] && nacos_args+=("--set" "plugin.image.tag=${NACOS_PLUGIN_IMAGE_TAG}") - [[ -n "${NACOS_INITDB_IMAGE_REGISTRY}" ]] && nacos_args+=("--set" "initDB.image.registry=${NACOS_INITDB_IMAGE_REGISTRY}") - [[ -n "${NACOS_INITDB_IMAGE_REPOSITORY}" ]] && nacos_args+=("--set" "initDB.image.repository=${NACOS_INITDB_IMAGE_REPOSITORY}") - [[ -n "${NACOS_INITDB_IMAGE_TAG}" ]] && 
nacos_args+=("--set" "initDB.image.tag=${NACOS_INITDB_IMAGE_TAG}") - - helm_upsert "nacos" "$NS" "$NACOS_CHART_REF" "${nacos_args[@]}" - wait_rollout "$NS" "statefulset" "nacos" 900 + [[ -n "${NACOS_AUTH_IDENTITY_KEY}" ]] && nacos_args+=("--set" "auth.identityKey=${NACOS_AUTH_IDENTITY_KEY}") + [[ -n "${NACOS_AUTH_IDENTITY_VALUE}" ]] && nacos_args+=("--set" "auth.identityValue=${NACOS_AUTH_IDENTITY_VALUE}") + [[ -n "${NACOS_AUTH_TOKEN}" ]] && nacos_args+=("--set" "auth.token=${NACOS_AUTH_TOKEN}") + + helm_upsert "nacos" "$NS" "$NACOS_CHART_PATH" "${nacos_args[@]}" + wait_rollout "$NS" "deployment" "nacos" 900 fi # 3) 部署 Higress(根据开关决定是否部署) diff --git a/deploy/helm/templates/himarket-server-cm.yaml b/deploy/helm/templates/himarket-server-cm.yaml index 1f4a6698b..9152f45ac 100644 --- a/deploy/helm/templates/himarket-server-cm.yaml +++ b/deploy/helm/templates/himarket-server-cm.yaml @@ -15,4 +15,8 @@ data: DB_USERNAME: {{ .Values.database.username | quote }} DB_PASSWORD: {{ .Values.database.password | quote }} {{- end }} + # ACP 沙箱配置 + ACP_REMOTE_HOST: {{ .Values.sandbox.remoteHost | default "sandbox-shared" | quote }} + ACP_REMOTE_PORT: {{ .Values.sandbox.remotePort | default "8080" | quote }} + ACP_DEFAULT_RUNTIME: {{ .Values.sandbox.defaultRuntime | default "remote" | quote }} # 其他非敏感配置可以在这里添加 \ No newline at end of file diff --git a/deploy/helm/templates/sandbox-shared-deployment.yaml b/deploy/helm/templates/sandbox-shared-deployment.yaml new file mode 100644 index 000000000..e94dd6f6e --- /dev/null +++ b/deploy/helm/templates/sandbox-shared-deployment.yaml @@ -0,0 +1,48 @@ +{{- if .Values.sandbox.enabled }} +apiVersion: apps/v1 +kind: Deployment +metadata: + name: sandbox-shared + labels: + app: sandbox-shared +spec: + replicas: 1 + selector: + matchLabels: + app: sandbox-shared + template: + metadata: + labels: + app: sandbox-shared + spec: + {{- with .Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml . 
| nindent 8 }} + {{- end }} + containers: + - name: sandbox + image: "{{ .Values.hub }}/{{ .Values.sandbox.image.repository }}:{{ .Values.sandbox.image.tag }}" + imagePullPolicy: {{ .Values.sandbox.image.pullPolicy }} + ports: + - name: http + containerPort: 8080 + protocol: TCP + env: + - name: ALLOWED_COMMANDS + value: {{ .Values.sandbox.allowedCommands | quote }} + volumeMounts: + - name: workspace + mountPath: /workspace + {{- with .Values.sandbox.resources }} + resources: + {{- toYaml . | nindent 12 }} + {{- end }} + volumes: + - name: workspace + {{- if .Values.sandbox.persistence.enabled }} + persistentVolumeClaim: + claimName: sandbox-workspace-pvc + {{- else }} + emptyDir: {} + {{- end }} +{{- end }} diff --git a/deploy/helm/templates/sandbox-shared-pvc.yaml b/deploy/helm/templates/sandbox-shared-pvc.yaml new file mode 100644 index 000000000..448b7a226 --- /dev/null +++ b/deploy/helm/templates/sandbox-shared-pvc.yaml @@ -0,0 +1,21 @@ +{{- if and .Values.sandbox.enabled .Values.sandbox.persistence.enabled }} +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: sandbox-workspace-pvc + labels: + app: sandbox-shared +spec: + accessModes: + - {{ .Values.sandbox.persistence.accessMode }} + resources: + requests: + storage: {{ .Values.sandbox.persistence.size }} + {{- if .Values.sandbox.persistence.storageClass }} + {{- if (eq "-" .Values.sandbox.persistence.storageClass) }} + storageClassName: "" + {{- else }} + storageClassName: {{ .Values.sandbox.persistence.storageClass | quote }} + {{- end }} + {{- end }} +{{- end }} diff --git a/deploy/helm/templates/sandbox-shared-service.yaml b/deploy/helm/templates/sandbox-shared-service.yaml new file mode 100644 index 000000000..c1f0215d8 --- /dev/null +++ b/deploy/helm/templates/sandbox-shared-service.yaml @@ -0,0 +1,17 @@ +{{- if .Values.sandbox.enabled }} +apiVersion: v1 +kind: Service +metadata: + name: sandbox-shared + labels: + app: sandbox-shared +spec: + type: {{ .Values.server.service.type }} + 
ports: + - port: 8080 + targetPort: 8080 + protocol: TCP + name: http + selector: + app: sandbox-shared +{{- end }} diff --git a/deploy/helm/values.yaml b/deploy/helm/values.yaml index 36c825cc8..e19b40dda 100644 --- a/deploy/helm/values.yaml +++ b/deploy/helm/values.yaml @@ -91,4 +91,26 @@ resources: memory: 2000Mi requests: cpu: 1 - memory: 1000Mi \ No newline at end of file + memory: 1000Mi + +# 共享沙箱配置 +sandbox: + enabled: true + image: + repository: sandbox + tag: "latest" + pullPolicy: Always + resources: + requests: + cpu: "1" + memory: "1Gi" + limits: + cpu: "4" + memory: "8Gi" + allowedCommands: "qodercli,qwen,claude-agent-acp,opencode" + # 工作空间持久化配置 + persistence: + enabled: true + storageClass: "alicloud-disk-essd" # 根据实际环境调整 + size: 50Gi + accessMode: ReadWriteOnce diff --git a/docs/SANDBOX_GUIDE.md b/docs/SANDBOX_GUIDE.md new file mode 100644 index 000000000..54c0f6739 --- /dev/null +++ b/docs/SANDBOX_GUIDE.md @@ -0,0 +1,237 @@ +# Nacos-CLI 沙箱环境使用指南 + +本指南介绍如何在沙箱环境中使用 nacos-cli 和 nacos-env 配置文件实现 skill 下载到指定目录。 + +## 环境配置 + +### 0. 安装 nacos-cli + +Linux / macOS: + +``` +curl -fsSL https://nacos.io/nacos-installer.sh | sudo bash -s -- --cli -v 0.0.9 +``` + +Windows(PowerShell): + +``` +iwr -UseBasicParsing https://nacos.io/nacos-installer.ps1 -OutFile $env:TEMP\nacos-installer.ps1; & $env:TEMP\nacos-installer.ps1 -cli; Remove-Item $env:TEMP\nacos-installer.ps1 +``` + +### 1. 创建 nacos-env 配置文件 + +在沙箱环境中创建配置文件(推荐命名为 `nacos-env.yaml`): + +```yaml +# Nacos 服务器地址 +host: 127.0.0.1 + +# Nacos 服务器端口 +port: 8848 + +# 认证类型:nacos(默认)或 aliyun +authType: nacos + +# Nacos 用户名 +username: nacos + +# Nacos 密码 +password: nacos + +# 命名空间 ID(可选,留空表示 public 命名空间) +namespace: "" + +# 如果使用阿里云 Nacos,需要配置以下字段: +# accessKey: your-access-key +# secretKey: your-secret-key +``` + +### 2. 配置文件路径 + +配置文件支持以下路径格式: +- 绝对路径:`/path/to/nacos-env.yaml` +- 相对路径:`./nacos-env.yaml` +- 波浪号路径:`~/nacos-env.yaml` 或 `~/.config/nacos-env.yaml` + +## Skill 下载命令 + +### 1. 
下载 Skill + +#### 使用配置文件 + 默认目录(~/.skills) + +```bash +nacos-cli skill-get skill-creator --config ./nacos-env.yaml +``` + +```bash +nacos-cli skill-get skill1 skill2 skill3 --config ./nacos-env.yaml +``` + +#### 使用配置文件 + 指定目录 + +```bash +# 下载到绝对路径 +nacos-cli skill-get skill-creator --config ./nacos-env.yaml -o /sandbox/skills + +# 下载到相对路径 +nacos-cli skill-get skill-creator --config ./nacos-env.yaml -o ./skills + +# 下载到用户目录 +nacos-cli skill-get skill-creator --config ./nacos-env.yaml -o ~/my-skills +``` + +#### 不使用配置文件(命令行参数) + +```bash +nacos-cli skill-get skill-creator \ + --host 127.0.0.1 \ + --port 8848 \ + -u nacos \ + -p nacos \ + -o /sandbox/skills +``` + +## 沙箱环境典型使用场景 + +### 场景 1:初始化沙箱环境 + +```bash +# 1. 创建配置文件 +cat > /sandbox/nacos-env.yaml << EOF +host: 127.0.0.1 +port: 8848 +authType: nacos +username: nacos +password: nacos +namespace: "" +EOF + +# 2. 创建 skill 目录 +mkdir -p /sandbox/skills + +# 3. 下载所需的 skill +nacos-cli skill-get skill-creator --config /sandbox/nacos-env.yaml -o /sandbox/skills +nacos-cli skill-get skill-analyzer --config /sandbox/nacos-env.yaml -o /sandbox/skills +``` + +### 场景 2:持续同步开发环境 + +```bash +# 启动后台同步进程,监听所有 skill 变化 +nacos-cli skill-sync --all \ + --config /sandbox/nacos-env.yaml \ + -d /sandbox/skills +``` + +### 场景 3:使用不同命名空间 + +```bash +# 开发环境配置 +cat > /sandbox/dev-env.yaml << EOF +host: 127.0.0.1 +port: 8848 +username: nacos +password: nacos +namespace: dev +EOF + +# 生产环境配置 +cat > /sandbox/prod-env.yaml << EOF +host: 127.0.0.1 +port: 8848 +username: nacos +password: nacos +namespace: prod +EOF + +# 从开发环境下载 +nacos-cli skill-get my-skill --config /sandbox/dev-env.yaml -o /sandbox/dev-skills + +# 从生产环境下载 +nacos-cli skill-get my-skill --config /sandbox/prod-env.yaml -o /sandbox/prod-skills +``` + +## 配置优先级 + +配置参数的优先级从高到低: + +1. **命令行参数**(最高优先级) +2. **配置文件** +3. 
**默认值**(最低优先级) + +示例: + +```bash +# host 使用命令行的 10.0.0.1,其他参数使用配置文件 +nacos-cli skill-get my-skill --config ./nacos-env.yaml --host 10.0.0.1 -o /sandbox/skills +``` + +## 常见问题 + +### 1. 权限问题 + +如果遇到权限错误,确保目标目录有写入权限: + +```bash +chmod 755 /sandbox/skills +``` + +### 2. 连接失败 + +检查 Nacos 服务器是否可访问: + +```bash +curl http://127.0.0.1:8848/nacos/ +``` + +### 3. 认证失败 + +确认配置文件中的用户名和密码正确,或使用命令行参数覆盖: + +```bash +nacos-cli skill-get my-skill --config ./nacos-env.yaml -u admin -p admin123 +``` + +### 4. 命名空间问题 + +如果找不到 skill,检查是否使用了正确的命名空间: + +```bash +# 查看当前命名空间的 skill +nacos-cli skill-list --config ./nacos-env.yaml + +# 切换到其他命名空间 +nacos-cli skill-list --config ./nacos-env.yaml -n your-namespace-id +``` + +## 其他有用命令 + +### 上传 Skill + +```bash +# 上传单个 skill +nacos-cli skill-upload /sandbox/skills/my-skill --config ./nacos-env.yaml + +# 批量上传目录下所有 skill +nacos-cli skill-upload --all /sandbox/skills --config ./nacos-env.yaml +``` + +### 配置管理 + +```bash +# 列出所有配置 +nacos-cli config-list --config ./nacos-env.yaml + +# 获取特定配置 +nacos-cli config-get myconfig DEFAULT_GROUP --config ./nacos-env.yaml +``` + +## 总结 + +在沙箱环境中使用 nacos-cli 的最佳实践: + +1. 创建 `nacos-env.yaml` 配置文件 +2. 使用 `--config` 参数引用配置文件 +3. 使用 `-o` 或 `-d` 参数指定 skill 下载目录 +4. 对于开发环境,使用 `skill-sync --all` 保持实时同步 +5. 对于生产环境,使用 `skill-get` 下载特定版本的 skill diff --git a/docs/block-grouping.md b/docs/block-grouping.md new file mode 100644 index 000000000..be837d7b5 --- /dev/null +++ b/docs/block-grouping.md @@ -0,0 +1,1647 @@ +# Block Grouping (分组折叠) 算法与设计文档 + +> 面向复刻的设计参考。描述 AI 对话界面中将连续的低价值操作块折叠成分组卡片的算法思路、策略体系和交互设计。 + +--- + +## 1. 问题背景 + +AI Agent 在执行任务时会产生大量中间操作(读文件、搜索代码、思考等),这些操作对用户来说信息密度低,但数量多、占屏空间大。需要在不丢失信息的前提下,将这些操作折叠成紧凑的"活动卡片",只在用户需要时展开查看细节。 + +**核心矛盾**: + +- 用户关注的是**最终结果**(文件编辑、终端执行、回答文本) +- Agent 的**中间过程**(读文件、搜索、思考)虽有用但不紧急 +- 不同场景(对话问答 vs 自主任务 vs 快速执行)对信息可见度的需求不同 + +**举例**:Agent 收到"帮我重构这个函数"的请求后,可能依次执行: + +``` +[thought] 思考应该怎么做... 
+[tool_call] read_file("src/utils.ts") ← 探索 +[tool_call] search_codebase("usage of foo") ← 探索 +[tool_call] read_file("src/helpers.ts") ← 探索 +[message] "我发现这个函数被3处引用..." ← 短文本 +[tool_call] edit_file("src/utils.ts", ...) ← 编辑(用户需要看到) +[message] "已完成重构,主要变更是..." ← 最终回复 +``` + +理想的折叠效果:前 5 个块合并成一张"已探索 · 3 文件 1 搜索"的卡片,`edit_file` 和最终回复独立展示。 + +--- + +## 2. 整体架构 + +### 2.1 分层设计 + +``` +┌──────────────────────────────────┐ +│ UI 渲染层 │ 折叠卡片组件、展开/收起动画、状态持久化 +├──────────────────────────────────┤ +│ 分组服务层 │ 策略调度、分组循环、SubAgent 递归 +├──────────────────────────────────┤ +│ 策略层 │ 各模式的折叠规则(策略模式) +├──────────────────────────────────┤ +│ 块转换层 │ ACP 事件流 → RenderBlock 转换、聚合 +├──────────────────────────────────┤ +│ ACP 协议层 │ 流状态机、事件接收、会话管理 +├──────────────────────────────────┤ +│ 类型与分类层 │ Block 类型定义、工具分类常量 +└──────────────────────────────────┘ +``` + +各层职责边界清晰: + +- **类型与分类层**:纯数据定义,无业务逻辑。定义 Block 类型、工具分类常量、接口类型。 +- **ACP 协议层**:管理会话生命周期和流状态(Initial → Streaming → Completed 等),接收并存储原始 ACP 事件。 +- **块转换层**:将 ACP 事件流转换为 RenderBlock 列表(流式聚合、原位更新、SubAgent 分离)。 +- **策略层**:封装"什么块该折叠、分组何时结束"的判断规则,不关心如何遍历和组装。 +- **分组服务层**:执行分组循环(遍历 + flush),调用策略层做判断,计算分组元数据。 +- **UI 渲染层**:根据分组结果渲染卡片,管理展开/收起交互和状态持久化。 + +### 2.2 数据流 + +``` +ACP 事件流 (session/update) + │ + ▼ +[状态机] 更新 ACPStreamState,存储 progress + │ + ▼ +[块转换] computeBlocks: progress[] → ACPRenderBlock[] + │ + ▼ +[策略选择] 根据会话模式 + 执行模式选择折叠策略 + │ + ▼ +[预处理] 扫描所有 blocks,标记"受保护"的最后消息索引 + │ + ▼ +[分组循环] 逐块遍历,判断折叠/不折叠,连续可折叠块合并为 Group + │ + ▼ +[元数据计算] 为每个 Group 计算 isExploring / toolsSummary / isThinkingOnly 等 + │ + ▼ +[SubAgent 递归] 对嵌套的子 Agent 内部 blocks 递归执行同样的分组 + │ + ▼ +分组后的 Block 列表 (Group | 原始Block) + │ + ▼ +[UI 渲染] Group → 折叠卡片; 原始 Block → 正常渲染 +``` + +--- + +## 3. 
ACP 协议与数据流 + +Block Grouping 的输入不是凭空产生的,它依赖上游的 **ACP (Agent Client Protocol)** 协议。本章描述从 ACP 事件流到分组输入的完整数据管线。 + +### 3.1 ACP 协议概述 + +ACP 是 Agent 与 Client 之间的通信协议。Agent 在处理请求时通过 `session/update` 事件流式推送进度,Client 接收后转换为 UI 可渲染的 Block 列表。 + +**通信模型**: + +``` +Client (IDE) Agent (后端) + │ │ + │──── session/new ────────────────────►│ 创建会话 + │◄─── { sessionId } ─────────────────│ + │ │ + │──── session/prompt ─────────────────►│ 发送用户消息 + │ │ + │◄─── session/update ─────────────────│ Agent 流式推送: + │◄─── session/update ─────────────────│ 思考、工具调用、消息... + │◄─── session/update ─────────────────│ (多次) + │ │ + │ [可选:权限请求] │ + │◄─── session/request-permission ─────│ 工具需要用户确认 + │──── permission response ────────────►│ 用户批准/拒绝 + │ │ + │◄─── session/prompt response ────────│ 本轮结束(含 stopReason) + │ │ + │──── session/prompt ─────────────────►│ 下一轮... +``` + +### 3.2 session/update 事件类型 + +每个 `session/update` 事件携带一个 `SessionUpdate`,通过 `sessionUpdate` 字段区分类型: + +| sessionUpdate 类型 | 含义 | 转换为的 Block 类型 | +|-------------------|------|---------------------| +| `user_message_chunk` | 用户消息片段 | `user_message` | +| `agent_message_chunk` | Agent 文本回复片段 | `message` | +| `agent_thought_chunk` | Agent 思考过程片段 | `thought` | +| `tool_call` | 工具调用(创建) | `tool_call` | +| `tool_call_update` | 工具调用状态更新 | `tool_call`(原位更新) | +| `plan` | 任务计划变更 | `plan` | +| `memory_reference` | 记忆召回 | `memory_reference` | +| `notification` | 各类通知 | `notification` | +| `current_mode_update` | 模式切换通知 | (不产生 Block) | + +**session/prompt 响应**同样会产生 Block: + +| 事件 | 含义 | 转换为的 Block 类型 | +|------|------|---------------------| +| `session/prompt` response | 本轮结束 | `request_end` | + +### 3.3 事件元数据 (_meta) + +每个 ACP 事件携带 `_meta` 字典,提供额外上下文信息。对分组有影响的关键 meta 字段: + +| Meta Key | 含义 | 对分组的影响 | +|----------|------|-------------| +| `request-id` | 请求 ID(标识本轮对话) | 用于按轮次保护最后消息 | +| `tool-kind` | 工具类别(READ/EDIT/SEARCH/...) 
| 直接参与工具分类判断 | +| `tool-name` | 工具名称 | 参与工具分类判断(toolKind 的 fallback) | +| `file-change-mode` | 文件变更模式(Create/Modify/Delete) | Quest 模式下区分编辑操作类型 | +| `tool-call-internal-status` | 业务层工具状态 | 判断工具是否完成/失败 | +| `tool-call-error-code/message` | 工具错误信息 | 判断是否为错误工具(折叠进分组) | +| `streamed` | 消息是否流式传输 | 决定 message chunk 聚合策略 | +| `thinking-duration-millis` | 思考时长 | 传入 thought block 供分组时长计算 | +| `parent-tool-call-id` | 父工具调用 ID(SubAgent) | 关联子 Agent 数据到父 block | +| `sub-session-id` | 子会话 ID | SubAgent 分组使用 | + +### 3.4 ACP 流状态机 + +会话的流状态(`ACPStreamState`)直接影响分组算法中的多个判断: + +``` + ┌──────────┐ + │ Initial │ ← 空会话,等待用户首次输入 + └────┬─────┘ + │ 用户发送消息 + ▼ + ┌──────────┐ + │Prompting │ ← 消息已提交,等待 Agent 开始响应 + └────┬─────┘ + │ 收到第一个 session/update + ▼ + ┌──────────┐ + ┌──────│Streaming │ ← Agent 流式响应中(持续收到 update) + │ └────┬─────┘ + │ │ 收到权限请求 + ▼ │ + ┌──────────┐ │ + │Suspended │◄─────┘ ← 暂停,等待用户操作(权限确认等) + └────┬─────┘ + │ 用户确认后继续 + └──────┬─────┘ + │ session/prompt 返回 stopReason + ┌───┴────┬──────────┐ + ▼ ▼ ▼ + ┌──────────┐ ┌──────────┐ ┌───────┐ + │Completed │ │Cancelled │ │ Error │ ← 终止态 + └──────────┘ └──────────┘ └───────┘ + │ │ + └────── 用户再次发送 ────┘ + │ + ┌──────────┐ + │Prompting │ ← 新一轮开始 + └──────────┘ +``` + +**流状态对分组的影响**: + +| 状态 | 对分组的影响 | +|------|-------------| +| `Streaming` | 最后一个 group 可能 `isExploring=true`;当前轮次的最后消息不受保护 | +| `Suspended` | 同 Streaming(等待用户操作,Agent 还没结束) | +| `Prompting` | 同 Streaming(等待 Agent 开始,用户刚发了新消息) | +| `Completed` / `Cancelled` / `Error` | 所有 group 的 `isExploring=false`;所有轮次的最后消息受保护;`config.isCompleted=true` | +| `Initial` | 无 blocks,分组不执行 | + +### 3.5 从 ACP 事件到 RenderBlock 的转换 + +`computeBlocks()` 是一个纯函数,负责将 `ACPChatProgress[]` 转换为 `ACPRenderBlock[]`。这是分组算法的直接上游。 + +**核心转换逻辑**: + +``` +function computeBlocks(progressList): + blocks = [] + + // 聚合缓冲区 + currentMessageChunks = [] // 流式消息片段缓冲 + currentThoughtChunks = [] // 思考片段缓冲 + userMessageMap = Map() // 用户消息按 requestId 聚合 + + for each progress in progressList: + requestId = 
progress.meta['request-id'] + + switch progress.update.sessionUpdate: + + case 'user_message_chunk': + // 按 requestId 聚合(一条用户消息可能多个 chunk) + userMessageMap.get(requestId).push(chunk) + + case 'agent_message_chunk': + flushUserMessages() // 先输出已聚合的用户消息 + flushThoughts() // 先输出已聚合的思考 + if chunk.isStreamed: + currentMessageChunks.push(chunk) // 流式聚合 + else: + flushMessages() // 非流式:先输出之前的,再单独输出 + blocks.push(MessageBlock(chunk)) + + case 'agent_thought_chunk': + flushUserMessages() + flushMessages() // 思考到来时先输出已聚合的消息 + currentThoughtChunks.push(chunk) + // 记录 startTime、endTime、durationMillis + + case 'tool_call': + flushAll() // 工具调用到来时先输出所有缓冲 + existingBlock = blocks.findById(toolCallId) + if existingBlock: + existingBlock.update(toolCall) // 原位更新 + else: + blocks.push(ToolCallBlock(toolCall)) + + case 'tool_call_update': + flushAll() + existingBlock = blocks.findById(toolCallId) + existingBlock.updateResult(result) // 原位更新状态和结果 + + case 'plan': + flushAll() + blocks.push(PlanBlock(entries, isFirstUpdate)) + + case 'session/prompt' (stopReason): + flushAll() + blocks.push(RequestEndBlock(requestId)) + + return blocks +``` + +**关键设计**: + +1. **流式聚合**:连续的 `agent_message_chunk`(且 `isStreamed=true`)聚合为一个 `MessageBlock`。连续的 `agent_thought_chunk` 聚合为一个 `ThoughtBlock`。遇到不同类型的 update 时先 flush 缓冲区。 + +2. **原位更新**:`tool_call` 和 `tool_call_update` 不会创建新 block,而是查找已有的 `ToolCallBlock` 并原位更新其 `result`、`status` 等字段。这保证了 blocks 数组中工具调用的位置不变。 + +3. **全量计算**:每次 progress 数组变化时,从头遍历全部 progress 重新计算 blocks(而非增量计算)。这保证了正确性,虽然看起来效率较低,但 progress 数组的长度通常可控。 + +4. 
**meta 提取**:每个 block 从 ACP 事件的 `_meta` 中提取 `toolKind`、`toolName`、`fileChangeMode` 等字段,存储在 block 上供后续分组策略使用。 + +### 3.6 SubAgent 数据流 + +SubAgent 的事件也通过 ACP 流推送,但带有 `parent-tool-call-id` meta 标记。数据流特殊处理: + +``` +收到 session/update + │ + ├── 无 parent-tool-call-id → 主会话事件,正常处理 + │ + └── 有 parent-tool-call-id → SubAgent 事件 + │ + ├── 分离到 subAgentProgressMap + │ + ├── 路由到 SubAgentService(独立计算 SubAgent 的 blocks) + │ + └── SubAgent 的 blocks 关联到父级 ToolCallBlock.subAgentState + │ + └── 分组时递归处理(§5.6) +``` + +### 3.7 全链路数据流总览 + +``` +┌─────────────────────────────────────────────────────────────────────┐ +│ ACP 事件流 │ +│ WebSocket / IPC 接收 session/update 事件 │ +└──────────────────────────┬──────────────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────────────────────┐ +│ 状态机处理 │ +│ ACPProgressStateMachine: 更新 ACPStreamState │ +│ progress 追加到 session.acpStream.progress[] │ +└──────────────────────────┬──────────────────────────────────────────┘ + │ progress[] 变化触发 + ▼ +┌─────────────────────────────────────────────────────────────────────┐ +│ 块转换计算 (computeBlocks) │ +│ 1. 分离主会话 / SubAgent progress │ +│ 2. 全量遍历 progress[],聚合 + 转换为 ACPRenderBlock[] │ +│ 3. SubAgent blocks 关联到父级 ToolCallBlock │ +│ 输出: ACPRenderBlock[] │ +└──────────────────────────┬──────────────────────────────────────────┘ + │ blocks[] 变化触发 + ▼ +┌─────────────────────────────────────────────────────────────────────┐ +│ 分组服务 (BlockGroupingService) │ +│ 1. 选择策略(按 sessionType + executionMode) │ +│ 2. 预处理:标记受保护的最后消息 │ +│ 3. 核心循环:逐块判断折叠,连续可折叠块合并为 ActivityGroup │ +│ 4. 元数据计算:isExploring / toolsSummary / isThinkingOnly │ +│ 5. 
SubAgent 递归:对嵌套子 Agent 的 blocks 递归执行分组 │ +│ 输出: GroupedRenderBlock[] (ActivityGroup | ACPRenderBlock) │ +└──────────────────────────┬──────────────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────────────────────┐ +│ UI 渲染 │ +│ ActivityGroup → ActivityGroupCard(折叠卡片) │ +│ ACPRenderBlock → 正常 Block 渲染 │ +└─────────────────────────────────────────────────────────────────────┘ +``` + +**触发时机**:整个管线是**响应式**的——每当 ACP 事件到达,状态机更新 progress 数组和 streamState,这触发 `computeBlocks` 重新计算 blocks,进而触发 `useBlockGrouping` 重新分组,最终触发 UI 重渲染。在流式响应过程中,这个管线会高频执行(每收到一个 event 就走一遍)。 + +### 3.8 分组的重算策略 + +由于 `computeBlocks` 是全量计算,分组也随之全量重算: + +- **每次 progress 变化**:重新从全部 progress 计算 blocks → 重新从全部 blocks 执行分组 +- **blocks 引用变化**:React `useMemo` 的依赖项包含 blocks 数组引用,当 blocks 变化时重新分组 +- **streamState 变化**:流状态变化也会触发重新分组(因为它影响 isCompleted、isExploring、保护策略) + +**性能考量**: +- `computeBlocks` 是 O(n) 的纯函数,n 为 progress 数量 +- 分组循环也是 O(n),n 为 blocks 数量 +- 对于典型对话(几十到几百个 blocks),这个开销可以忽略 +- 如果需要优化,可以在 `computeBlocks` 层引入增量计算(但目前全量计算已足够) + +### 3.9 权限请求与分组的交互 + +ACP 支持工具执行前的权限请求(human-in-the-loop): + +``` +时间线: + Agent 发起 tool_call (run_in_terminal) + │ + ▼ + Client 收到 session/request-permission + │ + ├── StreamState → Suspended + │ + ├── toolCallId 加入 excludeToolCallIds + │ → 该工具不被折叠,等待用户确认 + │ + ▼ + 用户点击"允许" / "拒绝" + │ + ├── toolCallId 从 excludeToolCallIds 移除 + ├── toolCallId 加入 confirmedToolCallIds + │ + ├── StreamState → Streaming (Agent 继续) + │ + └── 根据 foldAfterConfirmation 配置决定: + ├── false: 已确认的工具保持独立展示 + └── true: 已确认的工具按正常规则判断是否折叠 +``` + +--- + +## 4. 
核心概念 + +### 3.1 Block 类型 + +对话流中的每个元素称为一个 Block。每个 Block 都有一个 `type` 字段和一个 `requestId` 字段(标识它属于哪次请求的响应)。 + +| 类型 | 含义 | 关键属性 | +|------|------|----------| +| **tool_call** | 工具调用 | toolName, toolKind, toolCallId, status, result, isSubAgent, fileChangeMode | +| **thought** | 模型思考过程 | chunks[], startTime, endTime, durationMillis | +| **message** | 文本消息 | chunks[](每个 chunk 含 content) | +| **user_message** | 用户消息 | (用于划分对话轮次) | +| **plan** | 任务计划 | isFirstUpdate(是否首次创建) | +| **request_end** | 请求结束标记 | (不折叠) | +| **notification** | 通知消息 | (不折叠) | +| **memory_reference** | 记忆引用 | (不折叠) | + +### 3.2 工具分类 + +将工具按"用户需要感知的程度"分为几类,这是折叠判断的基础。分类通过两种方式判断:按工具名(toolName)或按工具类别(toolKind)。 + +| 分类 | 特征 | 折叠倾向 | toolKind | 典型 toolName | +|------|------|----------|----------|---------------| +| **探索性工具** | 只读、信息收集,不影响文件系统 | 折叠 | READ, SEARCH, FETCH | read_file, list_dir, search_codebase, search_symbol, search_file, grep_code, fetch_rules, search_memory, get_problems | +| **编辑工具** | 修改文件系统,需要用户审查 | 不折叠 | EDIT, DELETE, MOVE | create_file, edit_file, search_replace, delete_file | +| **确认工具** | 需要用户交互或确认 | 不折叠 | ASK_USER_QUESTION, EXECUTE | run_in_terminal, ask_user, run_preview, switch_mode | +| **TODO 工具** | 任务管理操作 | 不折叠 | — | add_tasks, update_tasks, view_tasklist | +| **网页工具** | 网络搜索和抓取 | 视策略而定 | FETCH | search_web, fetch_content | +| **记忆工具** | 上下文记忆操作 | 视策略而定 | — | update_memory, search_memory | + +**双重判断机制**:工具分类优先通过 `toolKind` 判断(更通用),fallback 到 `toolName` 判断(具体工具名)。这样当新增工具时,只要 toolKind 正确就能自动归入正确的分类,无需修改分组代码。 + +### 3.3 工具状态 + +每个 tool_call block 都有执行状态,这影响折叠判断: + +| 状态 | 含义 | 判断方式 | +|------|------|----------| +| **pending** | 等待执行 / 执行中 | status='pending' 或无明确完成状态 | +| **completed** | 执行成功 | status='completed' 或 internalStatus=FINISHED | +| **error** | 执行失败 | status='failed' 或 internalStatus=ERROR/CANCELLED | + +**关键设计**:错误的工具调用也会被折叠(而不是独立展示),因为失败的操作对用户来说是噪音,错误详情可以在展开分组后查看。 + +### 3.4 编辑工具的操作类型 + +编辑工具有一个额外的 `fileChangeMode` 属性,区分操作类型: + +| fileChangeMode | 含义 | 在自主模式下的折叠行为 | 
+|----------------|------|------------------------| +| **MODIFY** | 修改已有文件 | 折叠(Agent 频繁修改同文件是常态) | +| 其他(新增/删除) | 创建或删除文件 | 不折叠(文件级别的变更用户需要感知) | + +### 3.5 ActivityGroup(活动分组) + +连续的可折叠 blocks 合并后的产物。它与原始 Block 共享同一个输出列表类型: + +``` +GroupedBlock = ActivityGroup | OriginalBlock +``` + +ActivityGroup 携带以下元数据: + +| 字段 | 类型 | 含义 | +|------|------|------| +| `type` | `'activity-group'` | 标识这是一个分组 | +| `blocks` | Block[] | 分组内的原始 block 列表 | +| `id` | string | 分组唯一标识,格式:`{requestId}-activity-group-{startIndex}` | +| `requestId` | string | 所属请求 ID | +| `startIndex` / `endIndex` | number | 在原始列表中的位置范围 | +| `isExploring` | boolean | 分组是否正在进行中(详见 §5.5) | +| `isThinkingOnly` | boolean | 分组是否只包含思考块和短文本(详见 §7.2) | +| `isEditOnly` | boolean | 分组是否只包含同文件编辑(详见 §7.3) | +| `editFilePath` | string? | 当 isEditOnly 时,编辑的文件路径 | +| `toolsSummary` | ToolsSummary | 工具使用统计(详见 §7.1) | +| `activityOrder` | ActivityType[] | 活动类型出现顺序 | +| `hasErrorTool` | boolean | 分组内是否有执行失败的工具调用 | +| `thinkingDurationSeconds` | number? | 思考总时长(秒)(详见 §7.4) | +| `sessionType` | enum | 所属会话类型(影响标签文案) | + +### 3.6 ActivityType(活动类型) + +将 block 映射到活动类型,用于 toolsSummary 统计和 activityOrder 记录: + +| ActivityType | 对应 block/tool | +|-------------|-----------------| +| `thinking` | thought 块 | +| `text` | message 块(短文本) | +| `file` | toolKind=READ 或 read_file | +| `list` | list_dir | +| `search` | toolKind=SEARCH 或各种 search_* | +| `web_search` | toolKind=FETCH 或 search_web, fetch_content | +| `edit` | toolKind=EDIT/DELETE/MOVE 或 create_file, edit_file 等 | +| `terminal` | toolKind=EXECUTE 或 run_in_terminal | +| `todo` | update_tasks | +| `memory` | search_memory, update_memory | +| `problems` | get_problems | + +映射优先级:**先查 toolKind,再查 toolName**。toolKind 是更抽象的分类,toolName 是具体工具名。 + +--- + +## 4. 
策略模式设计 + +### 4.1 策略接口 + +每种折叠策略需要实现四个核心方法: + +``` +interface IGroupingStrategy { + name: string // 策略名称 + supportedSessionTypes: enum[] // 支持的会话类型列表 + + // 判断单个 block 是否应该被折叠 + shouldFoldBlock(block, context, isInGroup, previousBlocks) → FoldDecision + + // 判断当前分组是否应该在此处结束(在 shouldFold=true 时才调用) + shouldEndGroup(currentGroup, nextBlock, context) → boolean + + // 构建分组的工具使用统计 + buildToolsSummary(blocks, context) → ToolsSummary + + // 获取 block 的活动类型分类 + getActivityType(block) → ActivityType | null +} +``` + +**FoldDecision**: + +``` +{ + shouldFold: boolean // 是否应该折叠 + reason: string // 原因(仅调试用,如 'exploration_tool', 'short_text') + activityType: string? // 如果折叠,它的活动类型是什么 +} +``` + +**GroupingContext**(分组上下文,传递给策略方法): + +``` +{ + sessionId: string // 会话 ID + sessionType: enum // 会话类型 + streamState: enum // 流状态(Streaming/Completed/Cancelled/Error/...) + config: GroupingConfig // 分组配置 + executionMode: 'vibe' | 'execute' // 执行模式(仅自主模式有效) + subAgentContext: { // 仅在 SubAgent 内部分组时存在 + isSubAgent: true + subAgentType: string // SubAgent 类型 + parentSessionType: enum + parentToolCallId: string + nestLevel: number // 嵌套层级 + }? +} +``` + +### 4.2 策略继承体系 + +``` +IGroupingStrategy (接口) + │ +Base (基础策略,提供完整默认实现) + │ + ├── Assistant (对话模式) + │ 覆盖: shouldFoldToolCall, shouldFoldThought + │ 差异: 更积极折叠网页/记忆/终端输出工具 + │ + └── Quest (自主任务模式) + 覆盖: shouldFoldBlock, shouldFoldToolCall, shouldFoldThought, shouldEndGroup + 差异: 编辑工具区分 modify/新增、同文件编辑聚合 + │ + └── Vibe (快速执行模式) + 覆盖: shouldFoldToolCall, shouldEndGroup + 差异: 除 SubAgent 外几乎全部折叠,不区分文件 +``` + +每个子策略只需要覆盖有差异的方法,其余行为继承自父类。 + +### 4.3 策略选择算法 + +``` +function selectStrategy(sessionType, executionMode): + if sessionType == QUEST and executionMode == 'vibe': + return strategyRegistry.getByName('vibe') // 按名称查找,不按 sessionType + return strategyRegistry.getBySessionType(sessionType) + ?? 
defaultStrategy // fallback 到 Base +``` + +**设计细节**:Vibe 策略不注册 `supportedSessionTypes`(设为空数组),只通过名称查找。这样它不会覆盖 Quest 策略在 sessionType 注册表中的位置,两者可以共存。 + +### 4.4 策略注册 + +服务启动时注册所有内置策略: + +``` +strategyMap: Map // 按 sessionType 索引 +strategyNames: Map // 按 name 索引 + +register(strategy): + for each sessionType in strategy.supportedSessionTypes: + strategyMap.set(sessionType, strategy) + strategyNames.set(strategy.name, strategy) +``` + +--- + +## 5. 分组算法详解 + +### 5.1 总体流程 + +``` +function groupBlocks(blocks, sessionId, sessionType, streamState, config, executionMode): + // 1. 合并配置 + mergedConfig = { ...DEFAULT_CONFIG, ...config } + mergedConfig.isCompleted = isTerminalState(streamState) // Completed/Cancelled/Error + + // 2. 创建上下文 + context = { sessionId, sessionType, streamState, config: mergedConfig, executionMode } + + // 3. 选择策略 + strategy = selectStrategy(sessionType, executionMode) + + // 4. 执行核心分组循环 + groupedBlocks = executeGrouping(blocks, strategy, context) + + // 5. 递归处理 SubAgent + return processSubAgentBlocks(groupedBlocks, ...) 
+``` + +### 5.2 核心分组循环(executeGrouping) + +这是整个算法的核心,是一次 O(n) 的线性扫描: + +``` +function executeGrouping(blocks, strategy, context): + result = [] + currentGroup = [] // 当前正在累积的分组 + groupStartIndex = 0 // 当前分组的起始索引 + groupRequestId = '' // 当前分组的请求 ID + isFlushingFinalGroup = false // 标记是否正在 flush 最后一个分组 + + // 预处理:找出受保护的"最后消息"索引 + protectedIndices = findProtectedMessageIndices(blocks, context) + + // 定义 flush 函数 + function flushGroup(): + if currentGroup is empty: return + + if currentGroup.length == 1: + // 单个块:再次判断是否真的该折叠 + singleBlock = currentGroup[0] + decision = strategy.shouldFoldBlock(singleBlock, context, isInGroup=false, previousBlocks=[]) + if decision.shouldFold: + group = createActivityGroup(currentGroup, ..., isFlushingFinalGroup) + result.push(group) + else: + result.push(singleBlock) // 不折叠,原样输出 + else: + // 多个块:创建分组 + group = createActivityGroup(currentGroup, ..., isFlushingFinalGroup) + result.push(group) + + currentGroup = [] + + // 主循环 + for i = 0 to blocks.length - 1: + block = blocks[i] + isInGroup = currentGroup.length > 0 + + // 受保护的最后消息:强制不折叠 + if block.type == 'message' AND protectedIndices.has(block.requestId, i): + isFlushingFinalGroup = false + flushGroup() + result.push(block) + continue + + // 调用策略判断 + decision = strategy.shouldFoldBlock(block, context, isInGroup, blocks[0..i-1]) + + if decision.shouldFold: + // 在加入分组之前,检查是否需要先结束当前分组 + if isInGroup AND strategy.shouldEndGroup(currentGroup, block, context): + isFlushingFinalGroup = false + flushGroup() + + // 加入分组 + if currentGroup is empty: + groupStartIndex = i + groupRequestId = block.requestId + currentGroup.push(block) + else: + // 不折叠:先 flush 当前分组,再输出原始块 + isFlushingFinalGroup = false + flushGroup() + result.push(block) + + // 处理最后的分组 + isFlushingFinalGroup = true // 标记这是最后一个 flush + flushGroup() + + return result +``` + +**关键细节**: + +1. 
**`isFlushingFinalGroup` 标记**:这个标记传递给 `createActivityGroup`,用于计算 `isExploring`。只有最后一个 group 才可能处于"进行中"状态。在循环中间的每次 flush,这个标记都是 false;只有在循环结束后的最后一次 flush,才设为 true。 + +2. **单个块的二次判断**:当 currentGroup 只有 1 个块时,flush 会再次调用 `shouldFoldBlock`(此时 `isInGroup=false`)。这是因为有些块(如 message)只在已有分组时才折叠(`isInGroup=true` 时折叠短文本),单独存在时不该折叠。 + +3. **shouldEndGroup 只在 shouldFold=true 时调用**:如果当前块不该折叠,直接 flush 即可,不需要额外判断分组边界。shouldEndGroup 只在"这个块该折叠,但需要检查是否放入当前组还是开始新组"时才调用。 + +### 5.3 "最后消息"保护策略 + +**设计目标**:保证每轮对话中 Agent 的最后一段文本回复始终可见,不被折叠。 + +**为什么需要这个**:Agent 的回复模式通常是:思考 → 工具调用 → 思考 → 工具调用 → ... → 最终文本回复。中间的短文本消息(如"我来看看这个文件...")可能被折叠,但最终的回复不应该被折叠。 + +**算法**: + +``` +function findProtectedMessageIndices(blocks, context): + protectedIndices = Map + + // 1. 找出所有 user_message 的位置,划分对话轮次 + userMessagePositions = [i for i in 0..blocks.length where blocks[i].type == 'user_message'] + + // 2. 遍历每个对话轮次 + for roundIndex = 0 to userMessagePositions.length - 1: + roundStart = userMessagePositions[roundIndex] + roundEnd = (roundIndex < last) ? 
userMessagePositions[roundIndex + 1] - 1 : blocks.length - 1 + + // 判断是否需要保护这一轮 + isLastRound = (roundIndex == userMessagePositions.length - 1) + isSessionInProgress = streamState in [Streaming, Prompting, Suspended] + + shouldProtect = context.isCompleted // 会话已结束 → 保护所有 + OR (isSessionInProgress AND NOT isLastRound) // 进行中 → 只保护非最后轮 + + if shouldProtect: + // 从后往前找该轮次的最后一个 message block + for i = roundEnd downto roundStart: + if blocks[i].type == 'message' AND blocks[i].requestId exists: + protectedIndices.set(blocks[i].requestId, i) + break + + return protectedIndices +``` + +**为什么进行中时不保护最后一轮**: + +考虑流式场景。Agent 边执行边输出,此时最后一轮还在进行中。如果保护当前轮的"最后 message",那么每当新的 message 块到达时,之前的"最后 message"就不再是最后的了——但它已经被保护显示出来。这会导致 UI 频繁跳动(一个 message 先被独立展示,后来又被折叠进分组)。 + +所以**只保护已完成轮次**的最后消息,当前轮的消息允许被折叠(因为后续可能还有新消息)。当会话结束时,再保护所有轮次。 + +### 5.4 flush 与 ActivityGroup 创建 + +当 `currentGroup` 被 flush 时,需要创建 ActivityGroup 并计算各种元数据: + +``` +function createActivityGroup(blocks, startIndex, requestId, strategy, context, isLastGroup): + // 1. 收集活动类型顺序 + activityOrder = [] + for block in blocks: + type = strategy.getActivityType(block) + if type != null: activityOrder.push(type) + + // 2. 构建工具汇总 + toolsSummary = strategy.buildToolsSummary(blocks, context) + + // 3. 计算 thinking 持续时间 + thinkingDurationSeconds = calculateThinkingDuration(blocks) + + // 4. 判断 isExploring + hasRunningTool = any block in blocks where: + block.type == 'tool_call' + AND NOT block.isCompleted + AND NOT block.isError + isExploring = isLastGroup + AND NOT context.config.isCompleted + AND (streamState == Streaming OR hasRunningTool) + + // 5. 判断 isThinkingOnly + hasThinking = any block.type == 'thought' + allThinkingOrMessage = every block.type in ['thought', 'message'] + isThinkingOnly = blocks.length > 0 AND hasThinking AND allThinkingOrMessage + + // 6. 判断 isEditOnly + (isEditOnly, editFilePath) = checkEditOnlyGroup(blocks) + + // 7. 
检查是否有错误工具 + hasErrorTool = any block in blocks where: + block.type == 'tool_call' + AND NOT block.isSubAgent + AND NOT block.toolKind in [SUB_AGENT, SWITCH_MODE] + AND block.isError + + return ActivityGroup { ... } +``` + +### 5.5 isExploring 判断逻辑详解 + +这是一个关键状态,直接影响卡片的 UI 表现(是否显示"进行中"动画、是否自动展开)。 + +**三个条件必须同时满足**: + +1. **是最后一个 group**:通过 `isLastGroup` 参数判断。在循环中间 flush 的 group 的 `isLastGroup=false`,只有循环结束后 flush 的最后一个 group 为 `true`。 + - **为什么**:如果后面还有其他 blocks(不管是原始 block 还是另一个 group),说明这个 group 已经结束了。只有"位于末尾的 group"才可能还在进行中。 + +2. **会话未结束**:`isCompleted` 为 false,即 streamState 不是 Completed/Cancelled/Error。 + - **为什么**:会话已结束意味着所有操作都完成了。 + +3. **组内有至少一个工具仍在执行中**:遍历组内所有 tool_call block,如果有任何一个既不是 completed 也不是 error,则认为还在执行。 + - **为什么**:即使是最后一个 group 且会话在 streaming,如果组内所有工具都已完成,那这个 group 也是"已完成"的。 + - **注意**:thought 块不影响 running 状态判断,因为 thinking 没有独立的完成状态信号。 + +**isExploring 的状态转换**: + +``` + ┌─────────────┐ + │ isExploring │ + │ = true │ + └──────┬──────┘ + │ 以下任一条件触发: + │ - 新的不可折叠 block 到达(group 不再是最后一个) + │ - 会话结束 + │ - 组内所有工具执行完毕 + ▼ + ┌─────────────┐ + │ isExploring │ + │ = false │ + └─────────────┘ +``` + +### 5.6 SubAgent 递归处理 + +Agent 可以调用子 Agent (SubAgent),子 Agent 内部也有自己的 block 列表。分组算法需要递归处理。 + +**递归算法**: + +``` +function processSubAgentBlocks(groupedBlocks, sessionType, config, executionMode, nestLevel=0): + return groupedBlocks.map(block => + if block.type == 'tool_call' AND block.isSubAgent AND block.subAgentState: + // 获取子 Agent 类型 + agentType = block.subAgentState.agentType ?? 
'general_purpose' + + // 查询是否有定制配置 + configOverride = subAgentConfigRegistry.get(agentType, sessionType, nestLevel) + + // 确定策略 + if configOverride.useCustomStrategy: + strategy = strategyRegistry.getByName(configOverride.customStrategyName) + else: + strategy = strategyRegistry.getBySessionType(sessionType) // 默认用父级策略 + + // 合并配置 + subConfig = { ...config, ...configOverride.configOverrides } + subConfig.isCompleted = isTerminalState(block.subAgentState.streamState) + + // 创建子上下文 + subContext = { + ...context, + sessionId: block.subAgentState.subSessionId, + subAgentContext: { + isSubAgent: true, + subAgentType: agentType, + parentSessionType: sessionType, + parentToolCallId: block.subAgentState.parentToolCallId, + nestLevel: nestLevel, + } + } + + // 执行分组 + subGroupedBlocks = executeGrouping(block.subAgentState.blocks, strategy, subContext) + + // 递归处理嵌套的 SubAgent(nestLevel + 1) + subGroupedBlocks = processSubAgentBlocks(subGroupedBlocks, ..., nestLevel + 1) + + // 返回更新后的 block + return { ...block, subAgentState: { ...block.subAgentState, groupedBlocks: subGroupedBlocks } } + else: + return block + ) +``` + +**设计点**: + +- SubAgent 的 block 列表存储在 `tool_call.subAgentState.blocks` 中 +- 分组后的结果写入 `subAgentState.groupedBlocks`(不修改原始 blocks) +- 默认使用父级策略,但支持通过 `ISubAgentGroupingConfigProvider` 注册定制 +- `nestLevel` 追踪嵌套层级,传递给策略供参考 + +--- + +## 6. 
策略折叠规则详解 + +### 6.1 shouldFoldBlock 的判断流程 + +所有策略共享一个 **通用前置检查**(在 Base 策略中实现,子类通常不覆盖这部分): + +``` +function shouldFoldBlock(block, context, isInGroup, previousBlocks): + // ===== 通用前置检查(仅对 tool_call 类型)===== + if block.type == 'tool_call': + toolCallId = block.toolCallId + + // 优先级 1:待用户确认 → 不折叠 + if toolCallId in config.excludeToolCallIds: + return { shouldFold: false, reason: 'pending_confirmation' } + + // 优先级 2:模式切换工具 → 不折叠 + if block.toolKind in [SWITCH_MODE, SWITCH_MODE_ENTER]: + return { shouldFold: false, reason: 'switch_mode' } + + // 优先级 3:SubAgent 工具 → 不折叠 + if block.isSubAgent OR block.toolKind == SUB_AGENT OR block.toolName == 'task': + return { shouldFold: false, reason: 'subagent_tool' } + + // 优先级 4:执行出错 → 折叠(错误信息留在分组内) + if block.isError: + return { shouldFold: true, reason: 'tool_error', activityType: ... } + + // 优先级 5:已确认的工具 → 根据配置决定 + if toolCallId in config.confirmedToolCallIds: + if NOT config.foldAfterConfirmation: + return { shouldFold: false, reason: 'confirmed_no_fold' } + // else: 继续下面的分类判断 + + // ===== 按块类型分发 ===== + switch block.type: + case 'tool_call': return shouldFoldToolCall(block, context, isInGroup) + case 'thought': return shouldFoldThought(block, context, isInGroup) + case 'message': return shouldFoldMessage(block, context, isInGroup) + case 'user_message': return { shouldFold: false } + case 'plan': return handlePlanBlock(block, context) + case 'request_end': return { shouldFold: false } + default: return { shouldFold: false } +``` + +### 6.2 Base 策略的 shouldFoldToolCall + +``` +function shouldFoldToolCall(block, context, isInGroup): + if NOT config.groupToolCalls: return { shouldFold: false } + + toolInfo = extractToolInfo(block) + + // 编辑工具 → 不折叠 + if isEditingTool(toolInfo): return { shouldFold: false } + + // 确认工具 → 不折叠 + if isConfirmationTool(toolInfo): return { shouldFold: false } + + // TODO 工具 → 不折叠 + if isTodoTool(toolInfo): return { shouldFold: false } + + // 探索性工具 → 折叠 + if isExplorationTool(toolInfo): + return { 
shouldFold: true, activityType: mapToActivityType(toolInfo) } + + // 网页工具 → 根据配置 + if isWebTool(toolInfo): + return { shouldFold: config.groupSearchTools, activityType: 'web_search' } + + // 其他 → 不折叠(保守策略) + return { shouldFold: false } +``` + +### 6.3 Base 策略的 shouldFoldThought + +``` +function shouldFoldThought(block, context, isInGroup): + if NOT config.groupThinking: return { shouldFold: false } + return { shouldFold: true, activityType: 'thinking' } +``` + +**注意**:thinking 块可以**开启**一个新的分组(即使当前不在分组中)。但如果分组里只有 thinking(和短文本),UI 上会特殊处理(显示"思考了 Ns"而非"探索中")。 + +### 6.4 Base 策略的 shouldFoldMessage + +``` +function shouldFoldMessage(block, context, isInGroup): + if NOT config.groupText: return { shouldFold: false } + + // 关键:message 块不能开启分组,只能加入已有分组 + if NOT isInGroup: return { shouldFold: false } + + text = extractMessageText(block) + if isShortText(text): + return { shouldFold: true, activityType: 'text' } + return { shouldFold: false } +``` + +**短文本判断算法**: + +``` +function isShortText(text, maxLength=150, maxLines=2): + if text is null/undefined: return false + if text == '': return true + + // 包含代码块 → 不是短文本(代码块展开后可能很长) + if text contains '```' or '`': return false + + return text.length <= maxLength AND text.split('\n').length <= maxLines +``` + +**设计意图**:Agent 在工具调用间隙可能输出"让我看看这个文件..."之类的短消息。如果这段消息出现在一个正在累积的分组中间,就把它一起折叠进去。但如果它不在分组中(前面没有被折叠的块),就独立展示。 + +### 6.5 Base 策略的 shouldEndGroup + +``` +function shouldEndGroup(currentGroup, nextBlock, context): + if nextBlock is null: return true + + // 如果下一个块(在加入分组的语境下)不应该折叠,就结束 + decision = shouldFoldBlock(nextBlock, context, isInGroup=true, currentGroup) + return NOT decision.shouldFold +``` + +这是最简单的分组边界规则:连续的可折叠块就一直累积,遇到不可折叠块就断开。 + +### 6.6 Assistant 策略的覆盖 + +只覆盖 `shouldFoldToolCall` 和 `shouldFoldThought`: + +``` +// shouldFoldThought: 无条件折叠(不受 config.groupThinking 控制) +function shouldFoldThought(block, context, isInGroup): + return { shouldFold: true, activityType: 'thinking' } + +// shouldFoldToolCall: 在 Base 基础上增加几种折叠 
+function shouldFoldToolCall(block, context, isInGroup): + ... // Base 的所有判断保持不变,额外增加: + + // 记忆搜索 → 折叠(记忆更新不折叠,因为它代表一个可见的操作) + if toolInfo.toolName == 'search_memory': + return { shouldFold: true, activityType: 'memory' } + + // 终端输出读取 → 折叠(这只是读取之前命令的输出) + if toolInfo.toolName == 'get_terminal_output': + return { shouldFold: true, activityType: 'terminal' } + + // 网页工具 → 无条件折叠(Base 是根据配置决定) + if isWebTool(toolInfo): + return { shouldFold: true, activityType: 'web_search' } +``` + +### 6.7 Quest 策略的覆盖 + +Quest 覆盖了 `shouldFoldBlock`、`shouldFoldToolCall`、`shouldFoldThought` 和 `shouldEndGroup`。 + +**shouldFoldBlock 覆盖**:增加 plan 块的处理 + +``` +function shouldFoldBlock(block, context, isInGroup, previousBlocks): + // 待确认的工具检查(与 Base 相同) + if block.type == 'tool_call' AND block.toolCallId in excludeList: + return { shouldFold: false } + + // tool_call 直接调用 Quest 的 shouldFoldToolCall(带 previousBlocks) + if block.type == 'tool_call': + return shouldFoldToolCall(block, context, isInGroup, previousBlocks) + + // plan 块特殊处理 + if block.type == 'plan': + if block.isFirstUpdate: + return { shouldFold: false } // 首次创建 → 展示 + return { shouldFold: true, activityType: 'todo' } // 后续更新 → 折叠 + + // 其他类型回退到 Base + return super.shouldFoldBlock(...) +``` + +**shouldFoldToolCall 覆盖**:编辑工具的 modify 区分 + +``` +function shouldFoldToolCall(block, context, isInGroup, previousBlocks): + if NOT config.groupToolCalls: return { shouldFold: false } + if isSubAgent(block): return { shouldFold: false } + + toolInfo = extractToolInfo(block) + + if toolInfo.toolKind in [SWITCH_MODE, SWITCH_MODE_ENTER]: + return { shouldFold: false } + + // 错误 → 折叠 + if toolInfo.isError: + return { shouldFold: true, activityType: ... 
} + + // 编辑工具 → 区分 modify 和 新增/删除 + if isEditingTool(toolInfo) OR isEditingKind(toolInfo): + if NOT toolInfo.isModify: + return { shouldFold: false } // 新增/删除 → 不折叠 + else: + return { shouldFold: true, activityType: 'edit' } // modify → 折叠 + + // 确认工具 → 不折叠 + if isConfirmationTool(toolInfo): return { shouldFold: false } + + // 探索性工具 → 折叠(同时支持 toolName 和 toolKind 判断) + if isExplorationTool(toolInfo) OR isExplorationKind(toolInfo): + return { shouldFold: true, activityType: ... } + + // 网页工具 → 仅 FETCH 类折叠(搜索结果页面不折叠) + if toolInfo.toolKind == FETCH: + return { shouldFold: true, activityType: 'web_search' } + + // 记忆工具 → 折叠(包括 update_memory,与 Assistant 不同) + if toolInfo.toolName in ['search_memory', 'update_memory']: + return { shouldFold: true, activityType: 'memory' } + + return { shouldFold: false } +``` + +**shouldEndGroup 覆盖**:同文件编辑聚合(Quest 最复杂的部分) + +``` +function shouldEndGroup(currentGroup, nextBlock, context): + if nextBlock is null: return true + + // SubAgent 到来 → 结束(SubAgent 需要独立展示为任务卡片) + if nextBlock.type == 'tool_call' AND isSubAgent(nextBlock): return true + + // 首次创建的 plan → 结束(计划需要独立展示) + if nextBlock.type == 'plan' AND nextBlock.isFirstUpdate: return true + + // ===== 编辑聚合逻辑 ===== + hasEditInGroup = 当前组内是否有可聚合的编辑工具(isModify 或 isError 的编辑) + editFilePath = 组内第一个能提取到路径的编辑工具的文件路径 + + if nextBlock.type == 'tool_call': + nextToolInfo = extractToolInfo(nextBlock) + isNextEditing = isEditingTool(next) OR isEditingKind(next) + isNextAggregable = isNextEditing AND (nextToolInfo.isModify OR nextToolInfo.isError) + + // 新增/删除文件 → 结束(不参与聚合,需要独立展示) + if isNextEditing AND NOT nextToolInfo.isModify AND NOT nextToolInfo.isError: + return true + + // 当前组无编辑 + 下一个是可聚合编辑 → 结束 + //(不让编辑混进前面的探索工具组,让编辑开始自己的组) + if NOT hasEditInGroup AND isNextAggregable: + return true + + // 当前组有编辑 + if hasEditInGroup: + // 下一个不是可聚合编辑 → 结束(编辑组不混入非编辑块) + if NOT isNextAggregable: return true + + // 下一个是可聚合编辑,检查文件路径 + if editFilePath is not null: + nextFilePath = extractFilePath(nextBlock) + 
if nextFilePath is not null AND editFilePath != nextFilePath: + return true // 不同文件 → 结束 + + // 同文件 或 路径无法提取(如失败的编辑)→ 继续聚合 + return false + + // 当前组有编辑 + 下一个不是 tool_call → 结束 + if hasEditInGroup: return true + + // 当前组无编辑 → 使用默认规则 + return super.shouldEndGroup(currentGroup, nextBlock, context) +``` + +**视觉化示例**(Quest 模式): + +``` +输入序列: + [read_file A] [search X] [edit_file A (modify)] [edit_file A (modify)] [edit_file B (modify)] [message] + +分组结果: + Group 1: [read_file A, search X] → "已探索 · 1文件 1搜索" + Group 2: [edit_file A, edit_file A] → "已执行 · 2编辑" (同文件A聚合) + Group 3: [edit_file B] → 平铺展示 (不同文件B独立) + 原始: [message] → 正常渲染 + +输入序列(含新增文件): + [read_file] [edit_file A (modify)] [create_file B] [edit_file B (modify)] + +分组结果: + Group 1: [read_file] → "已探索 · 1文件" + Group 2: [edit_file A] → 平铺展示 (单个 modify) + 原始: [create_file B] → 正常渲染 (新增不折叠) + Group 3: [edit_file B] → 平铺展示 (后续 modify) +``` + +### 6.8 Vibe 策略的覆盖 + +继承自 Quest,进一步激进化: + +**shouldFoldToolCall**: + +``` +function shouldFoldToolCall(block, context, isInGroup): + if NOT config.groupToolCalls: return { shouldFold: false } + + // SubAgent → 不折叠(这是唯一不折叠的) + if isSubAgent(block): return { shouldFold: false } + + toolInfo = extractToolInfo(block) + + // 确认工具中的特殊处理 + if isConfirmationTool(toolInfo): + // 终端工具 → 折叠(Vibe 下终端也折叠) + if isTerminalTool(block): return { shouldFold: true } + // 模式切换 → 不折叠 + if toolKind in [SWITCH_MODE, SWITCH_MODE_ENTER]: return { shouldFold: false } + // 其他确认工具 → 不折叠 + return { shouldFold: false } + + // 其他所有工具 → 折叠 + return { shouldFold: true, activityType: mapToActivityType(toolInfo) ?? 
'edit' } +``` + +**shouldEndGroup**: + +``` +function shouldEndGroup(currentGroup, nextBlock, context): + if nextBlock is null: return true + + // SubAgent → 结束 + if isSubAgent(nextBlock): return true + + // 首次 plan → 结束 + if nextBlock.type == 'plan' AND nextBlock.isFirstUpdate: return true + + // tool_call → 不结束!(所有连续工具都在同一组,不管什么工具、什么文件) + if nextBlock.type == 'tool_call': return false + + // 非 tool_call → 用 Base 的逻辑(跳过 Quest 的同文件检查) + decision = shouldFoldBlock(nextBlock, context, isInGroup=true, currentGroup) + return NOT decision.shouldFold +``` + +**设计理念**:Vibe 模式的用户选择了"快速执行",意味着高度信任 Agent。所有中间过程(包括编辑、终端执行)都折叠,只展示 SubAgent 卡片和最终回复。 + +--- + +## 7. 分组元数据计算 + +### 7.1 ToolsSummary(工具汇总统计) + +``` +function buildToolsSummary(blocks, context): + summary = { files: 0, directories: 0, searches: 0, todos: 0, + terminalCommands: 0, edits: 0, toolCalls: 0, + problems: 0, hasProblemTool: false, hasListDirTool: false } + + filePathSet = Set() // 文件去重用 + + for block in blocks where block.type == 'tool_call': + activityType = getActivityTypeForTool(block) + + switch activityType: + case 'file': + filePath = extractFilePath(block) + if filePath: filePathSet.add(filePath) + else: summary.files++ // 无法提取路径时退化为计数 + case 'list': + summary.hasListDirTool = true + summary.directories++ + case 'search', 'web_search': + summary.searches++ + case 'terminal': + summary.terminalCommands++ + case 'todo': + if block.toolName == 'update_tasks': // 只统计更新,不统计查看 + summary.todos++ + case 'edit': + if context.sessionType == QUEST: + summary.edits++ // 只有自主模式统计编辑数 + else: + summary.toolCalls++ // 其他模式归入兜底(因为编辑在这些模式下不折叠,走到这里说明是错误的编辑) + case 'problems': + summary.hasProblemTool = true + summary.problems += extractResultArrayLength(block) // 按结果数组长度统计 + default: + summary.toolCalls++ // 兜底 + + summary.files += filePathSet.size // 加上去重后的文件数 + return summary +``` + +**文件路径提取的优先级**: +1. `result.locations[0].path` — 结构化位置信息 +2. `result.content` 中 `type='diff'` 的 `path` — diff 内容中的路径 +3. 
`rawInput.file_path / filePath / path / file` — 原始输入参数 + +### 7.2 isThinkingOnly 判断 + +``` +isThinkingOnly = blocks.length > 0 + AND blocks 中至少有一个 thought 块 + AND blocks 中所有块的 type 都是 'thought' 或 'message' +``` + +**用途**: +- 影响卡片标签:isThinkingOnly 的组显示"思考了 Ns"而非"已探索" +- 影响卡片摘要:isThinkingOnly 的组不显示摘要(标题已经包含了时长信息) +- 影响特殊渲染:isThinkingOnly 且 blocks <= 2 时平铺展示(不包装成卡片) + +### 7.3 isEditOnly 判断 + +``` +function checkEditOnlyGroup(blocks): + editFilePath = null + hasNonEdit = false + + for block in blocks: + if block.type == 'tool_call': + toolInfo = extractToolInfo(block) + if isEditingTool(toolInfo) OR isEditingKind(toolInfo): + filePath = extractFilePath(block) + if editFilePath is null: + editFilePath = filePath + else if editFilePath != filePath: + return { isEditOnly: false } // 不同文件 + continue + + // 遇到非编辑块(包括 thought、message 等) + hasNonEdit = true + break + + isEditOnly = editFilePath is not null AND NOT hasNonEdit + return { isEditOnly, editFilePath: isEditOnly ? editFilePath : null } +``` + +**用途**:在自主模式下,编辑组可以有特殊的标签和摘要(如显示文件名)。 + +### 7.4 思考时长计算 + +``` +function calculateThinkingDuration(blocks): + totalSeconds = null + + for block in blocks where block.type == 'thought': + millis = block.durationMillis + ?? (block.endTime - block.startTime) // fallback + ?? null + + if millis is not null: + // 关键:先单独取整,再累加 + ceiledSeconds = ceil(millis / 1000) + totalSeconds = (totalSeconds ?? 
0) + ceiledSeconds + + return totalSeconds +``` + +**为什么先取整再累加**: + +考虑两个思考块,分别是 1.2s 和 1.3s: +- 方案 A(先加后整):1200 + 1300 = 2500ms → ceil(2.5) = **3s** +- 方案 B(先整后加):ceil(1.2) + ceil(1.3) = 2 + 2 = **4s** + +方案 B 与单个块独立展示时的时长一致(单独显示时分别是 2s 和 2s),聚合后也是 4s。选择方案 B 保证一致性。 + +### 7.5 hasErrorTool 检查 + +``` +function checkHasErrorTool(blocks): + for block in blocks where block.type == 'tool_call': + // 排除 SubAgent 和模式切换(它们的错误有独立的展示方式) + if block.isSubAgent: continue + if block.toolKind in [SUB_AGENT, SWITCH_MODE, SWITCH_MODE_ENTER]: continue + + if block.isError: return true + return false +``` + +**用途**:当 `hasErrorTool=true` 且分组只有一个块时,决定是否仍然渲染为卡片形式。 + +--- + +## 8. UI 交互设计 + +### 8.1 折叠卡片状态机 + +每个 ActivityGroupCard 的展开/收起状态受三个因素控制: + +``` + ┌───────────────────────────────┐ + │ ActivityGroupCard 状态 │ + │ │ + │ isExpanded: boolean │ + │ hasManuallyToggled: boolean │ + │ prevIsExploring: boolean │ + └───────────────────────────────┘ + +初始化: + isExpanded = (autoExpandWhileExploring AND group.isExploring) OR defaultExpanded + hasManuallyToggled = false + prevIsExploring = group.isExploring + +当 group.isExploring 变化时 (自动逻辑): + if hasManuallyToggled: return // 用户手动操作过 → 不再自动控制 + + if prevIsExploring == true AND group.isExploring == false: + // "进行中" → "已完成" + if autoCollapseOnComplete: isExpanded = false + + if prevIsExploring == false AND group.isExploring == true: + // "已完成" → "进行中"(新的 group 开始流入) + if autoExpandWhileExploring: isExpanded = true + + prevIsExploring = group.isExploring + +当用户点击时 (手动逻辑): + hasManuallyToggled = true // 标记为手动操作,永久禁用自动逻辑 + isExpanded = NOT isExpanded +``` + +**设计关键**:`hasManuallyToggled` 是**单向标记**(一旦设为 true 就不会变回 false)。这保证了用户的手动操作永远优先于系统的自动行为。即使后续 isExploring 再次变化,也不会干扰用户手动设定的状态。 + +### 8.2 特殊渲染规则(何时不渲染为卡片) + +尽管数据层已经创建了 ActivityGroup,但 UI 层不一定要将它渲染为折叠卡片。某些情况下"平铺"更好: + +``` +function renderActivityGroup(group, config): + // 规则 1:纯思考组且内容少 → 平铺 + if group.isThinkingOnly AND group.blocks.length <= 2: + return renderTileBlocks(group.blocks) + + // 规则 
2:单个工具且无错误 → 平铺
+  if group.blocks.length == 1:
+    if NOT group.hasErrorTool:
+      return renderTileBlocks(group.blocks)
+    else if NOT config.foldSingleTool:
+      return renderTileBlocks(group.blocks) // 出错但未开启单工具折叠 → 仍平铺
+
+  // 规则 3:标准折叠卡片
+  return renderFoldableCard(group)
+```
+
+**规则 1 的原因**:1-2 个思考块单独包装成卡片视觉效果不好(太轻量了不值得额外包装),直接平铺展示更自然。3 个以上思考块才值得折叠。
+
+**规则 2 的原因**:单个工具调用不需要"打开卡片才能看到内容"的额外交互步骤。但如果这个工具出了错,可能需要折叠以减少视觉干扰(按 `foldSingleTool` 配置决定:`foldSingleTool=true` 时渲染为折叠卡片,否则仍平铺)。
+
+### 8.3 折叠卡片标题算法
+
+```
+function getActivityGroupLabel(group):
+  // 纯思考组(已完成)→ "思考了 Ns"
+  if group.isThinkingOnly AND NOT group.isExploring:
+    seconds = max(group.thinkingDurationSeconds ?? 1, 1)
+    return formatThinkingDuration(seconds) // 如 "thought for 3s"
+
+  // 根据会话类型和进行状态选择标签
+  if group.sessionType == QUEST:
+    return group.isExploring ? "执行中" : "已执行" // i18n
+  else:
+    return group.isExploring ? "探索中" : "已探索" // i18n
+```
+
+### 8.4 折叠卡片摘要算法
+
+```
+function getActivityGroupSummary(group):
+  // 纯思考组不需要摘要(标题已经是"思考了 Ns")
+  if group.isThinkingOnly: return ''
+
+  summary = group.toolsSummary
+  parts = []
+
+  // 按固定优先级添加各维度
+  if summary.files > 0: parts.push(formatSummary('file', summary.files, group.sessionType))
+  if summary.searches > 0: parts.push(formatSummary('search', summary.searches, group.sessionType))
+  if summary.todos > 0: parts.push(formatSummary('todo', summary.todos, group.sessionType))
+  if summary.edits > 0: parts.push(formatSummary('edit', summary.edits, group.sessionType))
+  if summary.hasProblemTool: parts.push(formatSummary('problem', summary.problems, group.sessionType))
+  if summary.hasListDirTool: parts.push(formatSummary('directory', summary.directories, group.sessionType))
+  if summary.terminalCommands > 0: parts.push(formatSummary('terminal', summary.terminalCommands, group.sessionType))
+
+  // 如果前面已经有内容,不再显示兜底的 toolCalls
+  if parts.length > 0: return parts.join(' ')
+
+  // 兜底
+  if summary.toolCalls > 0: parts.push(formatSummary('toolCall', summary.toolCalls, group.sessionType))
+  return parts.join(' ')
+```
+ 
+**不同模式的摘要格式差异**: + +| 模式 | 格式 | 示例 | +|------|------|------| +| 对话模式 | 纯数字+名词 | "3 文件 5 搜索" | +| 自主模式 | 数字+量词+名词 | "3 个文件 5 次检索" | + +### 8.5 摘要的展示规则 + +- **进行中**(`isExploring=true`):只显示标签(如"探索中..."),**不显示摘要**(因为数字还在变化) +- **已完成**(`isExploring=false`):显示标签 + 摘要 + +### 8.6 展开状态持久化 + +**问题**:ActivityGroup 内部的子 block 可能有自己的展开/折叠状态(如思考块可以展开查看详情)。当 ActivityGroup 的数据因为流式更新而重新渲染时,子组件会卸载再挂载,导致展开状态丢失。 + +**方案**:将展开状态提升到会话级别,独立于组件生命周期。 + +``` +interface IBlockExpandStateManager { + getExpandState(blockId: string) → boolean | undefined + setExpandState(blockId: string, expanded: boolean) → void + toggleExpandState(blockId: string) → boolean + clearExpandState(blockId: string) → void + clearAllExpandStates() → void +} + +// 内部实现: +storage = Map // blockId → expanded +sessionId = current session ID // 当 sessionId 变化时清空 storage + +// Hook 封装: +function useBlockExpandState(blockId, defaultExpanded=false): + manager = getManagerFromContext() + [isExpanded, setLocal] = useState(manager.get(blockId) ?? defaultExpanded) + + setExpanded = (expanded) => + manager.set(blockId, expanded) + setLocal(expanded) + + toggleExpanded = () => + setLocal(prev => { newState = !prev; manager.set(blockId, newState); return newState }) + return !isExpanded + + return [isExpanded, setExpanded, toggleExpanded] +``` + +**关键设计**: +- 状态存储使用 `useRef`(Map 引用不变),不会因状态变化触发无关重渲染 +- 通过 React Context 在组件树中共享 manager 实例 +- `sessionId` 变化时自动清除旧状态,避免状态泄漏 + +### 8.7 分组内子组件的 UI 调整 + +通过上下文机制告知子组件"你在分组内": + +``` +// 在 ActivityGroupCard 渲染子 block 时: + + {group.blocks.map(block => renderBlock(block))} + + +// 子组件中: +function ToolCallComponent({ block }): + { isInActivityGroup } = useActivityGroupContext() + if isInActivityGroup: + // 隐藏冗余图标、简化展示 +``` + +### 8.8 折叠/展开动画 + +使用 CSS Grid 实现平滑的高度过渡: + +```css +/* 折叠/展开动画容器 */ +.content-wrapper { + display: grid; + grid-template-rows: 0fr; /* 折叠态:0 高度 */ + transition: grid-template-rows 0.25s ease-out; + overflow: hidden; +} + +.content-wrapper.expanded { + 
grid-template-rows: 1fr; /* 展开态:自然高度 */ +} + +.content-wrapper > .content { + min-height: 0; /* 允许收缩到 0 */ + overflow: hidden; +} +``` + +**为什么用 CSS Grid 而不是 `max-height`**:`max-height` 需要知道内容的具体高度(或设一个很大的值,但会导致动画时长不自然)。CSS Grid 的 `0fr → 1fr` 过渡不需要知道具体高度,动画效果更平滑。 + +--- + +## 9. 配置体系 + +### 9.1 配置项全表 + +| 配置项 | 类型 | 默认值 | 含义 | +|--------|------|--------|------| +| `groupToolCalls` | bool | true | 总开关:是否启用工具调用分组 | +| `groupThinking` | bool | true | 是否分组思考块 | +| `groupText` | bool | true | 是否分组短文本(仅在已有分组中生效) | +| `groupedTextMaxLength` | number | 150 | 短文本最大长度阈值(字符数) | +| `groupedTextMaxLines` | number | 2 | 短文本最大行数阈值 | +| `foldSingleTool` | bool | false | 单个工具时是否渲染为折叠卡片 | +| `autoExpandWhileExploring` | bool | true | 进行中时是否自动展开卡片 | +| `autoCollapseOnComplete` | bool | true | 完成时是否自动收起卡片 | +| `excludeToolCallIds` | string[] | [] | 待确认的工具 ID(这些不折叠) | +| `confirmedToolCallIds` | string[] | [] | 已确认的工具 ID | +| `foldAfterConfirmation` | bool | false | 工具确认后是否折叠回去 | +| `groupTodos` | bool | true | 是否分组 TODO 操作 | +| `groupSearchTools` | bool | true | 是否分组搜索类工具 | +| `groupReadFile` | bool | true | 是否分组读文件操作 | +| `isCompleted` | bool | (自动) | 会话是否已结束(由 streamState 自动推导) | + +### 9.2 配置合并 + +``` +最终配置 = { ...DEFAULT_CONFIG, ...用户覆盖配置 } +最终配置.isCompleted = isTerminalState(streamState) // 始终由流状态推导 +``` + +### 9.3 工具确认机制 + +在需要用户确认的场景(如 human-in-the-loop),配置项配合工作: + +1. 工具进入待确认状态 → 加入 `excludeToolCallIds` → 不折叠(用户需要看到并确认) +2. 用户确认 → 从 `excludeToolCallIds` 移除,加入 `confirmedToolCallIds` +3. 确认后的行为由 `foldAfterConfirmation` 决定: + - `false`(默认):确认后仍然独立展示 + - `true`:确认后按正常折叠规则处理(可能被折叠进分组) + +--- + +## 10. 复刻要点总结 + +### 10.1 最小可行实现 + +1. **定义工具分类**(探索/编辑/确认),这是折叠判断的基础 +2. **实现核心分组循环**(§5.2),线性扫描 + flush 机制 +3. **实现一套基础折叠规则**(探索工具折叠、编辑/确认不折叠、错误工具折叠) +4. **渲染折叠卡片**,支持点击展开/收起 +5. **实现工具汇总统计**(文件去重、搜索计数),显示在卡片摘要中 + +### 10.2 进阶特性(按优先级) + +6. **最后消息保护**(§5.3):保证每轮对话的最后文本输出不被折叠 +7. **isExploring 自动展开/收起**(§8.1):流式场景下的动态卡片状态 +8. **手动操作保护**(§8.1):用户手动操作后停止自动行为 +9. **策略模式**(§4):不同场景使用不同折叠规则 +10. 
**同文件编辑聚合**(§6.7):自主模式下对同文件的连续修改合并成一组 +11. **SubAgent 递归**(§5.6):嵌套子任务的分组处理 +12. **展开状态持久化**(§8.6):避免重渲染导致的子 block 展开状态丢失 +13. **特殊渲染规则**(§8.2):纯思考组、单工具组的平铺优化 + +### 10.3 设计原则 + +- **默认安全**:未知工具类型不折叠(保守策略),宁可多展示也不误折叠 +- **错误收敛**:失败的工具调用折叠起来,减少对主流程的视觉干扰 +- **尊重用户**:用户手动操作后不再自动干预,手动意图永远优先 +- **展示一致性**:思考时长等指标在单独展示和聚合展示中保持一致 +- **双重判断**:工具分类同时支持 toolKind(通用)和 toolName(具体),新增工具无需改分组代码 +- **可配置**:核心行为通过配置项控制,便于不同产品形态调整 +- **渐进增强**:策略继承体系让新模式只需覆盖差异部分,减少代码重复 diff --git a/docs/nacos/himarket-nacos-skill-integration-spec.md b/docs/nacos/himarket-nacos-skill-integration-spec.md new file mode 100644 index 000000000..ef08dc4ff --- /dev/null +++ b/docs/nacos/himarket-nacos-skill-integration-spec.md @@ -0,0 +1,707 @@ +# HiMarket 对接 Nacos Skill 管理 — 开发 Spec + +> 完全对标 Nacos SDK 能力,Nacos 不具备的能力 HiMarket 侧不做补充。 +> 对接方式:直接依赖 `nacos-maintainer-client` SDK,无需自行封装 REST 客户端。 + +--- + +## 一、SDK 依赖与初始化 + +### 1.1 Maven 依赖 + +```xml + + com.alibaba.nacos + nacos-maintainer-client + 3.2.04-SNAPSHOT + +``` + +### 1.2 初始化 + +```java +Properties properties = new Properties(); +properties.setProperty("serverAddr", "127.0.0.1:8848"); +// 如果 Nacos 开启鉴权 +properties.setProperty("username", "nacos"); +properties.setProperty("password", "nacos"); + +AiMaintainerService aiService = AiMaintainerFactory.createAiMaintainerService(properties); +``` + +### 1.3 Spring Bean 配置 + +```java +@Configuration +public class NacosSkillConfig { + + @Bean + public AiMaintainerService aiMaintainerService(NacosSkillProperties props) throws NacosException { + Properties properties = new Properties(); + properties.setProperty("serverAddr", props.getServerAddr()); + if (StringUtils.hasText(props.getUsername())) { + properties.setProperty("username", props.getUsername()); + } + if (StringUtils.hasText(props.getPassword())) { + properties.setProperty("password", props.getPassword()); + } + return AiMaintainerFactory.createAiMaintainerService(properties); + } +} +``` + +```yaml +nacos: + skill: + server-addr: 127.0.0.1:8848 + 
namespace: public + username: nacos + password: nacos +``` + +### 1.4 基础信息 + +| 项目 | 值 | +|------|-----| +| SDK 模块 | `nacos-maintainer-client` | +| 核心接口 | `SkillMaintainerService`(由 `AiMaintainerService` 继承) | +| 工厂类 | `AiMaintainerFactory.createAiMaintainerService(properties)` | +| 默认 Namespace | `public` | +| ZIP 上传限制 | 最大 10MB | +| 分页模型 | `Page` — `{ totalCount, pageNumber, pagesAvailable, pageItems[] }` | +| 异常 | 所有方法抛出 `NacosException` | +| 线程安全 | `AiMaintainerService` 实例线程安全,可作为单例 Bean | + +--- + +## 二、Nacos 数据模型 + +### 2.1 Skill(完整对象) + +```java +public class Skill { + private String namespaceId; // 命名空间 + private String name; // 唯一标识(英文字母、下划线、连字符) + private String description; // 描述 + private String instruction; // 指令内容(SKILL.md 的 Markdown body) + private Map resource; // 资源文件映射 +} +``` + +必填字段:`name`、`description`、`instruction` + +### 2.2 SkillResource(资源文件) + +```java +public class SkillResource { + private String name; // 文件名(含扩展名) + private String type; // 类型:template / data / script 等 + private String content; // 文件内容(文本原文或 base64) + private Map metadata; // 可选元数据 +} +``` + +### 2.3 SkillBasicInfo(列表项) + +```java +public class SkillBasicInfo { + private String namespaceId; + private String name; + private String description; + private Long updateTime; // 毫秒时间戳 +} +``` + +### 2.4 分页模型 + +```java +public class Page { + private int totalCount; + private int pageNumber; + private int pagesAvailable; + private List pageItems; +} +``` + +--- + +## 三、SDK 方法清单 + +### 3.1 创建 Skill(JSON 方式) + +```java +String registerSkill(String namespaceId, Skill skill) throws NacosException; +default String registerSkill(Skill skill) throws NacosException; // 默认 namespace +``` + +```java +Skill skill = new Skill(); +skill.setName("my-skill"); +skill.setDescription("A useful coding skill"); +skill.setInstruction("# My Skill\n\nDetailed instructions..."); + +Map resources = new HashMap<>(); +SkillResource res = new SkillResource(); +res.setName("config_template.json"); 
+res.setType("template"); +res.setContent("{ \"key\": \"value\" }"); +resources.put("config-template", res); +skill.setResource(resources); + +String skillName = aiService.registerSkill("public", skill); +``` + +### 3.2 创建/更新 Skill(ZIP 上传方式) + +```java +String uploadSkillFromZip(String namespaceId, byte[] zipBytes) throws NacosException; +default String uploadSkillFromZip(byte[] zipBytes) throws NacosException; +``` + +create-or-replace 语义:同名 Skill 不存在则创建,已存在则整体覆盖。 + +```java +byte[] zipBytes = Files.readAllBytes(Path.of("my-skill.zip")); +String skillName = aiService.uploadSkillFromZip("public", zipBytes); +``` + +ZIP 包要求: +- 根目录或一级子目录下必须包含 `SKILL.md` +- `SKILL.md` 必须有 YAML frontmatter,包含 `name` 和 `description` +- 自动过滤隐藏文件(`.` 开头)、macOS 元数据文件 +- 顶层目录自动剥离;二进制文件自动 base64 编码 + +> SDK 内部将 zipBytes 做 Base64 编码后发送。接近 10MB 时传输体积会增加约 33%, +> 如遇性能问题可参考 `SkillUploadService` 改用 HTTP multipart 直传。 + +### 3.3 查询详情 + +```java +Skill getSkillDetail(String namespaceId, String skillName) throws NacosException; +default Skill getSkillDetail(String skillName) throws NacosException; +``` + +### 3.4 更新 Skill + +```java +boolean updateSkill(String namespaceId, Skill skill) throws NacosException; +default boolean updateSkill(Skill skill) throws NacosException; +``` + +整体覆盖更新,传入完整 Skill 对象。 + +### 3.5 删除 Skill + +```java +boolean deleteSkill(String namespaceId, String skillName) throws NacosException; +default boolean deleteSkill(String skillName) throws NacosException; +``` + +### 3.6 分页列表 + +```java +Page listSkills(String namespaceId, String skillName, + String search, int pageNo, int pageSize) throws NacosException; +default Page listSkills(String skillName, int pageNo, int pageSize) throws NacosException; +``` + +| 参数 | 说明 | +|------|------| +| skillName | 按名称筛选(可为空) | +| search | `"accurate"`(精确)或 `"blur"`(模糊) | +| pageNo / pageSize | 分页参数 | + +--- + +## 四、Nacos 能力边界(HiMarket 对齐范围) + +以下是 capability-checklist 中列出的能力域,标注 Nacos 实际支持情况。 +Nacos 不支持的能力 HiMarket 直接阉割,不做本地补充。 + +| 序号 | 
能力域 | Nacos 支持 | HiMarket 对齐方案 | +|------|--------|-----------|------------------| +| 1 | 元数据 CRUD | ✅ name/description/instruction/resource | 直接透传 SDK | +| 2 | 包上传 | ✅ ZIP 上传,自动解析 SKILL.md | 直接透传 SDK | +| 3 | 包下载 | ⚠️ 无直接 ZIP 下载接口 | HiMarket 从 getSkillDetail 取数据后打包 ZIP | +| 4 | SKILL.md 读写 | ✅ 通过 instruction 字段 | HiMarket 做 frontmatter 拼装/解析 | +| 5 | 文件浏览 | ⚠️ 通过 resource Map 可获取 | HiMarket 从 resource Map 构建文件树 | +| 6 | 列表查询 | ✅ 分页 + 名称筛选(精确/模糊) | 直接透传 SDK | +| 7 | 发布/下线 | ❌ | HiMarket 本地维护发布状态(门户可见性控制) | +| 8 | 状态管理 | ❌ | HiMarket 本地 skill_publish 表,轻量状态:已发布/未发布 | +| 9 | 分类体系 | ❌ | **阉割** | +| 10 | 标签管理 | ❌ | **阉割** | +| 11 | 图标管理 | ❌ | **阉割** | +| 12 | 下载计数 | ❌ | **阉割** | +| 13 | 发布历史 | ❌ | **阉割** | +| 14 | 安全约束 | ✅ ZIP 大小限制、路径穿越防护、隐藏文件过滤 | Nacos 侧处理 | +| 15 | CLI 工具 | ✅ nacos-cli 已实现 | 无需开发 | + +--- + +## 五、HiMarket 服务层设计 + +### 5.1 架构 + +``` +┌──────────────────────────────────────┐ +│ HiMarket Controller │ +│ (REST API — 面向前端/CLI) │ +├──────────────────────────────────────┤ +│ HiMarket SkillService │ +│ (SDK 透传 + 发布状态 + 视图构建) │ +├──────────────┬───────────────────────┤ +│ AiMaintainer │ skill_publish 表 │ +│ Service(SDK) │ (本地发布状态) │ +└──────────────┴───────────────────────┘ +``` + +HiMarket 仅做薄封装: +- 透传 SDK 的 CRUD / 列表 / 上传 +- 本地维护发布状态(控制门户可见性) +- 补充 Nacos 没有直接提供的视图层逻辑(文件树构建、ZIP 打包下载、SKILL.md 拼装) + +### 5.2 本地数据表 + +仅一张表,用于记录哪些 Skill 已发布到门户: + +```sql +CREATE TABLE skill_publish ( + skill_name VARCHAR(128) NOT NULL COMMENT '对应 Nacos skill name', + namespace VARCHAR(128) NOT NULL DEFAULT 'public', + published TINYINT(1) NOT NULL DEFAULT 0 COMMENT '0=未发布 1=已发布', + published_at DATETIME NULL COMMENT '最近发布时间', + unpublished_at DATETIME NULL COMMENT '最近下线时间', + PRIMARY KEY (namespace, skill_name) +) COMMENT 'HiMarket 门户发布状态'; +``` + +### 5.3 SkillService 接口 + +```java +public interface SkillService { + + // ========== CRUD ========== + + /** 创建 Skill(JSON) */ + String createSkill(Skill skill); + + /** 创建/更新 Skill(ZIP 上传) */ + String 
createOrUpdateSkillFromZip(byte[] zipBytes); + + /** 查询详情 */ + Skill getSkill(String skillName); + + /** 更新 Skill */ + boolean updateSkill(Skill skill); + + /** 删除 Skill(同时清理本地发布状态) */ + boolean deleteSkill(String skillName); + + /** 分页列表 */ + Page listSkills(String skillName, String search, int pageNo, int pageSize); + + // ========== 发布/下线(HiMarket 门户可见性) ========== + + /** 发布到门户:校验 Nacos 侧 Skill 存在 → 标记已发布 */ + void publishSkill(String skillName); + + /** 从门户下线 */ + void unpublishSkill(String skillName); + + /** 查询已发布的 Skill 列表(门户展示用) */ + Page listPublishedSkills(String skillName, int pageNo, int pageSize); + + /** 查询发布状态 */ + boolean isPublished(String skillName); + + // ========== 文件浏览 ========== + + FileTreeNode getFileTree(String skillName); + List getAllFiles(String skillName); + SkillFile getFileContent(String skillName, String filePath); + + // ========== 包下载 ========== + + String getSkillMd(String skillName); + StreamingResponseBody downloadSkillZip(String skillName); +} +``` + +### 5.4 关键业务流程 + +#### 创建 Skill(JSON) + +``` +1. 校验参数(name, description, instruction 必填) +2. aiService.registerSkill(namespace, skill) +3. 返回 skillName +``` + +#### 创建/更新 Skill(ZIP) + +``` +1. 校验文件大小(≤10MB) +2. aiService.uploadSkillFromZip(namespace, zipBytes) +3. 返回 skillName +``` + +#### 查询详情 + +``` +1. aiService.getSkillDetail(namespace, skillName) +2. 查询 skill_publish 表补充发布状态 +3. 返回 Skill + published 状态 +``` + +#### 发布到门户 + +``` +1. aiService.getSkillDetail(namespace, skillName) — 校验 Nacos 侧存在 +2. UPSERT skill_publish 表:published=1, published_at=now() +``` + +#### 从门户下线 + +``` +1. UPDATE skill_publish 表:published=0, unpublished_at=now() +``` + +#### 查询已发布列表(门户展示) + +``` +1. 从 skill_publish 表查出所有 published=1 的 skillName 列表 +2. 调用 aiService.listSkills 获取详情,过滤出已发布的 +3. 返回分页结果 +``` + +#### 删除 Skill + +``` +1. aiService.deleteSkill(namespace, skillName) +2. DELETE FROM skill_publish WHERE skill_name = ? +``` + +#### 文件树构建 + +``` +1. 
aiService.getSkillDetail(namespace, skillName) +2. 从 resource Map 按路径构建树形结构 + SKILL.md 虚拟节点 +3. 目录优先排序 +``` + +#### ZIP 打包下载 + +``` +1. aiService.getSkillDetail(namespace, skillName) +2. 生成 SKILL.md(frontmatter + instruction) +3. 遍历 resource Map,逐个写入 ZipEntry +4. 流式返回 +``` + +--- + +## 六、HiMarket REST API + +### 6.1 接口总览 + +| 序号 | 方法 | 路径 | 说明 | 数据来源 | +|------|------|------|------|---------| +| 1 | POST | /api/v1/skills | 创建 Skill(JSON) | SDK registerSkill | +| 2 | POST | /api/v1/skills/upload | 创建/更新 Skill(ZIP) | SDK uploadSkillFromZip | +| 3 | GET | /api/v1/skills | 分页列表 | SDK listSkills | +| 4 | GET | /api/v1/skills/{name} | 查询详情 | SDK getSkillDetail + 本地发布状态 | +| 5 | PUT | /api/v1/skills/{name} | 更新 Skill | SDK updateSkill | +| 6 | DELETE | /api/v1/skills/{name} | 删除 Skill | SDK deleteSkill + 清理本地状态 | +| 7 | POST | /api/v1/skills/{name}/publish | 发布到门户 | SDK getSkillDetail(校验)+ 本地 | +| 8 | DELETE | /api/v1/skills/{name}/publish | 从门户下线 | 本地 | +| 9 | GET | /api/v1/skills/published | 已发布列表(门户/CLI) | 本地 + SDK | +| 10 | GET | /api/v1/skills/{name}/document | 获取 SKILL.md | SDK getSkillDetail | +| 11 | GET | /api/v1/skills/{name}/download | 下载 ZIP 包 | SDK getSkillDetail | +| 12 | GET | /api/v1/skills/{name}/files/tree | 文件树 | SDK getSkillDetail | +| 13 | GET | /api/v1/skills/{name}/files | 所有文件(含内容) | SDK getSkillDetail | +| 14 | GET | /api/v1/skills/{name}/files/content?path=x | 单个文件内容 | SDK getSkillDetail | + +### 6.2 请求/响应示例 + +#### 创建 Skill(JSON) + +``` +POST /api/v1/skills +``` +```json +{ + "name": "my-skill", + "description": "A useful coding skill", + "instruction": "# My Skill\n\nDetailed instructions...", + "resources": { + "config-template": { + "name": "config_template.json", + "type": "template", + "content": "{ \"key\": \"value\" }" + } + } +} +``` +```json +{ "code": 0, "data": "my-skill" } +``` + +#### 创建/更新 Skill(ZIP) + +``` +POST /api/v1/skills/upload +Content-Type: multipart/form-data +file: +``` +```json +{ "code": 0, "data": "my-skill" } +``` + 
+#### 查询详情 + +``` +GET /api/v1/skills/my-skill +``` +```json +{ + "code": 0, + "data": { + "namespaceId": "public", + "name": "my-skill", + "description": "A useful coding skill", + "instruction": "# My Skill\n\nDetailed instructions...", + "published": true, + "publishedAt": "2025-06-01T12:00:00", + "resource": { + "config-template": { + "name": "config_template.json", + "type": "template", + "content": "{ \"key\": \"value\" }", + "metadata": {} + } + } + } +} +``` + +#### 分页列表 + +``` +GET /api/v1/skills?name=coding&search=blur&pageNo=1&pageSize=20 +``` +```json +{ + "code": 0, + "data": { + "totalCount": 50, + "pageNumber": 1, + "pagesAvailable": 3, + "pageItems": [ + { + "namespaceId": "public", + "name": "my-skill", + "description": "A useful coding skill", + "updateTime": 1719792000000 + } + ] + } +} +``` + +#### 获取 SKILL.md + +``` +GET /api/v1/skills/my-skill/document +``` +```text +--- +name: my-skill +description: A useful coding skill +--- + +# My Skill + +Detailed instructions... 
+``` + +#### 下载 ZIP 包 + +``` +GET /api/v1/skills/my-skill/download +Content-Type: application/zip +Content-Disposition: attachment; filename="my-skill.zip" +``` + +#### 文件树 + +``` +GET /api/v1/skills/my-skill/files/tree +``` +```json +{ + "code": 0, + "data": { + "name": "my-skill", + "type": "directory", + "children": [ + { "name": "SKILL.md", "path": "SKILL.md", "type": "file", "size": 512 }, + { + "name": "scripts", + "path": "scripts", + "type": "directory", + "children": [ + { "name": "setup.sh", "path": "scripts/setup.sh", "type": "file", "size": 1024 } + ] + } + ] + } +} +``` + +#### 发布到门户 + +``` +POST /api/v1/skills/my-skill/publish +``` +```json +{ "code": 0, "data": "ok" } +``` + +#### 从门户下线 + +``` +DELETE /api/v1/skills/my-skill/publish +``` +```json +{ "code": 0, "data": "ok" } +``` + +#### 已发布列表(门户展示) + +``` +GET /api/v1/skills/published?name=coding&pageNo=1&pageSize=20 +``` +```json +{ + "code": 0, + "data": { + "totalCount": 10, + "pageNumber": 1, + "pagesAvailable": 1, + "pageItems": [ + { + "namespaceId": "public", + "name": "my-skill", + "description": "A useful coding skill", + "updateTime": 1719792000000 + } + ] + } +} +``` + +--- + +## 七、HiMarket 需实现的工具逻辑 + +Nacos SDK 不直接提供但 HiMarket 需要在服务层实现的三块逻辑: + +### 7.1 SKILL.md 拼装 + +从 Skill 对象生成 SKILL.md(参考 Nacos `SkillUtils.toMarkdown()`): + +```java +public static String buildSkillMd(Skill skill) { + StringBuilder sb = new StringBuilder(); + sb.append("---\n"); + sb.append("name: ").append(escapeYamlValue(skill.getName())).append("\n"); + sb.append("description: ").append(escapeYamlValue(skill.getDescription())).append("\n"); + sb.append("---\n\n"); + if (skill.getInstruction() != null && !skill.getInstruction().isBlank()) { + sb.append(skill.getInstruction().trim()).append("\n"); + } + return sb.toString(); +} +``` + +### 7.2 文件树构建 + +从 `Skill.resource` Map 构建树形结构: + +```java +public FileTreeNode buildFileTree(Skill skill) { + FileTreeNode root = new FileTreeNode(skill.getName(), "", "directory"); + 
+ // SKILL.md 虚拟节点 + String md = buildSkillMd(skill); + root.addFile("SKILL.md", "SKILL.md", md.length()); + + // resource 文件 + if (skill.getResource() != null) { + for (SkillResource res : skill.getResource().values()) { + root.addFileByPath(res.getName(), + res.getContent() != null ? res.getContent().length() : 0); + } + } + root.sortChildren(); // 目录优先 + return root; +} +``` + +### 7.3 ZIP 打包下载 + +从 Skill 对象重新打包为 ZIP 流式返回: + +```java +public StreamingResponseBody downloadAsZip(Skill skill) { + return out -> { + try (ZipOutputStream zos = new ZipOutputStream(out)) { + // SKILL.md + zos.putNextEntry(new ZipEntry("SKILL.md")); + zos.write(buildSkillMd(skill).getBytes(StandardCharsets.UTF_8)); + zos.closeEntry(); + + // resource 文件 + if (skill.getResource() != null) { + for (SkillResource res : skill.getResource().values()) { + zos.putNextEntry(new ZipEntry(res.getName())); + zos.write(res.getContent().getBytes(StandardCharsets.UTF_8)); + zos.closeEntry(); + } + } + } + }; +} +``` + +--- + +## 八、开发任务拆解 + +| 任务 | 优先级 | 预估 | 说明 | +|------|--------|------|------| +| T1 SDK Bean 配置 | P0 | 0.5d | NacosSkillConfig + Properties | +| T2 SkillService CRUD | P0 | 1d | 透传 SDK 的 6 个方法 | +| T3 SkillController | P0 | 0.5d | 14 个 REST 接口 | +| T4 ZIP 上传接口 | P0 | 0.5d | MultipartFile → byte[] → SDK | +| T5 SKILL.md 拼装 | P0 | 0.5d | frontmatter + instruction | +| T6 skill_publish 表 + 发布/下线 | P0 | 1d | DDL + Repository + 发布/下线/已发布列表 | +| T7 文件树构建 | P1 | 0.5d | resource Map → 树形结构 | +| T8 ZIP 打包下载 | P1 | 0.5d | getSkillDetail → ZipOutputStream | +| T9 单文件/全量文件查询 | P1 | 0.5d | resource Map 按路径查找 | +| T10 集成测试 | P0 | 1d | 对接真实 Nacos 实例 | + +总预估:约 6.5 人天 + +--- + +## 九、注意事项 + +1. Nacos 用 `name` 作为 Skill 唯一标识(非自增 ID),HiMarket 的 skillId 直接使用 name +2. name 只允许英文字母、下划线、连字符 +3. Nacos 支持 namespace 隔离,HiMarket 配置默认 namespace 即可,大部分场景无需用户感知 +4. SDK 鉴权:初始化时传入 username/password,SDK 内部自动处理 token +5. `uploadSkillFromZip` 是 create-or-replace 语义,HiMarket 无需区分创建和更新 +6. 
发布状态仅存在于 HiMarket 本地 `skill_publish` 表,Nacos 侧无感知。删除 Skill 时需同步清理本地发布记录 +7. 本 spec 基于 Nacos 开发分支(3.2.04-SNAPSHOT),需要 Nacos 开启 AI 功能(`ConditionAiEnabled`) +8. `AiMaintainerService` 线程安全,作为单例 Bean 注入即可 diff --git a/docs/skill-market-capability-checklist.md b/docs/skill-market-capability-checklist.md new file mode 100644 index 000000000..c3cde784b --- /dev/null +++ b/docs/skill-market-capability-checklist.md @@ -0,0 +1,288 @@ +# Skill 市场能力需求清单 + +> 本文档基于 HiMarket 现有 Skill 市场的功能梳理,提炼出 Skill 管理组件需要提供的通用接口和能力要求。 +> 目标:作为 Nacos Skill 管理能力的对齐清单,确保原子能力覆盖完整。 + +## 一、Skill 生命周期管理 + +### 1.1 基础 CRUD + +| 序号 | 能力 | 方法 | 说明 | +|------|------|------|------| +| 1 | 创建 Skill | POST | 创建一个 Skill 记录,包含名称、描述、图标、标签等元数据 | +| 2 | 查询 Skill 列表 | GET | 分页查询,支持按名称、状态、分类等条件筛选 | +| 3 | 查询 Skill 详情 | GET | 根据 Skill ID 返回完整信息,包含元数据、标签、下载计数等 | +| 4 | 更新 Skill | PUT | 更新名称、描述、图标、标签等元数据(部分更新) | +| 5 | 删除 Skill | DELETE | 删除 Skill 及其关联的所有文件数据 | + +### 1.2 发布与下线 + +| 序号 | 能力 | 方法 | 说明 | +|------|------|------|------| +| 6 | 发布 Skill | POST | 将 Skill 设为已发布状态,对外可见可下载 | +| 7 | 下线 Skill | DELETE | 取消发布,不再对外展示 | +| 8 | 查询发布记录 | GET | 分页查询 Skill 的发布历史 | + +### 1.3 分类管理 + +| 序号 | 能力 | 方法 | 说明 | +|------|------|------|------| +| 9 | 设置分类 | POST | 为 Skill 绑定一个或多个分类标签 | +| 10 | 查询分类 | GET | 获取 Skill 已绑定的分类列表 | + +## 二、Skill 包管理 + +### 2.1 上传与解析 + +| 序号 | 能力 | 方法 | 说明 | +|------|------|------|------| +| 11 | 上传 Skill 包 | POST | 接收 ZIP 或 TAR.GZ 压缩包,自动解析并存储所有文件;替换式更新(每次上传覆盖旧文件) | + +上传时需自动完成: +- 解压并遍历所有文件 +- 查找并解析 SKILL.md 的 YAML frontmatter,提取 `name` 和 `description` +- 自动检测文件编码(文本 / 二进制) +- 过滤隐藏文件(`.` 开头)和路径穿越条目(含 `..`) +- 如果所有文件在同一顶层目录下,自动剥离该目录前缀 + +### 2.2 下载与分发 + +| 序号 | 能力 | 方法 | 说明 | +|------|------|------|------| +| 12 | 下载 SKILL.md | GET | 返回 SKILL.md 原始 Markdown 内容;需校验 Skill 已发布;自动递增下载计数 | +| 13 | 下载完整 Skill 包 | GET | 将所有文件打包为 ZIP 流式返回 | + +### 2.3 文件浏览 + +| 序号 | 能力 | 方法 | 说明 | +|------|------|------|------| +| 14 | 获取文件树 | GET | 返回树形目录结构(目录优先排序),不含文件内容 | +| 15 | 获取所有文件(含内容) | GET | 返回所有文件的路径、内容、编码、大小 | 
+| 16 | 获取单个文件内容 | GET | 按路径获取指定文件的内容 |
+
+### 2.4 SKILL.md 独立更新
+
+| 序号 | 能力 | 方法 | 说明 |
+|------|------|------|------|
+| 17 | 更新 SKILL.md | PUT | 单独更新 SKILL.md 内容,同步更新元数据和文件存储 |
+
+## 三、查询与检索
+
+### 3.1 精简列表接口(供 CLI / SDK 消费)
+
+| 序号 | 能力 | 方法 | 说明 |
+|------|------|------|------|
+| 18 | 获取已发布 Skill 列表 | GET | 返回所有已发布 Skill 的精简信息,供命令行工具和 SDK 快速获取可用 Skill |
+
+返回字段:
+- `skillId` — 唯一标识
+- `name` — Skill 名称
+- `description` — 描述
+- `skillTags` — 标签列表
+
+### 3.2 列表查询支持的筛选条件
+
+| 参数 | 类型 | 说明 |
+|------|------|------|
+| status | String | 状态筛选(草稿 / 就绪 / 已发布) |
+| name | String | 按名称模糊搜索 |
+| categoryIds | List\<String\> | 按分类筛选 |
+| excludeCategoryId | String | 排除某个分类 |
+| page / size | int | 分页参数 |
+
+
+## 四、数据模型
+
+### 4.1 Skill 元数据
+
+| 字段 | 类型 | 说明 |
+|------|------|------|
+| skillId | String | 唯一标识 |
+| name | String(50) | Skill 名称 |
+| description | String(1000) | Skill 描述 |
+| status | Enum | `DRAFT` → `READY` → `PUBLISHED` |
+| document | Text | SKILL.md 原始内容(冗余存储,方便快速读取) |
+| icon | JSON | 图标信息 |
+| skillTags | List\<String\> | 标签列表 |
+| downloadCount | Long | 下载次数 |
+| createdAt | DateTime | 创建时间 |
+| updatedAt | DateTime | 更新时间 |
+
+### 4.2 Skill 文件
+
+| 字段 | 类型 | 说明 |
+|------|------|------|
+| id | Long | 自增主键 |
+| skillId | String | 关联 Skill ID |
+| path | String(512) | 文件相对路径(如 `scripts/setup.sh`) |
+| encoding | String(16) | `text` 或 `base64` |
+| content | LargeText | 文件内容(文本原文或 base64 编码) |
+| size | Int | 文件大小(字节) |
+| 唯一约束 | — | `(skillId, path)` |
+
+## 五、返回数据结构参考
+
+### 5.1 Skill 详情
+
+```json
+{
+  "skillId": "skill-001",
+  "name": "my-skill",
+  "description": "A useful coding skill",
+  "status": "PUBLISHED",
+  "document": "---\nname: my-skill\ndescription: A useful coding skill\n---\n# My Skill\n...",
+  "icon": { "type": "emoji", "value": "🔧" },
+  "skillTags": ["coding", "python"],
+  "downloadCount": 42,
+  "categories": [...],
+  "createdAt": "2025-01-01T00:00:00",
+  "updatedAt": "2025-06-01T00:00:00"
+}
+```
+
+### 5.2 精简列表项(CLI 接口)
+
+```json
+{
+  "skillId": 
"skill-001", + "name": "my-skill", + "description": "A useful coding skill", + "skillTags": ["coding", "python"] +} +``` + +### 5.3 文件树节点 + +```json +{ + "name": "scripts", + "path": "scripts", + "type": "directory", + "children": [ + { + "name": "setup.sh", + "path": "scripts/setup.sh", + "type": "file", + "encoding": "text", + "size": 1024 + } + ] +} +``` + +### 5.4 文件内容 + +```json +{ + "path": "SKILL.md", + "content": "---\nname: my-skill\n---\n...", + "encoding": "text", + "size": 512 +} +``` + +### 5.5 上传结果 + +```json +{ + "fileCount": 12 +} +``` + +## 六、SKILL.md 规范 + +SKILL.md 是每个 Skill 包的核心描述文件,格式为 YAML frontmatter + Markdown body: + +```markdown +--- +name: my-skill +description: A useful coding skill +--- + +# My Skill + +这里是 Skill 的详细说明... +``` + +frontmatter 必填字段: +- `name` — Skill 名称 +- `description` — Skill 描述 + +## 七、业务规则与约束 + +| 规则 | 说明 | +|------|------| +| 包格式 | 支持 ZIP 和 TAR.GZ | +| SKILL.md 必须存在 | 压缩包根目录或一级子目录下必须有 SKILL.md | +| YAML frontmatter 校验 | 必须包含 `name` 和 `description` 字段 | +| 单文件大小限制 | 最大 5MB | +| 文件数量限制 | 最多 500 个文件 | +| 编码自动检测 | 已知文本后缀 → text;已知二进制后缀 → base64;未知后缀 → 尝试 UTF-8 解码,失败则 base64 | +| 隐藏文件过滤 | 文件名以 `.` 开头的自动跳过 | +| 路径穿越防护 | 路径中包含 `..` 的条目自动跳过 | +| 顶层目录剥离 | 如果所有文件都在同一顶层目录下,自动去除该目录前缀 | +| 下载计数 | 每次下载 SKILL.md 自动递增计数 | +| 状态流转 | 创建即就绪(无需额外绑定),发布后对外可见 | + +## 八、Nacos CLI 工具 + +除了服务端 API,还需要提供一个 `nacos-cli` 命令行工具,用于在沙箱环境或开发机器上直接操作 Skill。 + +### 8.1 核心命令 + +| 命令 | 说明 | +|------|------| +| `nacos-cli skill list` | 列出所有已发布的 Skill(名称、描述、标签) | +| `nacos-cli skill search ` | 按关键词搜索 Skill | +| `nacos-cli skill install [--dir ]` | 下载 Skill 包并解压到指定目录(默认当前目录) | +| `nacos-cli skill info ` | 查看 Skill 详情(描述、标签、文件列表、下载次数等) | + +### 8.2 使用场景 + +主要用于沙箱环境中,AI Agent 或开发者可以通过 CLI 直接将 Skill 下载到工作目录: + +```bash +# 列出可用 Skill +nacos-cli skill list + +# 安装 Skill 到指定目录 +nacos-cli skill install my-skill --dir .claude/skills/my-skill + +# 查看 Skill 详情 +nacos-cli skill info my-skill +``` + +### 8.3 要求 + +- 单二进制分发,无额外运行时依赖(方便预装到沙箱镜像) +- 支持通过环境变量或参数指定 Nacos 
服务端地址 +- `install` 命令下载完整 Skill 包(ZIP)并自动解压到目标目录 +- 输出格式支持 JSON(`--output json`),方便程序化调用 + +## 九、能力总结 + +Nacos 作为 Skill 管理组件,需要覆盖以下 10 个能力域: + +| 序号 | 能力域 | 具体要求 | +|------|--------|----------| +| 1 | 元数据 CRUD | Skill 的名称、描述、图标、标签、状态的增删改查 | +| 2 | 包存储与分发 | 上传压缩包 → 解析 → 存储多文件 → 按需下载(ZIP 流式输出) | +| 3 | SKILL.md 读写 | 作为 Skill 的核心描述文件,支持独立读取和更新 | +| 4 | 文件浏览 | 文件树查询 + 单文件内容获取 + 全量文件获取 | +| 5 | 发布/下线 | 生命周期管理,支持发布、下线、查询发布历史 | +| 6 | 分类体系 | 支持多分类绑定和按分类筛选 | +| 7 | 统计计数 | 下载次数统计(每次下载自动递增) | +| 8 | 列表查询 | 分页 + 多条件筛选(状态、名称、分类) | +| 9 | CLI 友好接口 | 精简列表接口,返回 skillId、name、description、skillTags | +| 10 | 安全约束 | 文件大小/数量限制、路径穿越防护、隐藏文件过滤、编码自动检测 | +| 11 | CLI 工具 | 提供 nacos-cli,支持在沙箱/开发环境中列出、搜索、安装 Skill 到指定目录 | + + + + + +\1. /api/v1/nacos/{nacosId}/skills/... 这个路径不对,不应该包含 nacos,skill 是 himarket 的一等公民 + +\2. ConfigFileBuilder 为什么下边是 QoderCliConfigGenerator,是因为 QoderCli 有什么特殊逻辑吗?qwen code/claude code/open code 应该是相同的逻辑啊 + +\3. 阅读下 nacos 的源码,现在上传 zip 包的这个逻辑应该不在 diff --git a/himarket-bootstrap/Dockerfile b/himarket-bootstrap/Dockerfile index 6106ab534..8e655d7ca 100644 --- a/himarket-bootstrap/Dockerfile +++ b/himarket-bootstrap/Dockerfile @@ -1,20 +1,20 @@ -FROM dragonwell-registry.cn-hangzhou.cr.aliyuncs.com/dragonwell/dragonwell:17-standard-ga-anolis +FROM opensource-registry.cn-hangzhou.cr.aliyuncs.com/higress-group/dragonwell:17-standard-ga-anolis WORKDIR /app COPY target/*.jar app.jar # Install Arthas (download full package with fixed version) -RUN ARTHAS_VERSION=4.1.4 && \ - yum install -y unzip && \ - mkdir -p /root/.arthas/lib/${ARTHAS_VERSION} && \ - curl -L https://github.com/alibaba/arthas/releases/download/arthas-all-${ARTHAS_VERSION}/arthas-bin.zip -o /tmp/arthas-bin.zip && \ - unzip /tmp/arthas-bin.zip -d /root/.arthas/lib/${ARTHAS_VERSION}/ && \ - rm -f /tmp/arthas-bin.zip && \ - yum clean all && \ - curl -L https://arthas.aliyun.com/arthas-boot.jar -o /opt/arthas-boot.jar && \ - echo '#!/bin/sh' > /usr/local/bin/as.sh && \ - echo 'java -jar /opt/arthas-boot.jar "$@"' >> 
/usr/local/bin/as.sh && \ - chmod +x /usr/local/bin/as.sh +# RUN ARTHAS_VERSION=4.1.4 && \ +# yum install -y unzip && \ +# mkdir -p /root/.arthas/lib/${ARTHAS_VERSION} && \ +# curl -L https://github.com/alibaba/arthas/releases/download/arthas-all-${ARTHAS_VERSION}/arthas-bin.zip -o /tmp/arthas-bin.zip && \ +# unzip /tmp/arthas-bin.zip -d /root/.arthas/lib/${ARTHAS_VERSION}/ && \ +# rm -f /tmp/arthas-bin.zip && \ +# yum clean all && \ +# curl -L https://arthas.aliyun.com/arthas-boot.jar -o /opt/arthas-boot.jar && \ +# echo '#!/bin/sh' > /usr/local/bin/as.sh && \ +# echo 'java -jar /opt/arthas-boot.jar "$@"' >> /usr/local/bin/as.sh && \ +# chmod +x /usr/local/bin/as.sh EXPOSE 8080 diff --git a/himarket-bootstrap/pom.xml b/himarket-bootstrap/pom.xml index cc235d6ec..3082d8e55 100644 --- a/himarket-bootstrap/pom.xml +++ b/himarket-bootstrap/pom.xml @@ -34,6 +34,13 @@ flyway-mysql + + + com.h2database + h2 + runtime + + diff --git a/himarket-bootstrap/src/main/java/com/alibaba/himarket/HiMarketApplication.java b/himarket-bootstrap/src/main/java/com/alibaba/himarket/HiMarketApplication.java index f48e66624..4765ec8e0 100644 --- a/himarket-bootstrap/src/main/java/com/alibaba/himarket/HiMarketApplication.java +++ b/himarket-bootstrap/src/main/java/com/alibaba/himarket/HiMarketApplication.java @@ -19,12 +19,15 @@ package com.alibaba.himarket; +import com.alibaba.himarket.config.AcpProperties; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.data.jpa.repository.config.EnableJpaAuditing; @SpringBootApplication @EnableJpaAuditing +@EnableConfigurationProperties({AcpProperties.class}) public class HiMarketApplication { public static void main(String[] args) { diff --git a/himarket-bootstrap/src/main/java/com/alibaba/himarket/config/FlywayConfig.java 
b/himarket-bootstrap/src/main/java/com/alibaba/himarket/config/FlywayConfig.java index 7fab4d207..f96bbbd77 100644 --- a/himarket-bootstrap/src/main/java/com/alibaba/himarket/config/FlywayConfig.java +++ b/himarket-bootstrap/src/main/java/com/alibaba/himarket/config/FlywayConfig.java @@ -24,12 +24,14 @@ import org.flywaydb.core.Flyway; import org.flywaydb.core.api.FlywayException; import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Primary; @Slf4j @Configuration +@ConditionalOnProperty(name = "app.flyway.enabled", havingValue = "true", matchIfMissing = true) public class FlywayConfig { @Value("${app.flyway.auto-repair:true}") diff --git a/himarket-bootstrap/src/main/java/com/alibaba/himarket/config/SecurityConfig.java b/himarket-bootstrap/src/main/java/com/alibaba/himarket/config/SecurityConfig.java index 900bbf59a..11b657272 100644 --- a/himarket-bootstrap/src/main/java/com/alibaba/himarket/config/SecurityConfig.java +++ b/himarket-bootstrap/src/main/java/com/alibaba/himarket/config/SecurityConfig.java @@ -58,7 +58,11 @@ public class SecurityConfig { "/developers/oidc/authorize", "/developers/oidc/callback", "/developers/oidc/providers", - "/developers/oauth2/token" + "/developers/oauth2/token", + "/ws/acp", + "/ws/terminal", + "/cli-providers", + "/skills/*/download" }; // Swagger endpoints diff --git a/himarket-bootstrap/src/main/java/com/alibaba/himarket/config/WebSocketConfig.java b/himarket-bootstrap/src/main/java/com/alibaba/himarket/config/WebSocketConfig.java new file mode 100644 index 000000000..742ca00c3 --- /dev/null +++ b/himarket-bootstrap/src/main/java/com/alibaba/himarket/config/WebSocketConfig.java @@ -0,0 +1,43 @@ +package com.alibaba.himarket.config; + +import 
com.alibaba.himarket.service.hicoding.terminal.TerminalWebSocketHandler; +import com.alibaba.himarket.service.hicoding.websocket.HiCodingHandshakeInterceptor; +import com.alibaba.himarket.service.hicoding.websocket.HiCodingWebSocketHandler; +import lombok.RequiredArgsConstructor; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.web.socket.config.annotation.EnableWebSocket; +import org.springframework.web.socket.config.annotation.WebSocketConfigurer; +import org.springframework.web.socket.config.annotation.WebSocketHandlerRegistry; +import org.springframework.web.socket.server.standard.ServletServerContainerFactoryBean; + +@Configuration +@EnableWebSocket +@RequiredArgsConstructor +public class WebSocketConfig implements WebSocketConfigurer { + + private final HiCodingWebSocketHandler hiCodingWebSocketHandler; + private final TerminalWebSocketHandler terminalWebSocketHandler; + private final HiCodingHandshakeInterceptor hiCodingHandshakeInterceptor; + + @Override + public void registerWebSocketHandlers(WebSocketHandlerRegistry registry) { + registry.addHandler(hiCodingWebSocketHandler, "/ws/acp") + .addInterceptors(hiCodingHandshakeInterceptor) + .setAllowedOrigins("*"); + registry.addHandler(terminalWebSocketHandler, "/ws/terminal") + .addInterceptors(hiCodingHandshakeInterceptor) + .setAllowedOrigins("*"); + } + + @Bean + public ServletServerContainerFactoryBean createWebSocketContainer() { + ServletServerContainerFactoryBean container = new ServletServerContainerFactoryBean(); + // 10MB — matches the frontend 5MB file-size cap after base64 expansion + JSON overhead + container.setMaxTextMessageBufferSize(10 * 1024 * 1024); + container.setMaxBinaryMessageBufferSize(10 * 1024 * 1024); + // 120秒空闲超时,配合30秒ping间隔,允许连续丢失2-3个ping仍不超时 + container.setMaxSessionIdleTimeout(120_000L); + return container; + } +} diff --git a/himarket-bootstrap/src/main/resources/application-h2.yml 
b/himarket-bootstrap/src/main/resources/application-h2.yml new file mode 100644 index 000000000..2fd1141a9 --- /dev/null +++ b/himarket-bootstrap/src/main/resources/application-h2.yml @@ -0,0 +1,19 @@ +spring: + datasource: + url: jdbc:h2:file:${H2_DB_PATH:${user.home}/.himarket/h2db};MODE=MySQL;DATABASE_TO_LOWER=TRUE;CASE_INSENSITIVE_IDENTIFIERS=TRUE;AUTO_RECONNECT=TRUE + username: sa + password: + driver-class-name: org.h2.Driver + jpa: + hibernate: + ddl-auto: update + flyway: + enabled: false + h2: + console: + enabled: true + path: /h2-console + +app: + flyway: + enabled: false diff --git a/himarket-bootstrap/src/main/resources/application.yml b/himarket-bootstrap/src/main/resources/application.yml index c70ee3b66..bd9d0a4d7 100644 --- a/himarket-bootstrap/src/main/resources/application.yml +++ b/himarket-bootstrap/src/main/resources/application.yml @@ -38,7 +38,51 @@ springdoc: jwt: secret: YourJWTSecret - expiration: 2h + expiration: 7d + +acp: + default-provider: ${ACP_DEFAULT_PROVIDER:qwen-code} + default-runtime: ${ACP_DEFAULT_RUNTIME:remote} + remote: + host: ${ACP_REMOTE_HOST:sandbox-shared} + port: ${ACP_REMOTE_PORT:8080} + providers: + qwen-code: + display-name: Qwen Code + command: ${ACP_CLI_COMMAND_QWEN:qwen} + args: ${ACP_CLI_ARGS_QWEN:--acp} + compatible-runtimes: + - REMOTE + supports-custom-model: true + supports-mcp: true + supports-skill: true + qodercli: + display-name: Qoder CLI + command: ${ACP_CLI_COMMAND_QODERCLI:qodercli} + args: ${ACP_CLI_ARGS_QODERCLI:--acp} + compatible-runtimes: + - REMOTE + auth-options: default,personal_access_token + auth-env-var: QODER_PERSONAL_ACCESS_TOKEN + claude-code: + display-name: Claude Code + command: ${ACP_CLI_COMMAND_CLAUDE:claude-agent-acp} + args: ${ACP_CLI_ARGS_CLAUDE:} + compatible-runtimes: + - REMOTE + auth-env-var: ANTHROPIC_API_KEY + supports-custom-model: true + supports-mcp: true + supports-skill: true + opencode: + display-name: OpenCode + command: ${ACP_CLI_COMMAND_OPENCODE:opencode} + 
args: ${ACP_CLI_ARGS_OPENCODE:acp} + compatible-runtimes: + - REMOTE + supports-custom-model: true + supports-mcp: true + supports-skill: true sls: # SLS服务端点(必填) @@ -58,4 +102,4 @@ sls: # CR所在的namespace namespace: ${SLS_ALIYUN_LOG_CONFIG_NAMESPACE:apigateway-system} # CR的名称 - cr-name: ${SLS_ALIYUN_LOG_CONFIG_CR_NAME:apigateway-access-log} \ No newline at end of file + cr-name: ${SLS_ALIYUN_LOG_CONFIG_CR_NAME:apigateway-access-log} diff --git a/himarket-bootstrap/src/main/resources/db/migration/V10__Add_nacos_default_fields.sql b/himarket-bootstrap/src/main/resources/db/migration/V10__Add_nacos_default_fields.sql new file mode 100644 index 000000000..c85642b79 --- /dev/null +++ b/himarket-bootstrap/src/main/resources/db/migration/V10__Add_nacos_default_fields.sql @@ -0,0 +1,8 @@ +-- 新增默认实例标记和默认命名空间字段 +ALTER TABLE nacos_instance ADD COLUMN is_default TINYINT(1) NOT NULL DEFAULT 0; +ALTER TABLE nacos_instance ADD COLUMN default_namespace VARCHAR(128) NOT NULL DEFAULT 'public'; + +-- 存量数据:将最早创建的实例标记为默认 +UPDATE nacos_instance ni +INNER JOIN (SELECT MIN(id) AS min_id FROM nacos_instance) t ON ni.id = t.min_id +SET ni.is_default = 1; diff --git a/himarket-bootstrap/src/main/resources/db/migration/V11__Drop_k8s_cluster_table.sql b/himarket-bootstrap/src/main/resources/db/migration/V11__Drop_k8s_cluster_table.sql new file mode 100644 index 000000000..7de900d02 --- /dev/null +++ b/himarket-bootstrap/src/main/resources/db/migration/V11__Drop_k8s_cluster_table.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS k8s_cluster; diff --git a/himarket-bootstrap/src/main/resources/db/migration/V12__Add_coding_session_table.sql b/himarket-bootstrap/src/main/resources/db/migration/V12__Add_coding_session_table.sql new file mode 100644 index 000000000..4bac18c9f --- /dev/null +++ b/himarket-bootstrap/src/main/resources/db/migration/V12__Add_coding_session_table.sql @@ -0,0 +1,14 @@ +CREATE TABLE IF NOT EXISTS `coding_session` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `session_id` varchar(64) NOT 
NULL, + `cli_session_id` varchar(128) NOT NULL, + `user_id` varchar(64) NOT NULL, + `title` varchar(255) DEFAULT NULL, + `provider_key` varchar(64) DEFAULT NULL, + `cwd` varchar(512) DEFAULT NULL, + `created_at` datetime(3) DEFAULT CURRENT_TIMESTAMP(3), + `updated_at` datetime(3) DEFAULT CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3), + PRIMARY KEY (`id`), + UNIQUE KEY `uk_session_id` (`session_id`), + KEY `idx_user_id` (`user_id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; diff --git a/himarket-bootstrap/src/main/resources/db/migration/V13__Add_coding_session_model_fields.sql b/himarket-bootstrap/src/main/resources/db/migration/V13__Add_coding_session_model_fields.sql new file mode 100644 index 000000000..092a9987e --- /dev/null +++ b/himarket-bootstrap/src/main/resources/db/migration/V13__Add_coding_session_model_fields.sql @@ -0,0 +1,3 @@ +ALTER TABLE `coding_session` + ADD COLUMN `model_product_id` varchar(64) DEFAULT NULL, + ADD COLUMN `model_name` varchar(128) DEFAULT NULL; diff --git a/himarket-bootstrap/src/main/resources/db/migration/V7__Add_k8s_cluster_table.sql b/himarket-bootstrap/src/main/resources/db/migration/V7__Add_k8s_cluster_table.sql new file mode 100644 index 000000000..6f6aa7b50 --- /dev/null +++ b/himarket-bootstrap/src/main/resources/db/migration/V7__Add_k8s_cluster_table.sql @@ -0,0 +1,20 @@ +-- V6__Add_k8s_cluster_table.sql +-- Add k8s_cluster table for persistent K8s cluster configuration storage +-- Description: Support for K8s cluster management with kubeconfig persistence + +-- ======================================== +-- K8sCluster table +-- ======================================== +CREATE TABLE IF NOT EXISTS `k8s_cluster` ( + `id` bigint NOT NULL AUTO_INCREMENT, + `config_id` varchar(64) NOT NULL COMMENT '配置唯一标识(UUID)', + `cluster_name` varchar(128) NOT NULL COMMENT '集群名称', + `server_url` varchar(512) NOT NULL COMMENT 'K8s API Server 地址', + `kubeconfig` text NOT NULL COMMENT 'kubeconfig 内容(YAML 格式)', + 
`description` varchar(512) DEFAULT NULL COMMENT '集群描述', + `created_at` datetime(3) DEFAULT CURRENT_TIMESTAMP(3), + `updated_at` datetime(3) DEFAULT CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3), + PRIMARY KEY (`id`), + UNIQUE KEY `uk_config_id` (`config_id`), + KEY `idx_cluster_name` (`cluster_name`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT='K8s 集群配置表'; diff --git a/himarket-bootstrap/src/main/resources/db/migration/V8__Add_skill_file_table.sql b/himarket-bootstrap/src/main/resources/db/migration/V8__Add_skill_file_table.sql new file mode 100644 index 000000000..c92941ccc --- /dev/null +++ b/himarket-bootstrap/src/main/resources/db/migration/V8__Add_skill_file_table.sql @@ -0,0 +1,13 @@ +ALTER TABLE product MODIFY COLUMN description VARCHAR(1000) DEFAULT NULL; + +CREATE TABLE skill_file ( + id BIGINT AUTO_INCREMENT PRIMARY KEY, + product_id VARCHAR(64) NOT NULL, + path VARCHAR(512) NOT NULL, + encoding VARCHAR(16) NOT NULL DEFAULT 'text', + content MEDIUMTEXT NOT NULL, + size INT DEFAULT 0, + created_at DATETIME(3) DEFAULT CURRENT_TIMESTAMP(3), + updated_at DATETIME(3) DEFAULT CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3), + UNIQUE KEY uk_product_path (product_id, path(191)) +) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci; diff --git a/himarket-bootstrap/src/main/resources/db/migration/V9__Drop_skill_file_table.sql b/himarket-bootstrap/src/main/resources/db/migration/V9__Drop_skill_file_table.sql new file mode 100644 index 000000000..e51bca79f --- /dev/null +++ b/himarket-bootstrap/src/main/resources/db/migration/V9__Drop_skill_file_table.sql @@ -0,0 +1,2 @@ +-- Skill 文件存储已迁移至 Nacos,skill_file 表不再使用 +DROP TABLE IF EXISTS skill_file; diff --git a/himarket-bootstrap/src/main/resources/logback-spring.xml b/himarket-bootstrap/src/main/resources/logback-spring.xml index d1bfb5136..dfdf72a69 100644 --- a/himarket-bootstrap/src/main/resources/logback-spring.xml +++ 
b/himarket-bootstrap/src/main/resources/logback-spring.xml @@ -29,10 +29,18 @@ - + + + ${user.home}/himarket.log + + %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n + UTF-8 + + + @@ -53,4 +61,7 @@ + + + diff --git a/himarket-dal/src/main/java/com/alibaba/himarket/entity/CodingSession.java b/himarket-dal/src/main/java/com/alibaba/himarket/entity/CodingSession.java new file mode 100644 index 000000000..87f797b9c --- /dev/null +++ b/himarket-dal/src/main/java/com/alibaba/himarket/entity/CodingSession.java @@ -0,0 +1,48 @@ +package com.alibaba.himarket.entity; + +import jakarta.persistence.*; +import lombok.*; + +@Entity +@Table( + name = "coding_session", + uniqueConstraints = { + @UniqueConstraint( + columnNames = {"session_id"}, + name = "uk_session_id") + }) +@Data +@EqualsAndHashCode(callSuper = true) +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class CodingSession extends BaseEntity { + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + + @Column(name = "session_id", nullable = false, unique = true, length = 64) + private String sessionId; + + @Column(name = "cli_session_id", nullable = false, length = 128) + private String cliSessionId; + + @Column(name = "user_id", nullable = false, length = 64) + private String userId; + + @Column(name = "title", length = 255) + private String title; + + @Column(name = "provider_key", length = 64) + private String providerKey; + + @Column(name = "cwd", length = 512) + private String cwd; + + @Column(name = "model_product_id", length = 64) + private String modelProductId; + + @Column(name = "model_name", length = 128) + private String modelName; +} diff --git a/himarket-dal/src/main/java/com/alibaba/himarket/entity/NacosInstance.java b/himarket-dal/src/main/java/com/alibaba/himarket/entity/NacosInstance.java index dd978893d..2817318c9 100644 --- a/himarket-dal/src/main/java/com/alibaba/himarket/entity/NacosInstance.java +++ 
b/himarket-dal/src/main/java/com/alibaba/himarket/entity/NacosInstance.java @@ -67,4 +67,12 @@ public class NacosInstance extends BaseEntity { @Column(name = "description", length = 512) private String description; + + @Column(name = "is_default", nullable = false) + @Builder.Default + private Boolean isDefault = false; + + @Column(name = "default_namespace", length = 128, nullable = false) + @Builder.Default + private String defaultNamespace = "public"; } diff --git a/himarket-dal/src/main/java/com/alibaba/himarket/entity/Product.java b/himarket-dal/src/main/java/com/alibaba/himarket/entity/Product.java index 9945ae8bd..30b555633 100644 --- a/himarket-dal/src/main/java/com/alibaba/himarket/entity/Product.java +++ b/himarket-dal/src/main/java/com/alibaba/himarket/entity/Product.java @@ -34,10 +34,7 @@ uniqueConstraints = { @UniqueConstraint( columnNames = {"product_id"}, - name = "uk_product_id"), - @UniqueConstraint( - columnNames = {"name"}, - name = "uk_name") + name = "uk_product_id") }) @Data @EqualsAndHashCode(callSuper = true) @@ -62,7 +59,7 @@ public class Product extends BaseEntity { @Enumerated(EnumType.STRING) private ProductType type; - @Column(name = "description", length = 256) + @Column(name = "description", length = 1000) private String description; @Column(name = "enable_consumer_auth") diff --git a/himarket-dal/src/main/java/com/alibaba/himarket/repository/CodingSessionRepository.java b/himarket-dal/src/main/java/com/alibaba/himarket/repository/CodingSessionRepository.java new file mode 100644 index 000000000..1408caa64 --- /dev/null +++ b/himarket-dal/src/main/java/com/alibaba/himarket/repository/CodingSessionRepository.java @@ -0,0 +1,21 @@ +package com.alibaba.himarket.repository; + +import com.alibaba.himarket.entity.CodingSession; +import java.util.Optional; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.stereotype.Repository; + +@Repository +public interface 
CodingSessionRepository extends BaseRepository { + + Optional findBySessionId(String sessionId); + + Optional findBySessionIdAndUserId(String sessionId, String userId); + + Page findByUserIdOrderByUpdatedAtDesc(String userId, Pageable pageable); + + int countByUserId(String userId); + + void deleteBySessionId(String sessionId); +} diff --git a/himarket-dal/src/main/java/com/alibaba/himarket/repository/NacosInstanceRepository.java b/himarket-dal/src/main/java/com/alibaba/himarket/repository/NacosInstanceRepository.java index 46a1ecc3e..3aee3452b 100644 --- a/himarket-dal/src/main/java/com/alibaba/himarket/repository/NacosInstanceRepository.java +++ b/himarket-dal/src/main/java/com/alibaba/himarket/repository/NacosInstanceRepository.java @@ -41,4 +41,11 @@ public interface NacosInstanceRepository extends BaseRepository findByNacosName(String nacosName); + + /** + * Find the default Nacos instance + * + * @return the default Nacos instance if found + */ + Optional findByIsDefaultTrue(); } diff --git a/himarket-dal/src/main/java/com/alibaba/himarket/support/enums/ProductType.java b/himarket-dal/src/main/java/com/alibaba/himarket/support/enums/ProductType.java index e939eac21..ae3f77f1b 100644 --- a/himarket-dal/src/main/java/com/alibaba/himarket/support/enums/ProductType.java +++ b/himarket-dal/src/main/java/com/alibaba/himarket/support/enums/ProductType.java @@ -29,5 +29,7 @@ public enum ProductType { AGENT_API, MODEL_API, + + AGENT_SKILL, ; } diff --git a/himarket-dal/src/main/java/com/alibaba/himarket/support/product/ProductFeature.java b/himarket-dal/src/main/java/com/alibaba/himarket/support/product/ProductFeature.java index 65f48899a..7363d81c1 100644 --- a/himarket-dal/src/main/java/com/alibaba/himarket/support/product/ProductFeature.java +++ b/himarket-dal/src/main/java/com/alibaba/himarket/support/product/ProductFeature.java @@ -25,4 +25,6 @@ public class ProductFeature { private ModelFeature modelFeature; + + private SkillConfig skillConfig; } diff --git 
a/himarket-dal/src/main/java/com/alibaba/himarket/support/product/SkillConfig.java b/himarket-dal/src/main/java/com/alibaba/himarket/support/product/SkillConfig.java new file mode 100644 index 000000000..7532e79c0 --- /dev/null +++ b/himarket-dal/src/main/java/com/alibaba/himarket/support/product/SkillConfig.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package com.alibaba.himarket.support.product; + +import java.util.List; +import lombok.Data; + +@Data +public class SkillConfig { + + /** + * 技能标签列表 + */ + private List skillTags; + + /** + * 下载次数 + */ + private Long downloadCount; + + /** + * 关联的 Nacos 实例 ID(nacos_instance.nacos_id) + */ + private String nacosId; + + /** + * Nacos 命名空间,默认 "public" + */ + private String namespace; + + /** + * Nacos Skill name(唯一标识) + */ + private String skillName; +} diff --git a/himarket-server/pom.xml b/himarket-server/pom.xml index 4653fd754..7c530abd8 100644 --- a/himarket-server/pom.xml +++ b/himarket-server/pom.xml @@ -197,6 +197,10 @@ jsr305 com.google.code.findbugs + + guava + com.google.guava + @@ -211,6 +215,17 @@ spring-boot-starter-webflux + + org.springframework.boot + spring-boot-starter-websocket + + + + org.jetbrains.pty4j + pty4j + 0.12.13 + + com.aliyun.openservices aliyun-log @@ -285,6 +300,20 @@ google-genai + + + net.jqwik + jqwik + test + + + + + org.apache.commons + commons-compress + 1.26.1 + + - \ No newline at end of file + diff --git a/himarket-server/src/main/java/com/alibaba/himarket/config/AcpProperties.java b/himarket-server/src/main/java/com/alibaba/himarket/config/AcpProperties.java new file mode 100644 index 000000000..c972ec281 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/config/AcpProperties.java @@ -0,0 +1,223 @@ +package com.alibaba.himarket.config; + +import com.alibaba.himarket.service.hicoding.sandbox.SandboxType; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import org.springframework.boot.context.properties.ConfigurationProperties; + +@ConfigurationProperties(prefix = "acp") +public class AcpProperties { + + /** + * 默认使用的 CLI provider key(对应 providers map 中的 key) + */ + private String defaultProvider = "qwen-code"; + + /** + * 默认运行时类型。 + * 当用户未主动选择运行时方案时使用此默认值。 + */ + private String defaultRuntime = "remote"; + + /** + * CLI provider 注册表,支持多种 ACP 兼容的 CLI 工具。 + * 每个 
provider 定义了命令、参数和可选的环境变量。 + */ + private Map providers = new LinkedHashMap<>(); + + /** + * 远程沙箱运行时配置。 + * 支持 K8s Service、Docker 容器、裸机部署等任意可达的 Sidecar 服务。 + */ + private RemoteConfig remote = new RemoteConfig(); + + public String getDefaultProvider() { + return defaultProvider; + } + + public void setDefaultProvider(String defaultProvider) { + this.defaultProvider = defaultProvider; + } + + public String getDefaultRuntime() { + return defaultRuntime; + } + + public void setDefaultRuntime(String defaultRuntime) { + this.defaultRuntime = defaultRuntime; + } + + public Map getProviders() { + return providers; + } + + public void setProviders(Map providers) { + this.providers = providers; + } + + public RemoteConfig getRemote() { + return remote; + } + + public void setRemote(RemoteConfig remote) { + this.remote = remote; + } + + /** + * 根据 provider key 获取配置,找不到则返回 null。 + */ + public CliProviderConfig getProvider(String key) { + return providers.get(key); + } + + /** + * 获取默认 provider 配置。 + */ + public CliProviderConfig getDefaultProviderConfig() { + return providers.get(defaultProvider); + } + + /** + * 单个 CLI provider 的配置。 + */ + public static class CliProviderConfig { + + private String displayName; + private String command; + private String args = "--acp"; + private Map env = new LinkedHashMap<>(); + private List compatibleRuntimes; + private boolean supportsCustomModel = false; + private boolean supportsMcp = false; + private boolean supportsSkill = false; + private List authOptions; + private String authEnvVar; + + public String getDisplayName() { + return displayName; + } + + public void setDisplayName(String displayName) { + this.displayName = displayName; + } + + public String getCommand() { + return command; + } + + public void setCommand(String command) { + this.command = command; + } + + public String getArgs() { + return args; + } + + public void setArgs(String args) { + this.args = args; + } + + public Map getEnv() { + return env; + } + + public void 
setEnv(Map env) { + this.env = env; + } + + public List getCompatibleRuntimes() { + return compatibleRuntimes; + } + + public void setCompatibleRuntimes(List compatibleRuntimes) { + this.compatibleRuntimes = compatibleRuntimes; + } + + public boolean isSupportsCustomModel() { + return supportsCustomModel; + } + + public void setSupportsCustomModel(boolean supportsCustomModel) { + this.supportsCustomModel = supportsCustomModel; + } + + public boolean isSupportsMcp() { + return supportsMcp; + } + + public void setSupportsMcp(boolean supportsMcp) { + this.supportsMcp = supportsMcp; + } + + public boolean isSupportsSkill() { + return supportsSkill; + } + + public void setSupportsSkill(boolean supportsSkill) { + this.supportsSkill = supportsSkill; + } + + public List getAuthOptions() { + return authOptions; + } + + public void setAuthOptions(List authOptions) { + this.authOptions = authOptions; + } + + public String getAuthEnvVar() { + return authEnvVar; + } + + public void setAuthEnvVar(String authEnvVar) { + this.authEnvVar = authEnvVar; + } + } + + /** + * 远程沙箱运行时配置。 + * 不依赖 K8s API,只需 Sidecar 服务地址可达即可。 + */ + public static class RemoteConfig { + + /** + * Sidecar 服务地址。 + * 可以是: + * - K8s Service DNS: sandbox-shared.default.svc.cluster.local + * - Docker 容器名: sandbox + * - IP 地址: 192.168.1.100 + * - localhost(本地 Docker 部署) + * 留空表示未配置远程沙箱。 + */ + private String host = ""; + + /** + * Sidecar 服务端口,默认 8080。 + */ + private int port = 8080; + + public String getHost() { + return host; + } + + public void setHost(String host) { + this.host = host; + } + + public int getPort() { + return port; + } + + public void setPort(int port) { + this.port = port; + } + + /** + * 判断远程沙箱是否已配置(host 非空)。 + */ + public boolean isConfigured() { + return host != null && !host.isBlank(); + } + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/controller/CliProviderController.java b/himarket-server/src/main/java/com/alibaba/himarket/controller/CliProviderController.java new 
file mode 100644 index 000000000..850afa14e --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/controller/CliProviderController.java @@ -0,0 +1,406 @@ +package com.alibaba.himarket.controller; + +import com.alibaba.himarket.config.AcpProperties; +import com.alibaba.himarket.config.AcpProperties.CliProviderConfig; +import com.alibaba.himarket.core.annotation.DeveloperAuth; +import com.alibaba.himarket.core.security.ContextHolder; +import com.alibaba.himarket.dto.params.product.QueryProductParam; +import com.alibaba.himarket.dto.result.cli.MarketMcpInfo; +import com.alibaba.himarket.dto.result.cli.MarketMcpsResponse; +import com.alibaba.himarket.dto.result.cli.MarketModelInfo; +import com.alibaba.himarket.dto.result.cli.MarketModelsResponse; +import com.alibaba.himarket.dto.result.cli.MarketSkillInfo; +import com.alibaba.himarket.dto.result.common.PageResult; +import com.alibaba.himarket.dto.result.consumer.ConsumerCredentialResult; +import com.alibaba.himarket.dto.result.consumer.ConsumerResult; +import com.alibaba.himarket.dto.result.consumer.CredentialContext; +import com.alibaba.himarket.dto.result.model.ModelConfigResult; +import com.alibaba.himarket.dto.result.product.ProductResult; +import com.alibaba.himarket.dto.result.product.SubscriptionResult; +import com.alibaba.himarket.service.ConsumerService; +import com.alibaba.himarket.service.ProductService; +import com.alibaba.himarket.service.hicoding.cli.ProtocolTypeMapper; +import com.alibaba.himarket.service.hicoding.filesystem.BaseUrlExtractor; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxType; +import com.alibaba.himarket.support.chat.mcp.MCPTransportConfig; +import com.alibaba.himarket.support.consumer.ApiKeyConfig; +import com.alibaba.himarket.support.enums.MCPTransportMode; +import com.alibaba.himarket.support.enums.ProductStatus; +import com.alibaba.himarket.support.enums.ProductType; +import com.alibaba.himarket.support.enums.SubscriptionStatus; +import 
io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.tags.Tag; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import lombok.RequiredArgsConstructor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.data.domain.PageRequest; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +@Tag(name = "CLI Provider管理", description = "查询可用的 ACP CLI Provider 列表") +@RestController +@RequestMapping("/cli-providers") +@RequiredArgsConstructor +public class CliProviderController { + + private static final Logger logger = LoggerFactory.getLogger(CliProviderController.class); + + private final AcpProperties acpProperties; + private final ConsumerService consumerService; + private final ProductService productService; + private final ContextHolder contextHolder; + + @Operation(summary = "获取当前开发者已订阅的模型市场模型列表") + @GetMapping("/market-models") + @DeveloperAuth + public MarketModelsResponse listMarketModels() { + // 1. 获取 Primary Consumer + ConsumerResult consumer; + try { + consumer = consumerService.getPrimaryConsumer(); + } catch (Exception e) { + logger.debug("No primary consumer found for current developer: {}", e.getMessage()); + return MarketModelsResponse.builder() + .models(Collections.emptyList()) + .apiKey(null) + .build(); + } + + String consumerId = consumer.getConsumerId(); + + // 2. 获取订阅列表,筛选 APPROVED 状态 + List subscriptions = + consumerService.listConsumerSubscriptions(consumerId); + List approvedSubscriptions = + subscriptions.stream() + .filter(s -> SubscriptionStatus.APPROVED.name().equals(s.getStatus())) + .collect(Collectors.toList()); + + // 3. 
获取 apiKey + String apiKey = extractApiKey(consumerId); + + if (approvedSubscriptions.isEmpty()) { + return MarketModelsResponse.builder() + .models(Collections.emptyList()) + .apiKey(apiKey) + .build(); + } + + // 4. 批量获取产品详情,然后按 MODEL_API 类型筛选 + List productIds = + approvedSubscriptions.stream() + .map(SubscriptionResult::getProductId) + .collect(Collectors.toList()); + Map productMap = productService.getProducts(productIds); + + // 5. 对每个产品提取信息,仅处理 MODEL_API 类型 + List models = new ArrayList<>(); + for (SubscriptionResult subscription : approvedSubscriptions) { + ProductResult product = productMap.get(subscription.getProductId()); + if (product == null) { + logger.warn( + "Product not found for subscription: productId={}", + subscription.getProductId()); + continue; + } + + // 通过产品详情中的 type 字段筛选 MODEL_API + if (product.getType() != ProductType.MODEL_API) { + continue; + } + + MarketModelInfo modelInfo = buildMarketModelInfo(product); + if (modelInfo != null) { + models.add(modelInfo); + } + } + + // 6. 组装响应 + return MarketModelsResponse.builder().models(models).apiKey(apiKey).build(); + } + + @Operation(summary = "获取当前开发者已订阅的 MCP Server 列表") + @GetMapping("/market-mcps") + @DeveloperAuth + public MarketMcpsResponse listMarketMcps() { + // 1. 获取 Primary Consumer + ConsumerResult consumer; + try { + consumer = consumerService.getPrimaryConsumer(); + } catch (Exception e) { + logger.debug("No primary consumer found for current developer: {}", e.getMessage()); + return MarketMcpsResponse.builder() + .mcpServers(Collections.emptyList()) + .authHeaders(null) + .build(); + } + + String consumerId = consumer.getConsumerId(); + + // 2. 
获取订阅列表,筛选 APPROVED 状态 + List subscriptions = + consumerService.listConsumerSubscriptions(consumerId); + List approvedSubscriptions = + subscriptions.stream() + .filter(s -> SubscriptionStatus.APPROVED.name().equals(s.getStatus())) + .collect(Collectors.toList()); + + if (approvedSubscriptions.isEmpty()) { + return MarketMcpsResponse.builder() + .mcpServers(Collections.emptyList()) + .authHeaders(extractAuthHeaders()) + .build(); + } + + // 3. 批量获取产品详情,筛选 MCP_SERVER 类型 + List productIds = + approvedSubscriptions.stream() + .map(SubscriptionResult::getProductId) + .collect(Collectors.toList()); + Map productMap = productService.getProducts(productIds); + + // 4. 对每个产品提取 MCP 信息 + List mcpServers = new ArrayList<>(); + for (SubscriptionResult subscription : approvedSubscriptions) { + ProductResult product = productMap.get(subscription.getProductId()); + if (product == null) { + logger.warn( + "Product not found for subscription: productId={}", + subscription.getProductId()); + continue; + } + + if (product.getType() != ProductType.MCP_SERVER) { + continue; + } + + MarketMcpInfo mcpInfo = buildMarketMcpInfo(product); + if (mcpInfo != null) { + mcpServers.add(mcpInfo); + } + } + + // 5. 获取 CredentialContext 提取 authHeaders + Map authHeaders = extractAuthHeaders(); + + // 6. 
组装响应 + return MarketMcpsResponse.builder().mcpServers(mcpServers).authHeaders(authHeaders).build(); + } + + @Operation(summary = "获取已发布的 Skill 列表") + @GetMapping("/market-skills") + public List listMarketSkills() { + QueryProductParam param = new QueryProductParam(); + param.setType(ProductType.AGENT_SKILL); + param.setStatus(ProductStatus.PUBLISHED); + param.setPortalId(contextHolder.getPortal()); + + PageResult pageResult = + productService.listProducts(param, PageRequest.of(0, 1000)); + + return pageResult.getContent().stream() + .map( + product -> { + List skillTags = null; + if (product.getFeature() != null + && product.getFeature().getSkillConfig() != null) { + skillTags = product.getFeature().getSkillConfig().getSkillTags(); + } + return MarketSkillInfo.builder() + .productId(product.getProductId()) + .name(product.getName()) + .description(product.getDescription()) + .skillTags(skillTags) + .build(); + }) + .collect(Collectors.toList()); + } + + private Map extractAuthHeaders() { + try { + CredentialContext credentialContext = + consumerService.getDefaultCredential(contextHolder.getUser()); + Map headers = credentialContext.copyHeaders(); + return headers.isEmpty() ? null : headers; + } catch (Exception e) { + logger.debug("Failed to get auth headers: {}", e.getMessage()); + return null; + } + } + + private MarketMcpInfo buildMarketMcpInfo(ProductResult product) { + if (product.getMcpConfig() == null) { + logger.warn( + "Product mcpConfig is incomplete, skipping: productId={}, name={}", + product.getProductId(), + product.getName()); + return null; + } + + try { + MCPTransportConfig transportConfig = product.getMcpConfig().toTransportConfig(); + if (transportConfig == null) { + logger.warn( + "Failed to extract transport config from product, skipping: productId={}," + + " name={}", + product.getProductId(), + product.getName()); + return null; + } + + String transportType = + transportConfig.getTransportMode() == MCPTransportMode.STREAMABLE_HTTP + ? 
"streamable-http" + : "sse"; + + return MarketMcpInfo.builder() + .productId(product.getProductId()) + .name(transportConfig.getMcpServerName()) + .url(transportConfig.getUrl()) + .transportType(transportType) + .description(product.getDescription()) + .build(); + } catch (Exception e) { + logger.warn( + "Error processing mcpConfig for product, skipping: productId={}, name={}," + + " error={}", + product.getProductId(), + product.getName(), + e.getMessage()); + return null; + } + } + + private String extractApiKey(String consumerId) { + try { + ConsumerCredentialResult credential = consumerService.getCredential(consumerId); + if (credential == null || credential.getApiKeyConfig() == null) { + return null; + } + ApiKeyConfig apiKeyConfig = credential.getApiKeyConfig(); + if (apiKeyConfig.getCredentials() == null || apiKeyConfig.getCredentials().isEmpty()) { + return null; + } + return apiKeyConfig.getCredentials().get(0).getApiKey(); + } catch (Exception e) { + logger.debug( + "Failed to get credential for consumer {}: {}", consumerId, e.getMessage()); + return null; + } + } + + private MarketModelInfo buildMarketModelInfo(ProductResult product) { + // 提取 modelId + String modelId = null; + if (product.getFeature() != null + && product.getFeature().getModelFeature() != null + && product.getFeature().getModelFeature().getModel() != null) { + modelId = product.getFeature().getModelFeature().getModel(); + } + + // 提取 baseUrl + ModelConfigResult modelConfig = product.getModelConfig(); + if (modelConfig == null || modelConfig.getModelAPIConfig() == null) { + logger.warn( + "Product modelConfig is incomplete, skipping: productId={}, name={}", + product.getProductId(), + product.getName()); + return null; + } + + String baseUrl = BaseUrlExtractor.extract(modelConfig.getModelAPIConfig().getRoutes()); + if (baseUrl == null) { + logger.warn( + "Failed to extract baseUrl from product routes, skipping: productId={}," + + " name={}", + product.getProductId(), + 
product.getName()); + return null; + } + + // 提取 protocolType + String protocolType = + ProtocolTypeMapper.map(modelConfig.getModelAPIConfig().getAiProtocols()); + + return MarketModelInfo.builder() + .productId(product.getProductId()) + .name(product.getName()) + .modelId(modelId) + .baseUrl(baseUrl) + .protocolType(protocolType) + .description(product.getDescription()) + .build(); + } + + @Operation(summary = "获取可用的 CLI Provider 列表(含运行时兼容性信息)") + @GetMapping + public List listProviders() { + List result = new ArrayList<>(); + String defaultKey = acpProperties.getDefaultProvider(); + for (Map.Entry entry : acpProperties.getProviders().entrySet()) { + CliProviderConfig config = entry.getValue(); + // 兼容 K8S 运行时的 Provider 可在沙箱中运行,无需本机安装命令 + boolean canRunInSandbox = + config.getCompatibleRuntimes() != null + && config.getCompatibleRuntimes().contains(SandboxType.REMOTE); + boolean available = canRunInSandbox || isCommandAvailable(config.getCommand()); + result.add( + new CliProviderInfo( + entry.getKey(), + config.getDisplayName() != null + ? 
config.getDisplayName() + : entry.getKey(), + entry.getKey().equals(defaultKey), + available, + config.getCompatibleRuntimes(), + config.isSupportsCustomModel(), + config.isSupportsMcp(), + config.isSupportsSkill(), + config.getAuthOptions(), + config.getAuthEnvVar())); + } + return result; + } + + /** + * 检测命令是否在系统 PATH 中可用。 + * 对于 npx 类命令,只检查 npx 本身是否存在(包会按需下载)。 + */ + static boolean isCommandAvailable(String command) { + if (command == null || command.isBlank()) { + return false; + } + try { + ProcessBuilder pb = new ProcessBuilder("which", command).redirectErrorStream(true); + Process process = pb.start(); + boolean exited = process.waitFor(5, java.util.concurrent.TimeUnit.SECONDS); + if (!exited) { + process.destroyForcibly(); + return false; + } + return process.exitValue() == 0; + } catch (Exception e) { + logger.debug( + "Failed to check command availability for '{}': {}", command, e.getMessage()); + return false; + } + } + + public record CliProviderInfo( + String key, + String displayName, + boolean isDefault, + boolean available, + List compatibleRuntimes, + boolean supportsCustomModel, + boolean supportsMcp, + boolean supportsSkill, + List authOptions, + String authEnvVar) {} +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/controller/CodingSessionController.java b/himarket-server/src/main/java/com/alibaba/himarket/controller/CodingSessionController.java new file mode 100644 index 000000000..4df43d8f5 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/controller/CodingSessionController.java @@ -0,0 +1,44 @@ +package com.alibaba.himarket.controller; + +import com.alibaba.himarket.core.annotation.DeveloperAuth; +import com.alibaba.himarket.dto.params.coding.CreateCodingSessionParam; +import com.alibaba.himarket.dto.params.coding.UpdateCodingSessionParam; +import com.alibaba.himarket.dto.result.coding.CodingSessionResult; +import com.alibaba.himarket.dto.result.common.PageResult; +import 
com.alibaba.himarket.service.CodingSessionService; +import jakarta.validation.Valid; +import lombok.RequiredArgsConstructor; +import org.springframework.data.domain.Pageable; +import org.springframework.validation.annotation.Validated; +import org.springframework.web.bind.annotation.*; + +@RestController +@RequestMapping("/coding-sessions") +@RequiredArgsConstructor +@Validated +@DeveloperAuth +public class CodingSessionController { + + private final CodingSessionService codingSessionService; + + @PostMapping + public CodingSessionResult createSession(@Valid @RequestBody CreateCodingSessionParam param) { + return codingSessionService.createSession(param); + } + + @GetMapping + public PageResult listSessions(Pageable pageable) { + return codingSessionService.listSessions(pageable); + } + + @PatchMapping("/{sessionId}") + public CodingSessionResult updateSession( + @PathVariable String sessionId, @Valid @RequestBody UpdateCodingSessionParam param) { + return codingSessionService.updateSession(sessionId, param); + } + + @DeleteMapping("/{sessionId}") + public void deleteSession(@PathVariable String sessionId) { + codingSessionService.deleteSession(sessionId); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/controller/NacosController.java b/himarket-server/src/main/java/com/alibaba/himarket/controller/NacosController.java index b8ef7d0ce..25a11d2b9 100644 --- a/himarket-server/src/main/java/com/alibaba/himarket/controller/NacosController.java +++ b/himarket-server/src/main/java/com/alibaba/himarket/controller/NacosController.java @@ -59,6 +59,25 @@ public PageResult fetchNacos(@Valid QueryNacosParam param, Pagea return nacosService.fetchNacos(param, pageable); } + @Operation(summary = "获取默认Nacos实例") + @GetMapping("/default") + public NacosResult getDefaultNacosInstance() { + return nacosService.getDefaultNacosInstance(); + } + + @Operation(summary = "设置默认Nacos实例", description = "设为默认后,新建的 Agent Skill 将自动绑定该 Nacos 实例") + 
@PutMapping("/{nacosId}/default") + public void setDefaultNacosInstance(@PathVariable String nacosId) { + nacosService.setDefaultNacosInstance(nacosId); + } + + @Operation(summary = "设置默认命名空间", description = "用已保存的认证信息连接 Nacos 验证命名空间存在后,设置为该实例的默认命名空间") + @PutMapping("/{nacosId}/default-namespace") + public void setDefaultNamespace( + @PathVariable String nacosId, @RequestParam("namespaceId") String namespaceId) { + nacosService.setDefaultNamespace(nacosId, namespaceId); + } + @Operation(summary = "获取Nacos实例详情", description = "根据ID获取Nacos实例详细信息") @GetMapping("/{nacosId}") public NacosResult getNacosInstance(@PathVariable String nacosId) { diff --git a/himarket-server/src/main/java/com/alibaba/himarket/controller/ProductController.java b/himarket-server/src/main/java/com/alibaba/himarket/controller/ProductController.java index b53402e4b..70e188528 100644 --- a/himarket-server/src/main/java/com/alibaba/himarket/controller/ProductController.java +++ b/himarket-server/src/main/java/com/alibaba/himarket/controller/ProductController.java @@ -173,4 +173,14 @@ public void reloadProductConfig(@PathVariable String productId) { public McpToolListResult listMcpTools(@PathVariable String productId) { return productService.listMcpTools(productId); } + + @Operation(summary = "更新 Skill 的 Nacos 关联") + @PutMapping("/{productId}/skill-nacos") + @AdminAuth + public void updateSkillNacos( + @PathVariable String productId, + @RequestBody @Valid + com.alibaba.himarket.dto.params.product.UpdateSkillNacosParam param) { + productService.updateSkillNacos(productId, param.getNacosId(), param.getNamespace()); + } } diff --git a/himarket-server/src/main/java/com/alibaba/himarket/controller/SkillController.java b/himarket-server/src/main/java/com/alibaba/himarket/controller/SkillController.java new file mode 100644 index 000000000..2d1d75f0b --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/controller/SkillController.java @@ -0,0 +1,205 @@ +package 
com.alibaba.himarket.controller; + +import com.alibaba.himarket.core.annotation.AdminAuth; +import com.alibaba.himarket.core.annotation.AdminOrDeveloperAuth; +import com.alibaba.himarket.core.exception.BusinessException; +import com.alibaba.himarket.core.exception.ErrorCode; +import com.alibaba.himarket.core.skill.SkillZipParser; +import com.alibaba.himarket.dto.result.skill.SkillFileContentResult; +import com.alibaba.himarket.dto.result.skill.SkillFileTreeNode; +import com.alibaba.himarket.entity.Product; +import com.alibaba.himarket.repository.ProductRepository; +import com.alibaba.himarket.service.SkillService; +import com.alibaba.himarket.support.product.ProductFeature; +import com.alibaba.himarket.support.product.SkillConfig; +import com.alibaba.nacos.api.ai.model.skills.Skill; +import com.alibaba.nacos.api.ai.model.skills.SkillBasicInfo; +import com.alibaba.nacos.api.model.Page; +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.servlet.http.HttpServletResponse; +import java.io.IOException; +import java.util.List; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.http.MediaType; +import org.springframework.web.bind.annotation.*; +import org.springframework.web.multipart.MultipartFile; + +@Tag(name = "Skill 管理", description = "Skill CRUD 操作和视图查询") +@RestController +@RequestMapping("/skills") +@Slf4j +@RequiredArgsConstructor +public class SkillController { + + private static final long MAX_ZIP_SIZE = 10 * 1024 * 1024; + + private final SkillService skillService; + private final ProductRepository productRepository; + + // ==================== 通过 productId 解析 Nacos 坐标的接口(前端使用) ==================== + + /** + * 从 Product 的 skillConfig 中解析 nacosId、namespace、skillName。 + */ + private SkillCoordinate resolveSkillCoordinate(String productId) { + Product product = + productRepository + .findByProductId(productId) + .orElseThrow( + () -> + new 
BusinessException( + ErrorCode.NOT_FOUND, "Product", productId)); + ProductFeature feature = product.getFeature(); + if (feature == null || feature.getSkillConfig() == null) { + throw new BusinessException( + ErrorCode.INVALID_REQUEST, "该产品未关联 Nacos 实例,请先在 Link Nacos 页面关联"); + } + SkillConfig sc = feature.getSkillConfig(); + if (sc.getNacosId() == null || sc.getNacosId().isBlank()) { + throw new BusinessException( + ErrorCode.INVALID_REQUEST, "该产品未关联 Nacos 实例,请先在 Link Nacos 页面关联"); + } + String namespace = sc.getNamespace() != null ? sc.getNamespace() : "public"; + return new SkillCoordinate(sc.getNacosId(), namespace, sc.getSkillName()); + } + + private record SkillCoordinate(String nacosId, String namespace, String skillName) {} + + @Operation(summary = "ZIP 上传 Skill(通过 productId)") + @PostMapping(value = "/{productId}/package", consumes = MediaType.MULTIPART_FORM_DATA_VALUE) + @AdminAuth + public String uploadSkillByProduct( + @PathVariable String productId, @RequestParam("file") MultipartFile file) + throws IOException { + if (file.isEmpty()) { + throw new BusinessException(ErrorCode.INVALID_PARAMETER, "ZIP 文件不能为空"); + } + if (file.getSize() > MAX_ZIP_SIZE) { + throw new BusinessException(ErrorCode.INVALID_PARAMETER, "ZIP 文件大小不能超过 10MB"); + } + SkillCoordinate coord = resolveSkillCoordinate(productId); + Skill skill = SkillZipParser.parseSkillFromZip(file.getBytes(), coord.namespace()); + String skillName = skillService.createSkill(coord.nacosId(), coord.namespace(), skill); + + // 回写 skillName 和 description 到 product + Product product = productRepository.findByProductId(productId).orElseThrow(); + product.getFeature().getSkillConfig().setSkillName(skillName); + product.setDescription(skill.getDescription()); + productRepository.save(product); + + return skillName; + } + + @Operation(summary = "获取文件树(通过 productId)") + @GetMapping("/{productId}/files") + @AdminOrDeveloperAuth + public List getFileTreeByProduct(@PathVariable String productId) { + SkillCoordinate 
coord = resolveSkillCoordinate(productId); + if (coord.skillName() == null || coord.skillName().isBlank()) { + return List.of(); + } + return skillService.getFileTree(coord.nacosId(), coord.namespace(), coord.skillName()); + } + + @Operation(summary = "获取单文件内容(通过 productId)") + @GetMapping("/{productId}/files/{*filePath}") + @AdminOrDeveloperAuth + public SkillFileContentResult getFileContentByProduct( + @PathVariable String productId, @PathVariable String filePath) { + SkillCoordinate coord = resolveSkillCoordinate(productId); + if (coord.skillName() == null || coord.skillName().isBlank()) { + throw new BusinessException(ErrorCode.NOT_FOUND, "Skill 尚未上传"); + } + // 去掉前导斜杠 + String path = filePath.startsWith("/") ? filePath.substring(1) : filePath; + return skillService.getFileContent( + coord.nacosId(), coord.namespace(), coord.skillName(), path); + } + + @Operation(summary = "ZIP 下载 Skill(通过 productId)") + @GetMapping("/{productId}/download") + public void downloadSkillByProduct(@PathVariable String productId, HttpServletResponse response) + throws IOException { + SkillCoordinate coord = resolveSkillCoordinate(productId); + if (coord.skillName() == null || coord.skillName().isBlank()) { + throw new BusinessException(ErrorCode.NOT_FOUND, "Skill 尚未上传"); + } + skillService.downloadZip(coord.nacosId(), coord.namespace(), coord.skillName(), response); + } + + // ==================== 直接通过 nacosId 操作的接口(高级管理) ==================== + + @Operation(summary = "创建 Skill(直接 Nacos)") + @PostMapping("/nacos") + @AdminAuth + public String createSkill( + @RequestParam String nacosId, + @RequestParam(defaultValue = "public") String namespace, + @RequestBody Skill skill) { + return skillService.createSkill(nacosId, namespace, skill); + } + + @Operation(summary = "ZIP 上传创建 Skill(直接 Nacos)") + @PostMapping(value = "/nacos/upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE) + @AdminAuth + public String uploadSkill( + @RequestParam String nacosId, + @RequestParam(defaultValue = 
"public") String namespace, + @RequestParam("file") MultipartFile file) + throws IOException { + if (file.isEmpty()) { + throw new BusinessException(ErrorCode.INVALID_PARAMETER, "ZIP 文件不能为空"); + } + if (file.getSize() > MAX_ZIP_SIZE) { + throw new BusinessException(ErrorCode.INVALID_PARAMETER, "ZIP 文件大小不能超过 10MB"); + } + Skill skill = SkillZipParser.parseSkillFromZip(file.getBytes(), namespace); + return skillService.createSkill(nacosId, namespace, skill); + } + + @Operation(summary = "分页查询 Skill 列表(直接 Nacos)") + @GetMapping("/nacos") + @AdminAuth + public Page listSkills( + @RequestParam String nacosId, + @RequestParam(defaultValue = "public") String namespace, + @RequestParam(required = false) String search, + @RequestParam(defaultValue = "1") int pageNo, + @RequestParam(defaultValue = "20") int pageSize) { + return skillService.listSkills(nacosId, namespace, search, pageNo, pageSize); + } + + @Operation(summary = "查询 Skill 详情(直接 Nacos)") + @GetMapping("/nacos/{name}") + @AdminAuth + public Skill getSkillDetail( + @RequestParam String nacosId, + @RequestParam(defaultValue = "public") String namespace, + @PathVariable String name) { + return skillService.getSkillDetail(nacosId, namespace, name); + } + + @Operation(summary = "更新 Skill(直接 Nacos)") + @PutMapping("/nacos/{name}") + @AdminAuth + public void updateSkill( + @RequestParam String nacosId, + @RequestParam(defaultValue = "public") String namespace, + @PathVariable String name, + @RequestBody Skill skill) { + skill.setName(name); + skillService.updateSkill(nacosId, namespace, skill); + } + + @Operation(summary = "删除 Skill(直接 Nacos)") + @DeleteMapping("/nacos/{name}") + @AdminAuth + public void deleteSkill( + @RequestParam String nacosId, + @RequestParam(defaultValue = "public") String namespace, + @PathVariable String name) { + skillService.deleteSkill(nacosId, namespace, name); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/controller/WorkspaceController.java 
b/himarket-server/src/main/java/com/alibaba/himarket/controller/WorkspaceController.java new file mode 100644 index 000000000..38643d693 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/controller/WorkspaceController.java @@ -0,0 +1,201 @@ +package com.alibaba.himarket.controller; + +import com.alibaba.himarket.core.annotation.AdminOrDeveloperAuth; +import com.alibaba.himarket.core.exception.BusinessException; +import com.alibaba.himarket.core.exception.ErrorCode; +import com.alibaba.himarket.service.hicoding.RemoteWorkspaceService; +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.tags.Tag; +import java.io.IOException; +import java.nio.file.Paths; +import java.util.List; +import java.util.Map; +import java.util.Set; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; +import org.springframework.web.multipart.MultipartFile; + +@Tag(name = "Workspace", description = "Read files from user workspace") +@RestController +@RequestMapping("/workspace") +@RequiredArgsConstructor +@Slf4j +@AdminOrDeveloperAuth +public class WorkspaceController { + + private static final Set IMAGE_EXTENSIONS = + Set.of(".png", ".jpg", ".jpeg", ".gif", ".webp"); + + private static final Set BINARY_EXTENSIONS = Set.of(".pdf"); + + private static final Set CONVERTIBLE_EXTENSIONS = Set.of(".pptx", ".ppt"); + + private final RemoteWorkspaceService remoteWorkspaceService; + + @Operation(summary = "Upload file to workspace") + @PostMapping(value = 
"/upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE) + public ResponseEntity uploadFile(@RequestParam("file") MultipartFile file) { + if (file.isEmpty()) { + return ResponseEntity.badRequest().body(Map.of("error", "文件不能为空")); + } + if (file.getSize() > 5 * 1024 * 1024) { + return ResponseEntity.badRequest().body(Map.of("error", "文件大小不能超过 5MB")); + } + String userId = getCurrentUserId(); + try { + String filePath = + remoteWorkspaceService.uploadFile( + userId, file.getOriginalFilename(), file.getBytes()); + return ResponseEntity.ok(Map.of("filePath", filePath)); + } catch (IOException e) { + log.error("Failed to upload file to sandbox: user={}", userId, e); + return ResponseEntity.internalServerError().body(Map.of("error", "文件上传失败")); + } + } + + @Operation(summary = "Read file content from workspace") + @GetMapping("/file") + public ResponseEntity readFile( + @RequestParam String path, + @RequestParam(defaultValue = "false") boolean raw, + @RequestParam(required = false) String runtime) { + String userId = getCurrentUserId(); + + try { + String ext = getExtension(Paths.get(path).getFileName().toString()); + boolean isBinary = + IMAGE_EXTENSIONS.contains(ext) + || BINARY_EXTENSIONS.contains(ext) + || CONVERTIBLE_EXTENSIONS.contains(ext); + String encoding = isBinary ? 
"base64" : "utf-8"; + Map result = + remoteWorkspaceService.readFileWithEncoding(userId, path, encoding); + return ResponseEntity.ok( + Map.of( + "content", result.get("content"), + "encoding", result.get("encoding"))); + } catch (IOException e) { + log.error("Failed to read file from sandbox: user={}, path={}", userId, path, e); + return ResponseEntity.internalServerError() + .body(Map.of("error", "Failed to read file from sandbox")); + } + } + + @Operation(summary = "Download file from workspace as binary stream") + @GetMapping("/download") + public ResponseEntity downloadFile( + @RequestParam String path, @RequestParam(required = false) String runtime) { + String userId = getCurrentUserId(); + String fileName = Paths.get(path).getFileName().toString(); + String ext = getExtension(fileName); + String mime = getMimeType(ext); + + try { + byte[] bytes = remoteWorkspaceService.readFileBytes(userId, path); + return ResponseEntity.ok() + .header( + "Content-Disposition", + "attachment; filename=\"" + sanitizeFileName(fileName) + "\"") + .contentType(MediaType.parseMediaType(mime)) + .contentLength(bytes.length) + .body(bytes); + } catch (IOException e) { + log.error("Failed to download file {} for user {}", path, userId, e); + return ResponseEntity.internalServerError().build(); + } + } + + private static String getMimeType(String ext) { + return switch (ext) { + case ".pptx" -> + "application/vnd.openxmlformats-officedocument.presentationml.presentation"; + case ".ppt" -> "application/vnd.ms-powerpoint"; + case ".docx" -> + "application/vnd.openxmlformats-officedocument.wordprocessingml.document"; + case ".doc" -> "application/msword"; + case ".xlsx" -> "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"; + case ".xls" -> "application/vnd.ms-excel"; + case ".pdf" -> "application/pdf"; + case ".zip" -> "application/zip"; + case ".mp4" -> "video/mp4"; + case ".mp3" -> "audio/mpeg"; + default -> "application/octet-stream"; + }; + } + + @Operation(summary 
= "List changed files in workspace directory") + @GetMapping("/changes") + public ResponseEntity listWorkspaceChanges( + @RequestParam String cwd, + @RequestParam long since, + @RequestParam(defaultValue = "200") int limit, + @RequestParam(required = false) String runtime) { + String userId = getCurrentUserId(); + + try { + List> changes = + remoteWorkspaceService.getChanges(userId, cwd, since); + return ResponseEntity.ok(Map.of("changes", changes)); + } catch (IOException e) { + log.error("Failed to get changes from sandbox: user={}, cwd={}", userId, cwd, e); + return ResponseEntity.internalServerError() + .body(Map.of("error", "Failed to get changes from sandbox")); + } + } + + // ======================== Directory Tree API ======================== + + @Operation(summary = "Get directory tree for workspace") + @GetMapping("/tree") + public ResponseEntity getDirectoryTree( + @RequestParam String cwd, + @RequestParam(defaultValue = "3") int depth, + @RequestParam(required = false) String runtime) { + String userId = getCurrentUserId(); + + try { + Map tree = remoteWorkspaceService.getDirectoryTree(userId, cwd, depth); + return ResponseEntity.ok(tree); + } catch (IOException e) { + log.error("Failed to get directory tree from sandbox: user={}, cwd={}", userId, cwd, e); + return ResponseEntity.internalServerError() + .body(Map.of("error", "Failed to get directory tree from sandbox")); + } + } + + private String getCurrentUserId() { + var auth = SecurityContextHolder.getContext().getAuthentication(); + if (auth != null && auth.getPrincipal() instanceof String principal) { + return principal; + } + throw new BusinessException(ErrorCode.UNAUTHORIZED, "用户未认证"); + } + + private static String getExtension(String fileName) { + int lastDot = fileName.lastIndexOf('.'); + if (lastDot < 0) return ""; + return fileName.substring(lastDot).toLowerCase(); + } + + private static String sanitizeFileName(String name) { + if (name == null || name.isBlank()) { + return "unnamed"; + } + 
// Strip path separators and keep only the filename part + String baseName = name; + int lastSlash = Math.max(baseName.lastIndexOf('/'), baseName.lastIndexOf('\\')); + if (lastSlash >= 0) { + baseName = baseName.substring(lastSlash + 1); + } + // Replace any remaining dangerous characters + return baseName.replaceAll("[^a-zA-Z0-9._-]", "_"); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/core/advice/ResponseAdvice.java b/himarket-server/src/main/java/com/alibaba/himarket/core/advice/ResponseAdvice.java index 94c4e18f8..09c565807 100644 --- a/himarket-server/src/main/java/com/alibaba/himarket/core/advice/ResponseAdvice.java +++ b/himarket-server/src/main/java/com/alibaba/himarket/core/advice/ResponseAdvice.java @@ -34,6 +34,7 @@ import org.springframework.web.servlet.mvc.method.annotation.ResponseBodyAdvice; import org.springframework.web.servlet.mvc.method.annotation.SseEmitter; import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; /** * Unified response wrapper @@ -66,7 +67,8 @@ public boolean supports( return !type.equals(ResponseEntity.class) && !type.equals(Response.class) && !type.equals(SseEmitter.class) - && !type.equals(Flux.class); + && !type.equals(Flux.class) + && !type.equals(Mono.class); } @Override diff --git a/himarket-server/src/main/java/com/alibaba/himarket/core/exception/ErrorCode.java b/himarket-server/src/main/java/com/alibaba/himarket/core/exception/ErrorCode.java index f8c8c0bfb..1de34ffd7 100644 --- a/himarket-server/src/main/java/com/alibaba/himarket/core/exception/ErrorCode.java +++ b/himarket-server/src/main/java/com/alibaba/himarket/core/exception/ErrorCode.java @@ -51,6 +51,15 @@ public enum ErrorCode { /** Gateway error */ GATEWAY_ERROR(HttpStatus.INTERNAL_SERVER_ERROR, "网关错误:{}"), + + /** Sandbox not ready (Pod 未就绪) */ + SANDBOX_NOT_READY(HttpStatus.SERVICE_UNAVAILABLE, "沙箱未就绪:{}"), + + /** Sandbox connection failed (Sidecar 不可达) */ + SANDBOX_CONNECTION_FAILED(HttpStatus.BAD_GATEWAY, 
"沙箱连接失败:{}"), + + /** Sandbox error (Sidecar 返回错误) */ + SANDBOX_ERROR(HttpStatus.BAD_GATEWAY, "{}"), ; private final HttpStatus status; diff --git a/himarket-server/src/main/java/com/alibaba/himarket/core/skill/FileTreeBuilder.java b/himarket-server/src/main/java/com/alibaba/himarket/core/skill/FileTreeBuilder.java new file mode 100644 index 000000000..610117ef7 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/core/skill/FileTreeBuilder.java @@ -0,0 +1,125 @@ +package com.alibaba.himarket.core.skill; + +import com.alibaba.himarket.dto.result.skill.SkillFileTreeNode; +import com.alibaba.nacos.api.ai.model.skills.Skill; +import com.alibaba.nacos.api.ai.model.skills.SkillResource; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +/** + * 从 Nacos Skill 的 resource Map 构建文件树。 + * 在根节点下添加 SKILL.md 虚拟节点。 + * 按目录优先、同类型按名称字母序排列。 + */ +public final class FileTreeBuilder { + + private FileTreeBuilder() {} + + public static List build(Skill skill) { + Map dirMap = new LinkedHashMap<>(); + List rootChildren = new ArrayList<>(); + + // 添加 SKILL.md 虚拟节点 + String skillMdContent = SkillMdBuilder.build(skill); + SkillFileTreeNode skillMdNode = new SkillFileTreeNode(); + skillMdNode.setName("SKILL.md"); + skillMdNode.setPath("SKILL.md"); + skillMdNode.setType("file"); + skillMdNode.setEncoding("text"); + skillMdNode.setSize(skillMdContent.getBytes(StandardCharsets.UTF_8).length); + rootChildren.add(skillMdNode); + + // 从 resource Map 构建文件节点 + if (skill.getResource() != null) { + for (Map.Entry entry : skill.getResource().entrySet()) { + String resourceKey = entry.getKey(); + SkillResource resource = entry.getValue(); + addResourceNode(rootChildren, dirMap, resourceKey, resource); + } + } + + // 排序:目录优先,同类型按名称字母序 + sortNodes(rootChildren); + return rootChildren; + } + + private static void addResourceNode( + List rootChildren, + 
Map dirMap, + String resourceKey, + SkillResource resource) { + // 构建完整路径:type 非空时拼接 type/name,否则只用 name + String name = resource.getName() != null ? resource.getName() : resourceKey; + String type = resource.getType(); + String path; + if (type != null && !type.isEmpty()) { + path = type + "/" + name; + } else { + path = name; + } + String[] parts = path.split("/"); + + if (parts.length == 1) { + // 根级文件 + rootChildren.add(createFileNode(path, path, resource)); + } else { + // 需要创建目录层级 + List currentLevel = rootChildren; + StringBuilder currentPath = new StringBuilder(); + for (int i = 0; i < parts.length - 1; i++) { + if (currentPath.length() > 0) currentPath.append("/"); + currentPath.append(parts[i]); + String dirPath = currentPath.toString(); + + SkillFileTreeNode dirNode = dirMap.get(dirPath); + if (dirNode == null) { + dirNode = new SkillFileTreeNode(); + dirNode.setName(parts[i]); + dirNode.setPath(dirPath); + dirNode.setType("directory"); + dirNode.setChildren(new ArrayList<>()); + dirMap.put(dirPath, dirNode); + currentLevel.add(dirNode); + } + currentLevel = dirNode.getChildren(); + } + currentLevel.add(createFileNode(parts[parts.length - 1], path, resource)); + } + } + + private static SkillFileTreeNode createFileNode( + String name, String path, SkillResource resource) { + SkillFileTreeNode node = new SkillFileTreeNode(); + node.setName(name); + node.setPath(path); + node.setType("file"); + String content = resource.getContent(); + boolean isBinary = isBinaryContent(resource); + node.setEncoding(isBinary ? "base64" : "text"); + node.setSize(content != null ? 
content.getBytes(StandardCharsets.UTF_8).length : 0); + return node; + } + + private static boolean isBinaryContent(SkillResource resource) { + if (resource.getMetadata() != null) { + Object encoding = resource.getMetadata().get("encoding"); + if ("base64".equals(encoding)) return true; + } + return false; + } + + private static void sortNodes(List nodes) { + nodes.sort( + Comparator.comparing((SkillFileTreeNode n) -> "file".equals(n.getType()) ? 1 : 0) + .thenComparing(SkillFileTreeNode::getName, String.CASE_INSENSITIVE_ORDER)); + for (SkillFileTreeNode node : nodes) { + if (node.getChildren() != null && !node.getChildren().isEmpty()) { + sortNodes(node.getChildren()); + } + } + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/core/skill/SkillMdBuilder.java b/himarket-server/src/main/java/com/alibaba/himarket/core/skill/SkillMdBuilder.java new file mode 100644 index 000000000..74ba13297 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/core/skill/SkillMdBuilder.java @@ -0,0 +1,29 @@ +package com.alibaba.himarket.core.skill; + +import com.alibaba.nacos.api.ai.model.skills.Skill; + +/** + * 从 Nacos Skill 对象拼装 SKILL.md 内容。 + * 格式:YAML frontmatter(name、description)+ instruction 正文。 + */ +public final class SkillMdBuilder { + + private SkillMdBuilder() {} + + /** + * 从 Skill 对象生成 SKILL.md 内容。 + */ + public static String build(Skill skill) { + StringBuilder sb = new StringBuilder(); + sb.append("---\n"); + sb.append("name: ").append(skill.getName() != null ? skill.getName() : "").append("\n"); + sb.append("description: ") + .append(skill.getDescription() != null ? 
skill.getDescription() : "") + .append("\n"); + sb.append("---\n\n"); + if (skill.getInstruction() != null) { + sb.append(skill.getInstruction()); + } + return sb.toString(); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/core/skill/SkillMdDocument.java b/himarket-server/src/main/java/com/alibaba/himarket/core/skill/SkillMdDocument.java new file mode 100644 index 000000000..c6f0538a8 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/core/skill/SkillMdDocument.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package com.alibaba.himarket.core.skill; + +import java.util.Map; +import java.util.Objects; +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; + +/** + * SKILL.md 文档模型,包含 YAML frontmatter 和 Markdown body。 + */ +@Data +@NoArgsConstructor +@AllArgsConstructor +public class SkillMdDocument { + + /** YAML frontmatter 键值对 */ + private Map frontmatter; + + /** Markdown 正文 */ + private String body; + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SkillMdDocument that = (SkillMdDocument) o; + return Objects.equals(frontmatter, that.frontmatter) && Objects.equals(body, that.body); + } + + @Override + public int hashCode() { + return Objects.hash(frontmatter, body); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/core/skill/SkillMdParser.java b/himarket-server/src/main/java/com/alibaba/himarket/core/skill/SkillMdParser.java new file mode 100644 index 000000000..168bcb647 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/core/skill/SkillMdParser.java @@ -0,0 +1,129 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package com.alibaba.himarket.core.skill; + +import com.alibaba.himarket.core.exception.BusinessException; +import com.alibaba.himarket.core.exception.ErrorCode; +import java.util.LinkedHashMap; +import java.util.Map; +import org.springframework.stereotype.Component; +import org.yaml.snakeyaml.Yaml; +import org.yaml.snakeyaml.error.YAMLException; + +/** + * SKILL.md 解析器,处理 YAML frontmatter + Markdown 格式的解析与序列化。 + */ +@Component +public class SkillMdParser { + + private static final String DELIMITER = "---"; + + private final Yaml yaml = new Yaml(); + + /** + * 解析 SKILL.md 内容,提取 YAML frontmatter 和 Markdown body。 + * + * @param content SKILL.md 原始文本 + * @return 解析后的 SkillMdDocument + * @throws BusinessException 格式不合法时抛出 400 错误 + */ + public SkillMdDocument parse(String content) { + if (content == null || content.isBlank()) { + throw new BusinessException(ErrorCode.INVALID_PARAMETER, "SKILL.md 内容不能为空"); + } + + String trimmed = content.strip(); + + if (!trimmed.startsWith(DELIMITER)) { + throw new BusinessException(ErrorCode.INVALID_PARAMETER, "缺少 YAML frontmatter 分隔符"); + } + + // 查找第二个 --- 分隔符(跳过第一个) + int secondDelimiterIndex = trimmed.indexOf(DELIMITER, DELIMITER.length()); + if (secondDelimiterIndex < 0) { + throw new BusinessException(ErrorCode.INVALID_PARAMETER, "缺少 YAML frontmatter 结束分隔符"); + } + + // 提取 frontmatter YAML 文本 + String yamlText = trimmed.substring(DELIMITER.length(), secondDelimiterIndex).strip(); + + // 提取 body(第二个 --- 之后的内容) + String body = trimmed.substring(secondDelimiterIndex + DELIMITER.length()); + // 去掉 body 开头的第一个换行符(如果有的话),保留其余格式 + if (body.startsWith("\n")) { + body = body.substring(1); + } else if (body.startsWith("\r\n")) { + body = body.substring(2); + } + + // 解析 YAML frontmatter + Map frontmatter; + try { + Object parsed = yaml.load(yamlText); + if (parsed == null) { + frontmatter = new LinkedHashMap<>(); + } else if (parsed instanceof Map) { + @SuppressWarnings("unchecked") + Map map = (Map) parsed; + frontmatter = new 
LinkedHashMap<>(map); + } else { + throw new BusinessException( + ErrorCode.INVALID_PARAMETER, + "YAML frontmatter 格式错误:期望键值对映射,实际为 " + parsed.getClass().getSimpleName()); + } + } catch (YAMLException e) { + throw new BusinessException( + ErrorCode.INVALID_PARAMETER, "YAML frontmatter 解析失败:" + e.getMessage()); + } + + return new SkillMdDocument(frontmatter, body); + } + + /** + * 将 SkillMdDocument 序列化为 SKILL.md 格式。 + * + * @param document SkillMdDocument 对象 + * @return SKILL.md 格式的文本 + */ + public String serialize(SkillMdDocument document) { + if (document == null) { + throw new BusinessException(ErrorCode.INVALID_PARAMETER, "SkillMdDocument 不能为空"); + } + + StringBuilder sb = new StringBuilder(); + sb.append(DELIMITER).append("\n"); + + Map frontmatter = document.getFrontmatter(); + if (frontmatter != null && !frontmatter.isEmpty()) { + String yamlStr = yaml.dump(frontmatter); + // SnakeYAML dump 末尾自带换行符 + sb.append(yamlStr); + } + + sb.append(DELIMITER).append("\n"); + + String body = document.getBody(); + if (body != null && !body.isEmpty()) { + sb.append(body); + } + + return sb.toString(); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/core/skill/SkillZipParser.java b/himarket-server/src/main/java/com/alibaba/himarket/core/skill/SkillZipParser.java new file mode 100644 index 000000000..6f49f432a --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/core/skill/SkillZipParser.java @@ -0,0 +1,264 @@ +package com.alibaba.himarket.core.skill; + +import com.alibaba.himarket.core.exception.BusinessException; +import com.alibaba.himarket.core.exception.ErrorCode; +import com.alibaba.nacos.api.ai.model.skills.Skill; +import com.alibaba.nacos.api.ai.model.skills.SkillResource; +import com.alibaba.nacos.api.ai.model.skills.SkillUtils; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import 
java.util.Base64; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; +import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream; + +/** + * ZIP 包解析工具类。参考 Nacos 服务端 SkillZipParser 实现。 + * 解析 ZIP → 提取 SKILL.md → 构建 Skill 对象。 + */ +public final class SkillZipParser { + + private SkillZipParser() {} + + private static final String SKILL_MD_FILE = "SKILL.md"; + private static final String MACOS_METADATA_PREFIX = "._"; + private static final Pattern YAML_FRONT_MATTER = + Pattern.compile("^---\\s*\\n(.*?)\\n---\\s*\\n(.*)$", Pattern.DOTALL); + + private static final Set BINARY_EXTENSIONS = new HashSet<>(); + + static { + BINARY_EXTENSIONS.add("ttf"); + BINARY_EXTENSIONS.add("otf"); + BINARY_EXTENSIONS.add("woff"); + BINARY_EXTENSIONS.add("woff2"); + BINARY_EXTENSIONS.add("eot"); + BINARY_EXTENSIONS.add("png"); + BINARY_EXTENSIONS.add("jpg"); + BINARY_EXTENSIONS.add("jpeg"); + BINARY_EXTENSIONS.add("gif"); + BINARY_EXTENSIONS.add("webp"); + BINARY_EXTENSIONS.add("ico"); + BINARY_EXTENSIONS.add("pdf"); + BINARY_EXTENSIONS.add("bin"); + } + + /** + * 解析 ZIP 包为 Nacos Skill 对象。 + * + * @param zipBytes ZIP 文件字节数组 + * @param namespaceId Nacos 命名空间 + * @return Skill 对象(含 name、description、instruction、resources) + */ + public static Skill parseSkillFromZip(byte[] zipBytes, String namespaceId) { + if (zipBytes == null || zipBytes.length == 0) { + throw new BusinessException(ErrorCode.INVALID_PARAMETER, "ZIP 文件为空"); + } + + try { + List entries = unzipToEntries(zipBytes); + + // 查找 SKILL.md(根目录或一级子目录) + String skillMdContent = null; + for (ZipEntryData entry : entries) { + if (isMacOsMetadataFile(entry.name)) continue; + if (SKILL_MD_FILE.equals(entry.name) || entry.name.endsWith("/" + SKILL_MD_FILE)) { + skillMdContent = new String(entry.data, StandardCharsets.UTF_8); 
+ break; + } + } + + if (skillMdContent == null || skillMdContent.isBlank()) { + throw new BusinessException(ErrorCode.INVALID_PARAMETER, "ZIP 包中未找到 SKILL.md 文件"); + } + + Skill skill = parseSkillMarkdown(skillMdContent, namespaceId); + Map resources = parseResources(entries, skill.getName()); + skill.setResource(resources); + return skill; + + } catch (BusinessException e) { + throw e; + } catch (Exception e) { + throw new BusinessException(ErrorCode.INVALID_PARAMETER, "ZIP 解析失败: " + e.getMessage()); + } + } + + private static List unzipToEntries(byte[] zipBytes) throws IOException { + List result = new ArrayList<>(); + try (ZipArchiveInputStream zis = + new ZipArchiveInputStream( + new ByteArrayInputStream(zipBytes), + StandardCharsets.UTF_8.name(), + true, + true)) { + ZipArchiveEntry entry; + byte[] buffer = new byte[8192]; + while ((entry = zis.getNextEntry()) != null) { + if (entry.isDirectory()) continue; + String name = entry.getName(); + if (name != null && (name.contains("__MACOSX") || name.contains("/__MACOSX/"))) + continue; + ByteArrayOutputStream out = new ByteArrayOutputStream(); + int n; + while ((n = zis.read(buffer)) != -1) { + out.write(buffer, 0, n); + } + result.add(new ZipEntryData(name, out.toByteArray())); + } + } + return result; + } + + private static Map parseResources( + List entries, String skillName) { + Map resources = new HashMap<>(16); + for (ZipEntryData entry : entries) { + String itemName = entry.name; + if (isMacOsMetadataFile(itemName)) continue; + if (itemName.endsWith(SKILL_MD_FILE) || itemName.endsWith("/")) continue; + + String[] parts = itemName.split("/"); + String type; + String resourceName; + + if (parts.length == 1) { + type = ""; + resourceName = parts[0]; + } else if (parts.length == 2 && parts[0].equals(skillName)) { + type = ""; + resourceName = parts[1]; + } else if (parts.length >= 3 && parts[0].equals(skillName)) { + StringBuilder typeSb = new StringBuilder(); + for (int i = 1; i < parts.length - 1; i++) { + if 
(typeSb.length() > 0) typeSb.append('/'); + typeSb.append(parts[i]); + } + type = typeSb.toString(); + resourceName = parts[parts.length - 1]; + } else if (parts.length >= 2) { + StringBuilder typeSb = new StringBuilder(); + for (int i = 0; i < parts.length - 1; i++) { + if (typeSb.length() > 0) typeSb.append('/'); + typeSb.append(parts[i]); + } + type = typeSb.toString(); + resourceName = parts[parts.length - 1]; + } else { + continue; + } + + boolean isBinary = isBinaryResource(resourceName); + String content; + Map metadata = new HashMap<>(4); + if (isBinary) { + content = Base64.getEncoder().encodeToString(entry.data); + metadata.put("encoding", "base64"); + } else { + content = new String(entry.data, StandardCharsets.UTF_8); + } + + SkillResource resource = new SkillResource(); + resource.setName(resourceName); + resource.setType(type); + resource.setContent(content); + resource.setMetadata(metadata.isEmpty() ? null : metadata); + String key = SkillUtils.generateResourceId(type, resourceName); + resources.put(key, resource); + } + return resources; + } + + private static Skill parseSkillMarkdown(String markdownContent, String namespaceId) { + Matcher matcher = YAML_FRONT_MATTER.matcher(markdownContent); + if (!matcher.matches()) { + throw new BusinessException( + ErrorCode.INVALID_PARAMETER, "SKILL.md 必须包含 YAML front matter (---)"); + } + + String yamlContent = matcher.group(1); + String instructionContent = matcher.group(2); + + Map yamlMap = parseYamlFrontMatter(yamlContent); + String name = yamlMap.get("name"); + String description = yamlMap.get("description"); + + if (name == null || name.isBlank()) { + throw new BusinessException( + ErrorCode.INVALID_PARAMETER, "SKILL.md YAML front matter 中缺少 name"); + } + if (description == null || description.isBlank()) { + throw new BusinessException( + ErrorCode.INVALID_PARAMETER, "SKILL.md YAML front matter 中缺少 description"); + } + + String instruction = extractInstruction(instructionContent); + if (instruction == 
null || instruction.isBlank()) { + throw new BusinessException(ErrorCode.INVALID_PARAMETER, "SKILL.md 中缺少 instruction 内容"); + } + + Skill skill = new Skill(); + skill.setNamespaceId(namespaceId); + skill.setName(name.trim()); + skill.setDescription(description.trim()); + skill.setInstruction(instruction.trim()); + return skill; + } + + private static Map parseYamlFrontMatter(String yamlContent) { + Map result = new HashMap<>(4); + for (String line : yamlContent.split("\\n")) { + line = line.trim(); + if (line.isEmpty() || line.startsWith("#")) continue; + int colonIndex = line.indexOf(':'); + if (colonIndex > 0) { + String key = line.substring(0, colonIndex).trim(); + String value = line.substring(colonIndex + 1).trim(); + if (value.startsWith("\"") && value.endsWith("\"")) { + value = value.substring(1, value.length() - 1); + value = value.replace("\\\\", "\\").replace("\\\"", "\""); + } else if (value.startsWith("'") && value.endsWith("'")) { + value = value.substring(1, value.length() - 1); + value = value.replace("''", "'"); + } + result.put(key, value); + } + } + return result; + } + + private static String extractInstruction(String content) { + String trimmed = content.trim(); + if (trimmed.startsWith("## Instructions") || trimmed.startsWith("##Instructions")) { + int headerEnd = trimmed.indexOf('\n'); + if (headerEnd > 0) { + trimmed = trimmed.substring(headerEnd).trim(); + } + } + return trimmed; + } + + private static boolean isBinaryResource(String fileName) { + if (fileName == null || !fileName.contains(".")) return false; + String ext = fileName.substring(fileName.lastIndexOf('.') + 1).trim().toLowerCase(); + return BINARY_EXTENSIONS.contains(ext); + } + + private static boolean isMacOsMetadataFile(String itemName) { + if (itemName == null || itemName.isEmpty()) return false; + int lastSlash = itemName.lastIndexOf('/'); + String fileName = lastSlash >= 0 ? 
itemName.substring(lastSlash + 1) : itemName; + return fileName.startsWith(MACOS_METADATA_PREFIX); + } + + private record ZipEntryData(String name, byte[] data) {} +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/core/utils/TokenUtil.java b/himarket-server/src/main/java/com/alibaba/himarket/core/utils/TokenUtil.java index ceb7eae04..9e21e1458 100644 --- a/himarket-server/src/main/java/com/alibaba/himarket/core/utils/TokenUtil.java +++ b/himarket-server/src/main/java/com/alibaba/himarket/core/utils/TokenUtil.java @@ -66,7 +66,12 @@ private static long getJwtExpireMillis() { } if (expiration.matches("\\d+[smhd]")) { - JWT_EXPIRE_MILLIS = Duration.parse("PT" + expiration.toUpperCase()).toMillis(); + String upper = expiration.toUpperCase(); + if (upper.endsWith("D")) { + JWT_EXPIRE_MILLIS = Duration.parse("P" + upper).toMillis(); + } else { + JWT_EXPIRE_MILLIS = Duration.parse("PT" + upper).toMillis(); + } } else { JWT_EXPIRE_MILLIS = Long.parseLong(expiration); } diff --git a/himarket-server/src/main/java/com/alibaba/himarket/dto/params/coding/CreateCodingSessionParam.java b/himarket-server/src/main/java/com/alibaba/himarket/dto/params/coding/CreateCodingSessionParam.java new file mode 100644 index 000000000..4e6c0af55 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/dto/params/coding/CreateCodingSessionParam.java @@ -0,0 +1,23 @@ +package com.alibaba.himarket.dto.params.coding; + +import com.alibaba.himarket.dto.converter.InputConverter; +import com.alibaba.himarket.entity.CodingSession; +import jakarta.validation.constraints.NotBlank; +import lombok.Data; + +@Data +public class CreateCodingSessionParam implements InputConverter { + + @NotBlank(message = "cliSessionId cannot be empty") + private String cliSessionId; + + private String title; + + private String providerKey; + + private String cwd; + + private String modelProductId; + + private String modelName; +} diff --git 
a/himarket-server/src/main/java/com/alibaba/himarket/dto/params/coding/UpdateCodingSessionParam.java b/himarket-server/src/main/java/com/alibaba/himarket/dto/params/coding/UpdateCodingSessionParam.java new file mode 100644 index 000000000..20fbdf442 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/dto/params/coding/UpdateCodingSessionParam.java @@ -0,0 +1,13 @@ +package com.alibaba.himarket.dto.params.coding; + +import com.alibaba.himarket.dto.converter.InputConverter; +import com.alibaba.himarket.entity.CodingSession; +import lombok.Data; + +@Data +public class UpdateCodingSessionParam implements InputConverter { + + private String title; + + private String cliSessionId; +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/dto/params/product/UpdateSkillNacosParam.java b/himarket-server/src/main/java/com/alibaba/himarket/dto/params/product/UpdateSkillNacosParam.java new file mode 100644 index 000000000..e1ea027b5 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/dto/params/product/UpdateSkillNacosParam.java @@ -0,0 +1,14 @@ +package com.alibaba.himarket.dto.params.product; + +import jakarta.validation.constraints.NotBlank; +import lombok.Data; + +@Data +public class UpdateSkillNacosParam { + + @NotBlank(message = "nacosId cannot be blank") + private String nacosId; + + @NotBlank(message = "namespace cannot be blank") + private String namespace; +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/dto/result/cli/MarketMcpInfo.java b/himarket-server/src/main/java/com/alibaba/himarket/dto/result/cli/MarketMcpInfo.java new file mode 100644 index 000000000..83a46ad31 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/dto/result/cli/MarketMcpInfo.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package com.alibaba.himarket.dto.result.cli; + +import lombok.Builder; +import lombok.Data; + +@Data +@Builder +public class MarketMcpInfo { + + private String productId; + + private String name; + + private String url; + + private String transportType; + + private String description; +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/dto/result/cli/MarketMcpsResponse.java b/himarket-server/src/main/java/com/alibaba/himarket/dto/result/cli/MarketMcpsResponse.java new file mode 100644 index 000000000..39212b8e7 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/dto/result/cli/MarketMcpsResponse.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package com.alibaba.himarket.dto.result.cli; + +import java.util.List; +import java.util.Map; +import lombok.Builder; +import lombok.Data; + +@Data +@Builder +public class MarketMcpsResponse { + + private List mcpServers; + + private Map authHeaders; +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/dto/result/cli/MarketModelInfo.java b/himarket-server/src/main/java/com/alibaba/himarket/dto/result/cli/MarketModelInfo.java new file mode 100644 index 000000000..486c2106b --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/dto/result/cli/MarketModelInfo.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package com.alibaba.himarket.dto.result.cli; + +import lombok.Builder; +import lombok.Data; + +@Data +@Builder +public class MarketModelInfo { + + private String productId; + + private String name; + + private String modelId; + + private String baseUrl; + + private String protocolType; + + private String description; +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/dto/result/cli/MarketModelsResponse.java b/himarket-server/src/main/java/com/alibaba/himarket/dto/result/cli/MarketModelsResponse.java new file mode 100644 index 000000000..481e9fe85 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/dto/result/cli/MarketModelsResponse.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package com.alibaba.himarket.dto.result.cli; + +import java.util.List; +import lombok.Builder; +import lombok.Data; + +@Data +@Builder +public class MarketModelsResponse { + + private List models; + + private String apiKey; +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/dto/result/cli/MarketSkillInfo.java b/himarket-server/src/main/java/com/alibaba/himarket/dto/result/cli/MarketSkillInfo.java new file mode 100644 index 000000000..005100fa3 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/dto/result/cli/MarketSkillInfo.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package com.alibaba.himarket.dto.result.cli; + +import java.util.List; +import lombok.Builder; +import lombok.Data; + +@Data +@Builder +public class MarketSkillInfo { + + private String productId; + + private String name; + + private String description; + + private List skillTags; +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/dto/result/coding/CodingSessionResult.java b/himarket-server/src/main/java/com/alibaba/himarket/dto/result/coding/CodingSessionResult.java new file mode 100644 index 000000000..c13de7ced --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/dto/result/coding/CodingSessionResult.java @@ -0,0 +1,28 @@ +package com.alibaba.himarket.dto.result.coding; + +import com.alibaba.himarket.dto.converter.OutputConverter; +import com.alibaba.himarket.entity.CodingSession; +import java.time.LocalDateTime; +import lombok.Data; + +@Data +public class CodingSessionResult implements OutputConverter { + + private String sessionId; + + private String cliSessionId; + + private String title; + + private String providerKey; + + private String cwd; + + private String modelProductId; + + private String modelName; + + private LocalDateTime createAt; + + private LocalDateTime updatedAt; +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/dto/result/nacos/NacosResult.java b/himarket-server/src/main/java/com/alibaba/himarket/dto/result/nacos/NacosResult.java index c7b1ed1ec..b5e678891 100644 --- a/himarket-server/src/main/java/com/alibaba/himarket/dto/result/nacos/NacosResult.java +++ b/himarket-server/src/main/java/com/alibaba/himarket/dto/result/nacos/NacosResult.java @@ -41,5 +41,9 @@ public class NacosResult implements OutputConverter private String adminId; + private Boolean isDefault; + + private String defaultNamespace; + private LocalDateTime createAt; } diff --git a/himarket-server/src/main/java/com/alibaba/himarket/dto/result/product/ProductResult.java 
b/himarket-server/src/main/java/com/alibaba/himarket/dto/result/product/ProductResult.java index fc3d701b9..7fb485cce 100644 --- a/himarket-server/src/main/java/com/alibaba/himarket/dto/result/product/ProductResult.java +++ b/himarket-server/src/main/java/com/alibaba/himarket/dto/result/product/ProductResult.java @@ -30,6 +30,7 @@ import com.alibaba.himarket.support.enums.ProductType; import com.alibaba.himarket.support.product.Icon; import com.alibaba.himarket.support.product.ProductFeature; +import com.alibaba.himarket.support.product.SkillConfig; import java.time.LocalDateTime; import java.util.List; import lombok.Data; @@ -69,6 +70,8 @@ public class ProductResult implements OutputConverter { private ModelConfigResult modelConfig; + private SkillConfig skillConfig; + private Boolean enabled; private ProductFeature feature; diff --git a/himarket-server/src/main/java/com/alibaba/himarket/dto/result/skill/SkillFileContentResult.java b/himarket-server/src/main/java/com/alibaba/himarket/dto/result/skill/SkillFileContentResult.java new file mode 100644 index 000000000..e242d5b6d --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/dto/result/skill/SkillFileContentResult.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package com.alibaba.himarket.dto.result.skill; + +import lombok.Data; + +@Data +public class SkillFileContentResult { + + /** Relative path from skill root */ + private String path; + + /** File content (text or base64-encoded string) */ + private String content; + + /** Encoding: "text" or "base64" */ + private String encoding; + + /** File size in bytes */ + private Integer size; +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/dto/result/skill/SkillFileTreeNode.java b/himarket-server/src/main/java/com/alibaba/himarket/dto/result/skill/SkillFileTreeNode.java new file mode 100644 index 000000000..77dfd0099 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/dto/result/skill/SkillFileTreeNode.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package com.alibaba.himarket.dto.result.skill; + +import java.util.List; +import lombok.Data; + +@Data +public class SkillFileTreeNode { + + /** File or directory name */ + private String name; + + /** Relative path from skill root */ + private String path; + + /** Node type: "file" or "directory" */ + private String type; + + /** Encoding of the file content: "text" or "base64" (only for file nodes) */ + private String encoding; + + /** File size in bytes (only for file nodes) */ + private Integer size; + + /** Child nodes (only for directory nodes) */ + private List children; +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/CodingSessionService.java b/himarket-server/src/main/java/com/alibaba/himarket/service/CodingSessionService.java new file mode 100644 index 000000000..88a91a0af --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/CodingSessionService.java @@ -0,0 +1,18 @@ +package com.alibaba.himarket.service; + +import com.alibaba.himarket.dto.params.coding.CreateCodingSessionParam; +import com.alibaba.himarket.dto.params.coding.UpdateCodingSessionParam; +import com.alibaba.himarket.dto.result.coding.CodingSessionResult; +import com.alibaba.himarket.dto.result.common.PageResult; +import org.springframework.data.domain.Pageable; + +public interface CodingSessionService { + + CodingSessionResult createSession(CreateCodingSessionParam param); + + PageResult listSessions(Pageable pageable); + + CodingSessionResult updateSession(String sessionId, UpdateCodingSessionParam param); + + void deleteSession(String sessionId); +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/ConsumerService.java b/himarket-server/src/main/java/com/alibaba/himarket/service/ConsumerService.java index 93dc630a0..686e6225a 100644 --- a/himarket-server/src/main/java/com/alibaba/himarket/service/ConsumerService.java +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/ConsumerService.java @@ 
-172,4 +172,13 @@ PageResult listSubscriptions( * @return ConsumerResult */ ConsumerResult getPrimaryConsumer(); + + /** + * Obtain primary consumer for the specified developer (used in async contexts + * where SecurityContext is not available). + * + * @param developerId developer ID + * @return ConsumerResult + */ + ConsumerResult getPrimaryConsumer(String developerId); } diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/NacosService.java b/himarket-server/src/main/java/com/alibaba/himarket/service/NacosService.java index c06d04376..92c5f7f58 100644 --- a/himarket-server/src/main/java/com/alibaba/himarket/service/NacosService.java +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/NacosService.java @@ -28,7 +28,9 @@ import com.alibaba.himarket.dto.result.nacos.MseNacosResult; import com.alibaba.himarket.dto.result.nacos.NacosNamespaceResult; import com.alibaba.himarket.dto.result.nacos.NacosResult; +import com.alibaba.himarket.entity.NacosInstance; import com.alibaba.himarket.support.product.NacosRefConfig; +import com.alibaba.nacos.maintainer.client.ai.AiMaintainerService; import org.springframework.data.domain.Pageable; /** Nacos服务接口,定义Nacos实例管理和MCP服务器配置相关操作 */ @@ -136,4 +138,48 @@ PageResult fetchAgents(String nacosId, String namespaceId, Pag * @return Agent 配置的 JSON 字符串 */ String fetchAgentConfig(String nacosId, NacosRefConfig nacosRefConfig); + + // ==================== Skill 相关 ==================== + + /** + * 根据 nacosId 获取缓存的 AiMaintainerService 实例。 + * 复用已有的 buildDynamicAiService + ConcurrentHashMap 缓存机制。 + * + * @param nacosId nacos_instance 表主键 + * @return AiMaintainerService 实例 + * @throws BusinessException nacosId 不存在或连接失败 + */ + AiMaintainerService getAiMaintainerService(String nacosId); + + /** + * 根据 nacosId 查询 NacosInstance 记录(用于提取凭证信息)。 + * + * @param nacosId nacos_instance 表主键 + * @return NacosInstance 实体 + * @throws BusinessException nacosId 不存在 + */ + NacosInstance findNacosInstanceById(String nacosId); + + 
/** + * 获取默认 Nacos 实例。 + * + * @return 默认实例信息,不存在时返回 null + */ + NacosResult getDefaultNacosInstance(); + + /** + * 设置指定 Nacos 实例为默认。 + * + * @param nacosId 要设为默认的实例 ID + */ + void setDefaultNacosInstance(String nacosId); + + /** + * 设置指定 Nacos 实例的默认命名空间。 + * 会先用已保存的认证信息连接 Nacos 验证命名空间是否存在,再保存。 + * + * @param nacosId Nacos 实例 ID + * @param namespaceId 要设为默认的命名空间 ID + */ + void setDefaultNamespace(String nacosId, String namespaceId); } diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/ProductService.java b/himarket-server/src/main/java/com/alibaba/himarket/service/ProductService.java index f59eedcf6..efd450e74 100644 --- a/himarket-server/src/main/java/com/alibaba/himarket/service/ProductService.java +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/ProductService.java @@ -187,4 +187,13 @@ PageResult listProductSubscriptions( * @param productId */ McpToolListResult listMcpTools(String productId); + + /** + * Update Skill's Nacos binding (nacosId + namespace) + * + * @param productId + * @param nacosId + * @param namespace + */ + void updateSkillNacos(String productId, String nacosId, String namespace); } diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/SkillService.java b/himarket-server/src/main/java/com/alibaba/himarket/service/SkillService.java new file mode 100644 index 000000000..16e4729ae --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/SkillService.java @@ -0,0 +1,51 @@ +package com.alibaba.himarket.service; + +import com.alibaba.himarket.dto.result.skill.SkillFileContentResult; +import com.alibaba.himarket.dto.result.skill.SkillFileTreeNode; +import com.alibaba.nacos.api.ai.model.skills.Skill; +import com.alibaba.nacos.api.ai.model.skills.SkillBasicInfo; +import com.alibaba.nacos.api.model.Page; +import jakarta.servlet.http.HttpServletResponse; +import java.io.IOException; +import java.util.List; + +/** + * Nacos Skill SDK 透传服务。 + * 所有操作通过 
NacosService.getAiMaintainerService(nacosId) 获取 SDK 实例。 + */ +public interface SkillService { + + /** 创建 Skill → SDK registerSkill() */ + String createSkill(String nacosId, String namespace, Skill skill); + + /** 查询 Skill 详情 → SDK getSkillDetail() */ + Skill getSkillDetail(String nacosId, String namespace, String skillName); + + /** 更新 Skill → SDK updateSkill() */ + void updateSkill(String nacosId, String namespace, Skill skill); + + /** 删除 Skill → SDK deleteSkill() */ + void deleteSkill(String nacosId, String namespace, String skillName); + + /** 分页列表 → SDK listSkills() */ + Page listSkills( + String nacosId, String namespace, String search, int pageNo, int pageSize); + + /** 获取 SKILL.md 文档(拼装 frontmatter + instruction) */ + String getSkillDocument(String nacosId, String namespace, String skillName); + + /** 获取文件树 */ + List getFileTree(String nacosId, String namespace, String skillName); + + /** 获取所有文件内容 */ + List getAllFiles(String nacosId, String namespace, String skillName); + + /** 获取单文件内容 */ + SkillFileContentResult getFileContent( + String nacosId, String namespace, String skillName, String path); + + /** ZIP 流式下载 */ + void downloadZip( + String nacosId, String namespace, String skillName, HttpServletResponse response) + throws IOException; +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/document/DocumentConversionService.java b/himarket-server/src/main/java/com/alibaba/himarket/service/document/DocumentConversionService.java new file mode 100644 index 000000000..bfc188db6 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/document/DocumentConversionService.java @@ -0,0 +1,143 @@ +package com.alibaba.himarket.service.document; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.concurrent.TimeUnit; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Service; + +/** + * Converts office documents (PPTX, PPT, etc.) 
to PDF using LibreOffice headless. + * + *

Converted PDFs are cached next to the source file. A cached PDF is reused if it is newer than + * the source file. + */ +@Service +@Slf4j +public class DocumentConversionService { + + private static final long CONVERSION_TIMEOUT_SECONDS = 60; + private static final String LIBREOFFICE_COMMAND = "libreoffice"; + + /** + * Convert an office document to PDF. + * + * @param sourceFile absolute path to the source document (.pptx, .ppt, etc.) + * @return path to the converted PDF, or {@code null} if conversion failed + */ + public Path convertToPdf(Path sourceFile) { + Path normalizedSource = sourceFile.toAbsolutePath().normalize(); + Path cachedPdf = resolveCachedPdfPath(normalizedSource); + + Path cached = getCachedPdfIfUpToDate(normalizedSource); + if (cached != null) { + log.debug("Using cached PDF: {}", cached); + return cached; + } + + // Run LibreOffice headless conversion + try { + Path outDir = normalizedSource.getParent(); + ProcessBuilder pb = + new ProcessBuilder( + LIBREOFFICE_COMMAND, + "--headless", + "--convert-to", + "pdf", + normalizedSource.getFileName().toString(), + "--outdir", + outDir.toString()); + pb.directory(outDir.toFile()); + pb.redirectErrorStream(true); + + log.info("Converting to PDF: {}", normalizedSource); + Process process = pb.start(); + + boolean finished = process.waitFor(CONVERSION_TIMEOUT_SECONDS, TimeUnit.SECONDS); + if (!finished) { + process.destroyForcibly(); + log.warn( + "LibreOffice conversion timed out after {}s: {}", + CONVERSION_TIMEOUT_SECONDS, + normalizedSource); + return null; + } + + if (process.exitValue() != 0) { + String output = new String(process.getInputStream().readAllBytes()); + log.warn( + "LibreOffice conversion failed (exit={}): {}\n{}", + process.exitValue(), + normalizedSource, + output); + return null; + } + + // LibreOffice outputs .pdf in the outdir (e.g. 
demo.pptx → demo.pdf) + String stem = stripExtension(normalizedSource.getFileName().toString()); + Path generatedPdf = outDir.resolve(stem + ".pdf"); + + if (!Files.exists(generatedPdf)) { + log.warn("Expected PDF not found after conversion: {}", generatedPdf); + return null; + } + + // Rename to our cache path to avoid collisions + if (!generatedPdf.equals(cachedPdf)) { + Files.move( + generatedPdf, cachedPdf, java.nio.file.StandardCopyOption.REPLACE_EXISTING); + } + + log.info( + "Conversion successful: {} → {}", + normalizedSource.getFileName(), + cachedPdf.getFileName()); + return cachedPdf; + + } catch (IOException e) { + log.error("LibreOffice conversion I/O error: {}", normalizedSource, e); + return null; + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + log.warn("LibreOffice conversion interrupted: {}", normalizedSource); + return null; + } + } + + /** + * Get the cache path for converted PDF. + * + * @param sourceFile source office file + * @return cache path, pattern: {@code .pdf} + */ + public Path resolveCachedPdfPath(Path sourceFile) { + Path normalizedSource = sourceFile.toAbsolutePath().normalize(); + return normalizedSource.resolveSibling(normalizedSource.getFileName() + ".pdf"); + } + + /** + * Return the cached PDF path if it exists and is newer than source, otherwise {@code null}. + */ + public Path getCachedPdfIfUpToDate(Path sourceFile) { + Path normalizedSource = sourceFile.toAbsolutePath().normalize(); + Path cachedPdf = resolveCachedPdfPath(normalizedSource); + + try { + if (Files.exists(cachedPdf) + && Files.getLastModifiedTime(cachedPdf) + .compareTo(Files.getLastModifiedTime(normalizedSource)) + >= 0) { + return cachedPdf; + } + } catch (IOException e) { + log.warn("Failed to check cached PDF timestamp for {}", normalizedSource, e); + } + return null; + } + + private static String stripExtension(String fileName) { + int lastDot = fileName.lastIndexOf('.'); + return lastDot > 0 ? 
fileName.substring(0, lastDot) : fileName; + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/gateway/AdpAIGatewayOperator.java b/himarket-server/src/main/java/com/alibaba/himarket/service/gateway/AdpAIGatewayOperator.java index 0fdaf0e39..ca9f2adb4 100644 --- a/himarket-server/src/main/java/com/alibaba/himarket/service/gateway/AdpAIGatewayOperator.java +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/gateway/AdpAIGatewayOperator.java @@ -545,6 +545,26 @@ private String convertToModelConfigJson( private List buildRoutesFromAdpService( AdpAiServiceDetailResult.AdpAiServiceDetail data, List domains) { if (data.getMethodPathList() == null || data.getMethodPathList().isEmpty()) { + // MODEL_API 场景下,部分模型(如 qwen-plus)不返回 methodPathList, + // 此时使用 basePath + 默认路径构建兜底路由,以确保 BaseUrlExtractor 能提取 baseUrl + if (domains != null && !domains.isEmpty()) { + String defaultPath = + (data.getBasePath() != null ? data.getBasePath() : "") + + "/v1/chat/completions"; + HttpRouteResult route = new HttpRouteResult(); + route.setDomains(domains); + route.setMatch( + HttpRouteResult.RouteMatchResult.builder() + .methods(Collections.singletonList("POST")) + .path( + HttpRouteResult.RouteMatchPath.builder() + .value(defaultPath) + .type("Exact") + .build()) + .build()); + route.setBuiltin(false); + return Collections.singletonList(route); + } return Collections.emptyList(); } diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hichat/service/AbstractLlmService.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hichat/service/AbstractLlmService.java index 5802bfd57..3bce2b845 100644 --- a/himarket-server/src/main/java/com/alibaba/himarket/service/hichat/service/AbstractLlmService.java +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hichat/service/AbstractLlmService.java @@ -61,7 +61,6 @@ public Flux invokeLlm( try { LlmChatRequest request = composeRequest(param); - // request.tryResolveDns(); Model 
chatModel = newChatModel(request); ChatBot chatBot = chatBotManager.getOrCreateChatBot(request, chatModel); @@ -176,7 +175,7 @@ protected ModelFeature getOrDefaultModelFeature(ProductResult product) { .orElseGet(() -> ModelFeature.builder().build()); return ModelFeature.builder() - .model(StrUtil.blankToDefault(modelFeature.getModel(), "qwen-max")) + .model(modelFeature.getModel()) .maxTokens(ObjectUtil.defaultIfNull(modelFeature.getMaxTokens(), 5000)) .temperature(ObjectUtil.defaultIfNull(modelFeature.getTemperature(), 0.9)) .streaming(ObjectUtil.defaultIfNull(modelFeature.getStreaming(), true)) diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hichat/support/LlmChatRequest.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hichat/support/LlmChatRequest.java index fcd4e418e..41447aca9 100644 --- a/himarket-server/src/main/java/com/alibaba/himarket/service/hichat/support/LlmChatRequest.java +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hichat/support/LlmChatRequest.java @@ -1,11 +1,9 @@ package com.alibaba.himarket.service.hichat.support; -import cn.hutool.core.collection.CollUtil; import com.alibaba.himarket.dto.result.product.ProductResult; import com.alibaba.himarket.support.chat.mcp.MCPTransportConfig; import io.agentscope.core.message.Msg; import java.net.URI; -import java.util.HashMap; import java.util.List; import java.util.Map; import lombok.Builder; @@ -76,37 +74,4 @@ public class LlmChatRequest { * MCP servers with transport config */ private List mcpConfigs; - - @Deprecated - public void tryResolveDns() { - if (CollUtil.isEmpty(gatewayUris) || !"http".equalsIgnoreCase(uri.getScheme())) { - return; - } - - try { - // Randomly select a gateway URI - URI gatewayUri = gatewayUris.get(0); - - String originalHost = uri.getHost(); - // Build new URI keeping original path and query but replacing scheme, host and port - this.uri = - new URI( - gatewayUri.getScheme(), - uri.getUserInfo(), - 
gatewayUri.getHost(), - gatewayUri.getPort(), - uri.getPath(), - uri.getQuery(), - uri.getFragment()); - - if (this.headers == null) { - this.headers = new HashMap<>(); - } - // Set Host header - this.headers.put("Host", originalHost); - - } catch (Exception e) { - log.warn("Failed to resolve DNS for URI: {}", uri, e); - } - } } diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/RemoteWorkspaceService.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/RemoteWorkspaceService.java new file mode 100644 index 000000000..2a878757d --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/RemoteWorkspaceService.java @@ -0,0 +1,443 @@ +package com.alibaba.himarket.service.hicoding; + +import com.alibaba.himarket.config.AcpProperties; +import com.alibaba.himarket.core.exception.BusinessException; +import com.alibaba.himarket.core.exception.ErrorCode; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.IOException; +import java.net.ConnectException; +import java.net.URI; +import java.net.http.HttpClient; +import java.net.http.HttpConnectTimeoutException; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.time.Duration; +import java.util.ArrayList; +import java.util.Base64; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Service; + +/** + * 远程沙箱文件操作服务。 + * + *

封装通过 Sidecar HTTP API 操作远程沙箱内文件的逻辑, + * 供 WorkspaceController 在 runtime=remote 时调用。 + * + *

使用 AcpProperties.remote 配置获取 Sidecar 地址。 + */ +@Service +public class RemoteWorkspaceService { + + private static final Logger log = LoggerFactory.getLogger(RemoteWorkspaceService.class); + private static final Duration HTTP_TIMEOUT = Duration.ofSeconds(10); + private static final Duration UPLOAD_TIMEOUT = Duration.ofSeconds(30); + private static final String WORKSPACE_ROOT = "/workspace"; + + private final AcpProperties acpProperties; + private final HttpClient httpClient; + private final ObjectMapper objectMapper; + + public RemoteWorkspaceService(AcpProperties acpProperties) { + this.acpProperties = acpProperties; + this.httpClient = + HttpClient.newBuilder() + .connectTimeout(HTTP_TIMEOUT) + .version(HttpClient.Version.HTTP_1_1) + .build(); + this.objectMapper = new ObjectMapper(); + } + + /** + * 获取远程 Sidecar 的地址。 + */ + private String getRemoteHost() { + return acpProperties.getRemote().getHost(); + } + + private int getRemotePort() { + return acpProperties.getRemote().getPort(); + } + + /** + * 获取 Pod 内的目录树。 + * 通过 Sidecar HTTP API /files/list 实现,并将返回结果转换为前端期望的树形结构。 + * + * @param userId 用户 ID + * @param cwd 工作目录路径 + * @param depth 目录深度 + * @return 前端期望的树形结构 Map + * @throws IOException 网络或解析异常 + */ + public Map getDirectoryTree(String userId, String cwd, int depth) + throws IOException { + validateUserPath(userId, cwd); + String host = getRemoteHost(); + String url = "http://" + host + ":" + getRemotePort() + "/files/list"; + String body = objectMapper.writeValueAsString(Map.of("path", cwd, "depth", depth)); + + HttpResponse response = sendPost(url, body); + if (response.statusCode() != 200) { + throw new BusinessException( + ErrorCode.SANDBOX_ERROR, + "Sidecar /files/list 失败 (status=" + + response.statusCode() + + "): " + + response.body()); + } + + // Sidecar 返回: [{"name":"src","type":"dir","children":[...]}] + List> sidecarItems = + objectMapper.readValue(response.body(), new TypeReference<>() {}); + + // 转换为前端期望的树形结构(根节点为 cwd 目录) + Map root = new 
HashMap<>(); + root.put("name", extractDirName(cwd)); + root.put("path", cwd); + root.put("type", "directory"); + root.put("children", convertChildren(sidecarItems, cwd)); + return root; + } + + /** + * 读取 Pod 内的文件内容(UTF-8 文本)。 + * + * @param userId 用户 ID + * @param filePath 文件路径 + * @return 文件内容字符串 + * @throws IOException 网络或解析异常 + */ + public String readFile(String userId, String filePath) throws IOException { + return readFileWithEncoding(userId, filePath, "utf-8").get("content").toString(); + } + + /** + * 读取 Pod 内的文件内容,支持指定编码。 + * + * @param userId 用户 ID + * @param filePath 文件路径 + * @param encoding 编码方式:"utf-8" 或 "base64" + * @return 包含 content 和 encoding 的 Map + * @throws IOException 网络或解析异常 + */ + public Map readFileWithEncoding(String userId, String filePath, String encoding) + throws IOException { + String resolved = resolvePathForUser(userId, filePath); + String host = getRemoteHost(); + String url = "http://" + host + ":" + getRemotePort() + "/files/read"; + Map params = new HashMap<>(); + params.put("path", resolved); + params.put("encoding", encoding); + String body = objectMapper.writeValueAsString(params); + + HttpResponse response = sendPost(url, body); + if (response.statusCode() != 200) { + throw new BusinessException( + ErrorCode.SANDBOX_ERROR, + "Sidecar /files/read 失败 (status=" + + response.statusCode() + + "): " + + response.body()); + } + + JsonNode json = objectMapper.readTree(response.body()); + String content = json.has("content") ? json.get("content").asText() : ""; + String respEncoding = json.has("encoding") ? 
json.get("encoding").asText() : encoding; + return Map.of("content", content, "encoding", respEncoding); + } + + /** + * 从远程沙箱下载文件的原始字节。 + * 优先尝试 Sidecar GET /files/download(原始二进制流,兼容 OpenSandbox execd), + * 若 404 则降级为 POST /files/read + base64 解码。 + */ + public byte[] readFileBytes(String userId, String filePath) throws IOException { + String resolved = resolvePathForUser(userId, filePath); + String host = getRemoteHost(); + + // 方案 1:GET /files/download — 直接返回原始字节流,无 JSON 包装 + String encodedPath = + java.net.URLEncoder.encode(resolved, java.nio.charset.StandardCharsets.UTF_8); + String downloadUrl = + "http://" + host + ":" + getRemotePort() + "/files/download?path=" + encodedPath; + try { + HttpResponse dlResp = + httpClient.send( + HttpRequest.newBuilder(URI.create(downloadUrl)) + .GET() + .timeout(Duration.ofSeconds(30)) + .build(), + HttpResponse.BodyHandlers.ofByteArray()); + if (dlResp.statusCode() == 200) { + return dlResp.body(); + } + log.info( + "Sidecar /files/download status={}, fallback to /files/read", + dlResp.statusCode()); + } catch (ConnectException | HttpConnectTimeoutException e) { + log.error("Sidecar 不可达: {}", downloadUrl, e); + throw new BusinessException(ErrorCode.SANDBOX_CONNECTION_FAILED, downloadUrl, e); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new IOException("Sidecar 请求被中断: " + downloadUrl, e); + } + + // 方案 2:POST /files/read + base64 解码 + Map result = readFileWithEncoding(userId, filePath, "base64"); + String content = result.get("content").toString(); + String encoding = result.get("encoding").toString(); + if ("base64".equals(encoding)) { + // 清理空白字符,修复 padding + String cleaned = content.replaceAll("[\\s\\r\\n]", ""); + int mod = cleaned.length() % 4; + if (mod != 0) { + cleaned = cleaned + "=".repeat(4 - mod); + } + return Base64.getMimeDecoder().decode(cleaned); + } + return content.getBytes(java.nio.charset.StandardCharsets.UTF_8); + } + + /** + * 获取 Pod 内的文件变更列表。 + * + * @param userId 
用户 ID + * @param cwd 工作目录路径 + * @param since 时间戳(毫秒),返回此时间之后的变更 + * @return 文件变更列表,每项包含 path、mtimeMs、size、ext + * @throws IOException 网络或解析异常 + */ + public List> getChanges(String userId, String cwd, long since) + throws IOException { + validateUserPath(userId, cwd); + String host = getRemoteHost(); + String url = "http://" + host + ":" + getRemotePort() + "/files/changes"; + String body = objectMapper.writeValueAsString(Map.of("cwd", cwd, "since", since)); + + HttpResponse response = sendPost(url, body); + if (response.statusCode() != 200) { + throw new BusinessException( + ErrorCode.SANDBOX_ERROR, + "Sidecar /files/changes 失败 (status=" + + response.statusCode() + + "): " + + response.body()); + } + + JsonNode json = objectMapper.readTree(response.body()); + List> changes = new ArrayList<>(); + JsonNode changesNode = json.has("changes") ? json.get("changes") : json; + + if (changesNode.isArray()) { + for (JsonNode item : changesNode) { + Map change = new HashMap<>(); + change.put("path", item.has("path") ? item.get("path").asText() : ""); + change.put("mtimeMs", item.has("mtimeMs") ? item.get("mtimeMs").asLong() : 0L); + change.put("size", item.has("size") ? item.get("size").asLong() : 0L); + change.put("ext", item.has("ext") ? item.get("ext").asText() : ""); + changes.add(change); + } + } + return changes; + } + + /** + * 将文件路径解析为用户工作空间下的绝对路径。 + * + *

沙箱内每个用户的工作目录为 /workspace/{userId}/,但前端传入的路径可能是 + * 相对路径(如 "skills/foo.html"),需要补全为 /workspace/{userId}/skills/foo.html。 + * + * @param userId 用户 ID + * @param filePath 原始文件路径(可能是绝对或相对路径) + * @return 解析后的绝对路径 + */ + private String resolvePathForUser(String userId, String filePath) { + if (filePath == null || filePath.isBlank()) { + return filePath; + } + // 已经是绝对路径 → 保持原样(可能已包含 userId 或指向共享目录) + if (filePath.startsWith("/")) { + return filePath; + } + // 相对路径 → 解析到用户工作空间 + return WORKSPACE_ROOT + "/" + userId + "/" + filePath; + } + + /** + * 验证文件操作路径是否位于 /workspace/{userId} 目录范围内。 + * 将路径规范化后检查是否以用户专属工作空间开头,防止路径遍历和跨用户访问。 + * + * @param userId 当前用户 ID + * @param path 待验证的路径 + * @throws IllegalArgumentException 如果路径超出用户工作空间范围 + */ + private void validateUserPath(String userId, String path) { + if (path == null || path.isBlank()) { + throw new IllegalArgumentException("路径不能为空"); + } + String userRoot = WORKSPACE_ROOT + "/" + userId; + Path normalized = Paths.get(path).normalize(); + if (!normalized.startsWith(Paths.get(userRoot).normalize())) { + throw new IllegalArgumentException("路径越界:不允许访问其他用户的工作空间"); + } + } + + /** + * 上传文件到远程沙箱。 + * 通过 Sidecar HTTP API POST /files/write 实现,文件内容以 base64 编码传输。 + * + * @param userId 用户 ID + * @param originalFilename 原始文件名 + * @param fileData 文件内容字节数组 + * @return 文件在沙箱中的绝对路径 + * @throws IOException 网络或解析异常 + */ + public String uploadFile(String userId, String originalFilename, byte[] fileData) + throws IOException { + String sanitized = sanitizeUploadFileName(originalFilename); + String targetPath = WORKSPACE_ROOT + "/" + userId + "/uploads/" + sanitized; + validateUserPath(userId, targetPath); + + String base64Content = Base64.getEncoder().encodeToString(fileData); + String url = "http://" + getRemoteHost() + ":" + getRemotePort() + "/files/write"; + String body = + objectMapper.writeValueAsString( + Map.of("path", targetPath, "content", base64Content, "encoding", "base64")); + + HttpResponse response = sendPost(url, body, 
UPLOAD_TIMEOUT); + if (response.statusCode() != 200) { + throw new BusinessException( + ErrorCode.SANDBOX_ERROR, + "Sidecar /files/write 失败 (status=" + + response.statusCode() + + "): " + + response.body()); + } + log.info("File uploaded to sandbox: user={}, path={}", userId, targetPath); + return targetPath; + } + + /** + * 清洗上传文件名:去除路径分隔符,替换非安全字符。 + */ + private static String sanitizeUploadFileName(String originalName) { + if (originalName == null || originalName.isBlank()) { + return "unnamed"; + } + String baseName = originalName; + int lastSlash = Math.max(baseName.lastIndexOf('/'), baseName.lastIndexOf('\\')); + if (lastSlash >= 0) { + baseName = baseName.substring(lastSlash + 1); + } + String sanitized = baseName.replaceAll("[^a-zA-Z0-9._-]", "_"); + return sanitized.isBlank() ? "unnamed" : sanitized; + } + + /** + * 发送 POST 请求到 Sidecar(使用默认超时)。 + */ + private HttpResponse sendPost(String url, String body) throws IOException { + return sendPost(url, body, HTTP_TIMEOUT); + } + + /** + * 发送 POST 请求到 Sidecar。 + * 捕获网络连接异常(ConnectException、HttpConnectTimeoutException), + * 包装为 BusinessException(SANDBOX_CONNECTION_FAILED)。 + */ + private HttpResponse sendPost(String url, String body, Duration timeout) + throws IOException { + try { + return httpClient.send( + HttpRequest.newBuilder(URI.create(url)) + .POST(HttpRequest.BodyPublishers.ofString(body)) + .header("Content-Type", "application/json") + .timeout(timeout) + .build(), + HttpResponse.BodyHandlers.ofString()); + } catch (ConnectException | HttpConnectTimeoutException e) { + log.error("Sidecar 不可达: {}", url, e); + throw new BusinessException(ErrorCode.SANDBOX_CONNECTION_FAILED, url, e); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new IOException("Sidecar 请求被中断: " + url, e); + } + } + + /** + * 将 Sidecar 返回的文件列表转换为前端期望的树形结构。 + * + *

Sidecar 格式: {"name":"src","type":"dir","children":[...]} + *

前端格式: {"name":"src","path":"/workspace/src","type":"directory","children":[...]} + * + * @param sidecarItems Sidecar 返回的文件列表 + * @param parentPath 父目录路径 + * @return 转换后的子节点列表 + */ + private List> convertChildren( + List> sidecarItems, String parentPath) { + List> children = new ArrayList<>(); + if (sidecarItems == null) { + return children; + } + + for (Map item : sidecarItems) { + String name = (String) item.get("name"); + String type = (String) item.get("type"); + String childPath = + parentPath.endsWith("/") ? parentPath + name : parentPath + "/" + name; + + Map node = new HashMap<>(); + node.put("name", name); + node.put("path", childPath); + + if ("dir".equals(type)) { + node.put("type", "directory"); + @SuppressWarnings("unchecked") + List> subChildren = + (List>) item.get("children"); + node.put("children", convertChildren(subChildren, childPath)); + } else { + node.put("type", "file"); + node.put("extension", getExtension(name)); + node.put("size", item.getOrDefault("size", 0L)); + } + + children.add(node); + } + return children; + } + + /** + * 从路径中提取目录名。 + */ + private static String extractDirName(String path) { + if (path == null || path.isEmpty()) { + return ""; + } + String trimmed = path.endsWith("/") ? path.substring(0, path.length() - 1) : path; + int lastSlash = trimmed.lastIndexOf('/'); + return lastSlash >= 0 ? 
trimmed.substring(lastSlash + 1) : trimmed; + } + + /** + * 获取文件扩展名。 + */ + private static String getExtension(String fileName) { + if (fileName == null) { + return ""; + } + int lastDot = fileName.lastIndexOf('.'); + if (lastDot < 0) { + return ""; + } + return fileName.substring(lastDot).toLowerCase(); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/ClaudeCodeConfigGenerator.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/ClaudeCodeConfigGenerator.java new file mode 100644 index 000000000..c1aaeb487 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/ClaudeCodeConfigGenerator.java @@ -0,0 +1,154 @@ +package com.alibaba.himarket.service.hicoding.cli; + +import com.alibaba.himarket.service.hicoding.session.CustomModelConfig; +import com.alibaba.himarket.service.hicoding.session.ResolvedSessionConfig; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Claude Code CLI 工具的配置文件生成器。 + * 生成 .claude/settings.json 文件到工作目录下的 .claude/ 子目录, + * 将 MCP Server 配置写入 {workingDirectory}/.mcp.json 文件(官方推荐路径)。 + * 支持与已有配置文件合并,保留用户已有的其他配置项。 + */ +public class ClaudeCodeConfigGenerator implements CliConfigGenerator { + + private static final Logger logger = LoggerFactory.getLogger(ClaudeCodeConfigGenerator.class); + + private static final String CLAUDE_DIR = ".claude"; + private static final String CONFIG_FILE_NAME = "settings.json"; + + private final ObjectMapper objectMapper; + + public ClaudeCodeConfigGenerator(ObjectMapper objectMapper) { + this.objectMapper = objectMapper; + } + + @Override + 
public String supportedProvider() { + return "claude-code"; + } + + @Override + public Map generateConfig(String workingDirectory, CustomModelConfig config) + throws IOException { + // Step 1: 构建环境变量 Map + Map envVars = new HashMap<>(); + envVars.put("ANTHROPIC_API_KEY", config.getApiKey()); + envVars.put("ANTHROPIC_BASE_URL", config.getBaseUrl()); + envVars.put("ANTHROPIC_MODEL", config.getModelId()); + + // Step 2: 创建 .claude 目录 + Path claudeDir = Path.of(workingDirectory, CLAUDE_DIR); + Files.createDirectories(claudeDir); + + // Step 3: 读取已有 settings.json(保留 mcpServers 等字段) + Path configPath = claudeDir.resolve(CONFIG_FILE_NAME); + Map root = readExistingConfig(configPath); + + // Step 4: 合并 env 字段(声明式环境变量) + @SuppressWarnings("unchecked") + Map envSection = + root.containsKey("env") + ? (Map) root.get("env") + : new LinkedHashMap<>(); + envSection.put("ANTHROPIC_API_KEY", config.getApiKey()); + envSection.put("ANTHROPIC_BASE_URL", config.getBaseUrl()); + envSection.put("ANTHROPIC_MODEL", config.getModelId()); + root.put("env", envSection); + + // Step 5: 设置 model 字段 + root.put("model", config.getModelId()); + + // Step 6: 写入 settings.json + writeConfig(configPath, root); + + return envVars; + } + + @Override + public void generateMcpConfig( + String workingDirectory, List mcpServers) + throws IOException { + if (mcpServers == null || mcpServers.isEmpty()) return; + + Path mcpConfigPath = Path.of(workingDirectory, ".mcp.json"); + + Map root = readExistingConfig(mcpConfigPath); + mergeMcpServers(root, mcpServers); + writeConfig(mcpConfigPath, root); + } + + /** + * 将 MCP Server 列表合并到根配置的 mcpServers 段中。 + * 按 name 去重,新条目覆盖同名旧条目。 + */ + @SuppressWarnings("unchecked") + void mergeMcpServers( + Map root, List mcpServers) { + Map mcpServersMap = + root.containsKey("mcpServers") + ? 
(Map) root.get("mcpServers") + : new LinkedHashMap<>(); + + for (ResolvedSessionConfig.ResolvedMcpEntry entry : mcpServers) { + Map serverConfig = new LinkedHashMap<>(); + serverConfig.put("url", entry.getUrl()); + serverConfig.put("type", entry.getTransportType()); + if (entry.getHeaders() != null && !entry.getHeaders().isEmpty()) { + serverConfig.put("headers", entry.getHeaders()); + } + mcpServersMap.put(entry.getName(), serverConfig); + } + + root.put("mcpServers", mcpServersMap); + } + + @Override + public String skillsDirectory() { + return ".claude/skills/"; + } + + /** + * 读取已有的 JSON 配置文件。 + * 如果文件不存在,返回空 map。 + * 如果文件内容不是合法 JSON,记录警告并返回空 map(后续会覆盖)。 + */ + Map readExistingConfig(Path configPath) { + if (!Files.exists(configPath)) { + return new LinkedHashMap<>(); + } + try { + String content = Files.readString(configPath); + Map existing = + objectMapper.readValue( + content, new TypeReference>() {}); + return existing != null ? existing : new LinkedHashMap<>(); + } catch (Exception e) { + logger.warn("已有配置文件不是合法 JSON,将使用全新配置覆盖: {}", e.getMessage()); + return new LinkedHashMap<>(); + } + } + + /** + * 将配置写入指定的 JSON 配置文件。 + */ + private void writeConfig(Path configPath, Map root) throws IOException { + String json = + objectMapper + .writer() + .with(SerializationFeature.INDENT_OUTPUT) + .writeValueAsString(root); + Files.writeString(configPath, json); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/CliConfigGenerator.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/CliConfigGenerator.java new file mode 100644 index 000000000..ea063ebe9 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/CliConfigGenerator.java @@ -0,0 +1,73 @@ +package com.alibaba.himarket.service.hicoding.cli; + +import com.alibaba.himarket.service.hicoding.session.CustomModelConfig; +import com.alibaba.himarket.service.hicoding.session.ResolvedSessionConfig; +import 
java.io.IOException; +import java.util.List; +import java.util.Map; + +/** + * CLI 工具的自定义模型配置文件生成器接口。 + * 每个支持自定义模型的 CLI 工具(如 Open Code、Qwen Code)提供各自的实现, + * 负责生成对应格式的配置文件并返回需要注入到 CLI 进程的额外环境变量。 + */ +public interface CliConfigGenerator { + + /** + * 该生成器支持的 provider key(如 "opencode"、"qwen-code")。 + * + * @return provider key + */ + String supportedProvider(); + + /** + * 生成配置文件并返回需要注入的额外环境变量。 + * + * @param workingDirectory CLI 进程的工作目录,配置文件将写入该目录 + * @param config 用户的自定义模型配置 + * @return 需要注入到 CLI 进程的额外环境变量(如 API Key) + * @throws IOException 配置文件写入失败时抛出 + */ + Map generateConfig(String workingDirectory, CustomModelConfig config) + throws IOException; + + /** + * 生成 MCP Server 配置(默认空实现)。 + * 子类按需覆盖以实现具体的 MCP 配置注入逻辑。 + * + * @param workingDirectory CLI 进程的工作目录 + * @param mcpServers 解析后的 MCP Server 列表(含完整连接信息) + * @throws IOException 配置文件写入失败时抛出 + */ + default void generateMcpConfig( + String workingDirectory, List mcpServers) + throws IOException { + // 默认不执行任何操作,子类按需覆盖 + } + + /** + * 返回该 CLI 工具的 skills 目录路径(相对于工作目录)。 + * 用于 SkillDownloadPhase 确定 nacos-cli 的输出目录。 + * + * @return skills 目录相对路径,如 ".qoder/skills/" + */ + default String skillsDirectory() { + return "skills/"; + } + + /** + * 生成 Skill 配置(默认实现:生成 nacos-env.yaml)。 + * 按 nacosId 分组,为每个 Nacos 实例生成独立的 .nacos/nacos-env-{nacosId}.yaml 文件。 + * 子类不再需要覆写此方法。 + * + * @param workingDirectory CLI 进程的工作目录 + * @param skills 解析后的 Skill 列表(含 Nacos 坐标和凭证) + * @throws IOException 配置文件写入失败时抛出 + */ + default void generateSkillConfig( + String workingDirectory, List skills) + throws IOException { + if (skills == null || skills.isEmpty()) return; + NacosEnvGenerator.generateNacosEnvFiles(workingDirectory, skills); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/CliConfigGeneratorRegistry.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/CliConfigGeneratorRegistry.java new file mode 100644 index 000000000..ac0c851bc --- /dev/null +++ 
b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/CliConfigGeneratorRegistry.java @@ -0,0 +1,31 @@ +package com.alibaba.himarket.service.hicoding.cli; + +import com.fasterxml.jackson.databind.ObjectMapper; +import java.util.HashMap; +import java.util.Map; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * CLI 配置生成器注册表。 + * + *

将各 {@link CliConfigGenerator} 实现注册为 Spring Bean, + * 供 {@link ConfigFileBuilder} 注入使用。 + */ +@Configuration +public class CliConfigGeneratorRegistry { + + @Bean + public Map configGeneratorRegistry(ObjectMapper objectMapper) { + Map registry = new HashMap<>(); + OpenCodeConfigGenerator openCodeGenerator = new OpenCodeConfigGenerator(objectMapper); + QwenCodeConfigGenerator qwenCodeGenerator = new QwenCodeConfigGenerator(objectMapper); + ClaudeCodeConfigGenerator claudeCodeGenerator = new ClaudeCodeConfigGenerator(objectMapper); + QoderCliConfigGenerator qoderCliGenerator = new QoderCliConfigGenerator(objectMapper); + registry.put(openCodeGenerator.supportedProvider(), openCodeGenerator); + registry.put(qwenCodeGenerator.supportedProvider(), qwenCodeGenerator); + registry.put(claudeCodeGenerator.supportedProvider(), claudeCodeGenerator); + registry.put(qoderCliGenerator.supportedProvider(), qoderCliGenerator); + return registry; + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/ConfigFileBuilder.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/ConfigFileBuilder.java new file mode 100644 index 000000000..190b063a4 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/ConfigFileBuilder.java @@ -0,0 +1,238 @@ +package com.alibaba.himarket.service.hicoding.cli; + +import com.alibaba.himarket.config.AcpProperties.CliProviderConfig; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeConfig; +import com.alibaba.himarket.service.hicoding.sandbox.ConfigFile; +import com.alibaba.himarket.service.hicoding.session.ResolvedSessionConfig; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; +import java.util.HexFormat; 
+import java.util.List; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Component; + +/** + * 配置文件构建器。 + * + *

将 {@link ResolvedSessionConfig} 转换为 {@link ConfigFile} 列表。 封装了 {@link CliConfigGenerator} + * 调用、临时目录管理、文件收集等逻辑。 + * + *

从 {@code HiCodingWebSocketHandler.prepareConfigFiles()} 中提取。 + */ +@Component +public class ConfigFileBuilder { + + private static final Logger logger = LoggerFactory.getLogger(ConfigFileBuilder.class); + + private final Map configGeneratorRegistry; + + public ConfigFileBuilder(Map configGeneratorRegistry) { + this.configGeneratorRegistry = configGeneratorRegistry; + } + + /** + * 构建配置文件列表。 + * + * @param resolved 已解析的会话配置 + * @param providerKey CLI 提供者标识 + * @param providerConfig CLI 提供者配置 + * @param runtimeConfig 运行时配置(额外环境变量会被合并到此对象) + * @return 配置文件列表(含路径、内容、哈希) + */ + public List build( + ResolvedSessionConfig resolved, + String providerKey, + CliProviderConfig providerConfig, + RuntimeConfig runtimeConfig) { + + CliConfigGenerator generator = configGeneratorRegistry.get(providerKey); + if (generator == null) { + logger.warn("[ConfigFileBuilder] 未找到 CliConfigGenerator: providerKey={}", providerKey); + return List.of(); + } + + List configFiles = new ArrayList<>(); + Path tempDir = null; + try { + tempDir = Files.createTempDirectory("sandbox-config-"); + + // 1. 模型配置 + generateModelConfig(generator, tempDir, resolved, runtimeConfig, providerKey); + + // 2. MCP 配置 + if (resolved.getMcpServers() != null + && !resolved.getMcpServers().isEmpty() + && providerConfig.isSupportsMcp()) { + generateMcpConfig(generator, tempDir, resolved, providerKey); + } + + // 3. Skill 配置 + if (resolved.getSkills() != null + && !resolved.getSkills().isEmpty() + && providerConfig.isSupportsSkill()) { + generateSkillConfig(generator, tempDir, resolved, providerKey); + } + + // 4. 
收集所有生成的文件 + configFiles = collectConfigFiles(tempDir); + + } catch (Exception e) { + logger.error( + "[ConfigFileBuilder] 配置文件构建失败: provider={}, error={}", + providerKey, + e.getMessage(), + e); + } finally { + if (tempDir != null) { + deleteDirectory(tempDir); + } + } + return configFiles; + } + + private void generateModelConfig( + CliConfigGenerator generator, + Path tempDir, + ResolvedSessionConfig resolved, + RuntimeConfig runtimeConfig, + String providerKey) { + if (resolved.getCustomModelConfig() == null) { + return; + } + try { + Map extraEnv = + generator.generateConfig(tempDir.toString(), resolved.getCustomModelConfig()); + if (extraEnv != null && !extraEnv.isEmpty()) { + if (runtimeConfig.getEnv() == null) { + runtimeConfig.setEnv(new HashMap<>()); + } + runtimeConfig.getEnv().putAll(extraEnv); + } + logger.info( + "[ConfigFileBuilder] 模型配置已准备: provider={}, baseUrl={}, modelId={}", + providerKey, + resolved.getCustomModelConfig().getBaseUrl(), + resolved.getCustomModelConfig().getModelId()); + } catch (Exception e) { + logger.error( + "[ConfigFileBuilder] 模型配置生成失败: provider={}, error={}", + providerKey, + e.getMessage(), + e); + } + } + + private void generateMcpConfig( + CliConfigGenerator generator, + Path tempDir, + ResolvedSessionConfig resolved, + String providerKey) { + try { + generator.generateMcpConfig(tempDir.toString(), resolved.getMcpServers()); + logger.info( + "[ConfigFileBuilder] MCP 配置已准备: provider={}, {} server(s)", + providerKey, + resolved.getMcpServers().size()); + } catch (Exception e) { + logger.error( + "[ConfigFileBuilder] MCP 配置生成失败: provider={}, error={}", + providerKey, + e.getMessage(), + e); + } + } + + private void generateSkillConfig( + CliConfigGenerator generator, + Path tempDir, + ResolvedSessionConfig resolved, + String providerKey) { + try { + generator.generateSkillConfig(tempDir.toString(), resolved.getSkills()); + logger.info( + "[ConfigFileBuilder] Skill 配置已准备: provider={}, {} skill(s)", + providerKey, + 
resolved.getSkills().size()); + } catch (Exception e) { + logger.error( + "[ConfigFileBuilder] Skill 配置生成失败: provider={}, error={}", + providerKey, + e.getMessage(), + e); + } + } + + private List collectConfigFiles(Path tempDir) throws IOException { + List configFiles = new ArrayList<>(); + Files.walk(tempDir) + .filter(Files::isRegularFile) + .forEach( + file -> { + try { + String relativePath = tempDir.relativize(file).toString(); + String content = Files.readString(file); + String hash = sha256(content); + ConfigFile.ConfigType type = inferConfigType(relativePath); + configFiles.add(new ConfigFile(relativePath, content, hash, type)); + } catch (IOException e) { + logger.warn("[ConfigFileBuilder] 读取配置文件失败: {}", e.getMessage()); + } + }); + return configFiles; + } + + /** + * 根据文件相对路径推断配置类型。 + */ + static ConfigFile.ConfigType inferConfigType(String relativePath) { + // .nacos/ 目录下的 yaml 文件识别为 SKILL_CONFIG + if (relativePath.startsWith(".nacos/") && relativePath.endsWith(".yaml")) { + return ConfigFile.ConfigType.SKILL_CONFIG; + } + if (relativePath.contains("skills") && relativePath.endsWith("SKILL.md")) { + return ConfigFile.ConfigType.SKILL_CONFIG; + } + if (relativePath.endsWith("settings.json")) { + // settings.json 现在包含合并后的模型+MCP配置,标记为 MODEL_SETTINGS + return ConfigFile.ConfigType.MODEL_SETTINGS; + } + return ConfigFile.ConfigType.CUSTOM; + } + + /** + * 计算内容的 SHA-256 哈希值。 + */ + static String sha256(String content) { + try { + MessageDigest digest = MessageDigest.getInstance("SHA-256"); + byte[] hash = digest.digest(content.getBytes(StandardCharsets.UTF_8)); + return HexFormat.of().formatHex(hash); + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException("SHA-256 不可用", e); + } + } + + private void deleteDirectory(Path dir) { + try { + Files.walk(dir) + .sorted(Comparator.reverseOrder()) + .forEach( + path -> { + try { + Files.deleteIfExists(path); + } catch (IOException ignored) { + } + }); + } catch (IOException ignored) { + } + } +} diff 
--git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/NacosEnvGenerator.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/NacosEnvGenerator.java new file mode 100644 index 000000000..c26fa910f --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/NacosEnvGenerator.java @@ -0,0 +1,120 @@ +package com.alibaba.himarket.service.hicoding.cli; + +import com.alibaba.himarket.service.hicoding.session.ResolvedSessionConfig; +import java.io.IOException; +import java.net.URI; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * nacos-env.yaml 生成工具。 + * 按 nacosId 分组,为每个 Nacos 实例生成独立的配置文件。 + */ +public final class NacosEnvGenerator { + + private static final Logger logger = LoggerFactory.getLogger(NacosEnvGenerator.class); + private static final String NACOS_DIR = ".nacos"; + + private NacosEnvGenerator() {} + + /** + * 为 Skill 列表生成 nacos-env.yaml 文件。 + * 按 nacosId 分组,每个 nacosId 生成一个 .nacos/nacos-env-{nacosId}.yaml。 + */ + public static void generateNacosEnvFiles( + String workingDirectory, List skills) + throws IOException { + + Path nacosDir = Path.of(workingDirectory, NACOS_DIR); + Files.createDirectories(nacosDir); + + // 按 nacosId 分组,取每组第一个 entry 的凭证信息 + Map byNacosId = + skills.stream() + .collect( + Collectors.toMap( + ResolvedSessionConfig.ResolvedSkillEntry::getNacosId, + s -> s, + (existing, replacement) -> existing)); + + for (var entry : byNacosId.entrySet()) { + String nacosId = entry.getKey(); + var skill = entry.getValue(); + try { + String yaml = buildNacosEnvYaml(skill); + Path filePath = nacosDir.resolve("nacos-env-" + nacosId + ".yaml"); + Files.writeString(filePath, yaml); + } catch (Exception e) { + logger.error("生成 nacos-env-{}.yaml 失败: {}", nacosId, e.getMessage(), e); + } + } + } + + /** + * 从 
ResolvedSkillEntry 构建 nacos-env.yaml 内容。 + */ + static String buildNacosEnvYaml(ResolvedSessionConfig.ResolvedSkillEntry skill) { + HostPort hp = parseServerAddr(skill.getServerAddr()); + + StringBuilder sb = new StringBuilder(); + sb.append("host: ").append(hp.host()).append('\n'); + sb.append("port: ").append(hp.port()).append('\n'); + + boolean isAliyun = + skill.getAccessKey() != null + && !skill.getAccessKey().isBlank() + && skill.getSecretKey() != null + && !skill.getSecretKey().isBlank(); + sb.append("authType: ").append(isAliyun ? "aliyun" : "nacos").append('\n'); + + sb.append("username: ").append(nullToEmpty(skill.getUsername())).append('\n'); + sb.append("password: ").append(nullToEmpty(skill.getPassword())).append('\n'); + sb.append("namespace: ").append(nullToEmpty(skill.getNamespace())).append('\n'); + + if (isAliyun) { + sb.append("accessKey: ").append(skill.getAccessKey()).append('\n'); + sb.append("secretKey: ").append(skill.getSecretKey()).append('\n'); + } + + return sb.toString(); + } + + /** + * 解析 serverAddr URL 为 host 和 port。 + * 支持格式: http://host:port, https://host:port, host:port + */ + static HostPort parseServerAddr(String serverAddr) { + if (serverAddr == null || serverAddr.isBlank()) { + throw new IllegalArgumentException("serverAddr 不能为空"); + } + try { + String uriStr = serverAddr; + if (!uriStr.contains("://")) { + uriStr = "http://" + uriStr; + } + URI uri = URI.create(uriStr); + String host = uri.getHost(); + int port = uri.getPort(); + if (host == null || host.isBlank()) { + throw new IllegalArgumentException("无法从 serverAddr 解析 host: " + serverAddr); + } + if (port == -1) { + port = 8848; // Nacos 默认端口 + } + return new HostPort(host, port); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("serverAddr 格式不合法: " + serverAddr, e); + } + } + + record HostPort(String host, int port) {} + + private static String nullToEmpty(String s) { + return s == null ? 
"" : s; + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/OpenCodeConfigGenerator.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/OpenCodeConfigGenerator.java new file mode 100644 index 000000000..190507642 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/OpenCodeConfigGenerator.java @@ -0,0 +1,194 @@ +package com.alibaba.himarket.service.hicoding.cli; + +import com.alibaba.himarket.service.hicoding.session.CustomModelConfig; +import com.alibaba.himarket.service.hicoding.session.ResolvedSessionConfig; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Open Code CLI 工具的配置文件生成器。 + * 生成 opencode.json 文件到工作目录,包含自定义 provider 定义和 model 指定。 + * 支持与已有 opencode.json 合并,保留用户已有的其他配置项。 + * 支持 MCP Server 和 Skill 配置。 + */ +public class OpenCodeConfigGenerator implements CliConfigGenerator { + + private static final Logger logger = LoggerFactory.getLogger(OpenCodeConfigGenerator.class); + + private static final String CONFIG_FILE_NAME = "opencode.json"; + private static final String OPENCODE_DIR = ".opencode"; + private static final String PROVIDER_KEY = "custom-provider"; + private static final String NPM_PACKAGE = "@ai-sdk/openai-compatible"; + private static final String API_KEY_ENV_REF = "{env:CUSTOM_MODEL_API_KEY}"; + private static final String ENV_VAR_NAME = "CUSTOM_MODEL_API_KEY"; + + private final ObjectMapper objectMapper; + + public OpenCodeConfigGenerator(ObjectMapper objectMapper) { + this.objectMapper = objectMapper; + } + + @Override + public String supportedProvider() { + 
return "opencode"; + } + + @Override + public Map generateConfig(String workingDirectory, CustomModelConfig config) + throws IOException { + Path configPath = Path.of(workingDirectory, CONFIG_FILE_NAME); + + // 1. 读取已有配置(如存在) + Map root = readExistingConfig(configPath); + + // 2. 合并自定义 provider 配置 + mergeCustomProvider(root, config); + + // 3. 写入 opencode.json + writeConfig(configPath, root); + + // 4. 返回环境变量 map + Map envVars = new HashMap<>(); + envVars.put(ENV_VAR_NAME, config.getApiKey()); + return envVars; + } + + /** + * 读取已有的 opencode.json 配置文件。 + * 如果文件不存在,返回空 map。 + * 如果文件内容不是合法 JSON,记录警告并返回空 map(后续会覆盖)。 + */ + Map readExistingConfig(Path configPath) { + if (!Files.exists(configPath)) { + return new LinkedHashMap<>(); + } + try { + String content = Files.readString(configPath); + Map existing = + objectMapper.readValue( + content, new TypeReference>() {}); + return existing != null ? existing : new LinkedHashMap<>(); + } catch (Exception e) { + logger.warn("已有 opencode.json 不是合法 JSON,将使用全新配置覆盖: {}", e.getMessage()); + return new LinkedHashMap<>(); + } + } + + /** + * 将自定义 provider 配置合并到根配置中。 + * 保留已有的其他 provider 条目,新增或覆盖 custom-provider。 + */ + @SuppressWarnings("unchecked") + void mergeCustomProvider(Map root, CustomModelConfig config) { + String modelName = + (config.getModelName() != null && !config.getModelName().isBlank()) + ? 
config.getModelName() + : config.getModelId(); + + // 构建 custom-provider 的 options + Map options = new LinkedHashMap<>(); + options.put("baseURL", config.getBaseUrl()); + options.put("apiKey", API_KEY_ENV_REF); + + // 构建 model 条目 + Map modelEntry = new LinkedHashMap<>(); + modelEntry.put("name", modelName); + + Map models = new LinkedHashMap<>(); + models.put(config.getModelId(), modelEntry); + + // 构建 custom-provider + Map customProvider = new LinkedHashMap<>(); + customProvider.put("npm", NPM_PACKAGE); + customProvider.put("name", modelName); + customProvider.put("options", options); + customProvider.put("models", models); + + // 合并到 provider map(保留已有 provider) + Map providers = + root.containsKey("provider") + ? (Map) root.get("provider") + : new LinkedHashMap<>(); + providers.put(PROVIDER_KEY, customProvider); + root.put("provider", providers); + + // 设置 model 字段 + root.put("model", PROVIDER_KEY + "/" + config.getModelId()); + } + + /** + * 将配置写入 opencode.json 文件。 + */ + private void writeConfig(Path configPath, Map root) throws IOException { + String json = + objectMapper + .writer() + .with(SerializationFeature.INDENT_OUTPUT) + .writeValueAsString(root); + Files.writeString(configPath, json); + } + + @Override + public void generateMcpConfig( + String workingDirectory, List mcpServers) + throws IOException { + if (mcpServers == null || mcpServers.isEmpty()) return; + + Path configPath = Path.of(workingDirectory, CONFIG_FILE_NAME); + Map root = readExistingConfig(configPath); + mergeMcpServers(root, mcpServers); + writeConfig(configPath, root); + } + + @Override + public String skillsDirectory() { + return ".opencode/skills/"; + } + + /** + * 将 MCP Server 列表合并到根配置的 mcp 段中。 + * OpenCode 使用 "mcp" 字段(不是 "mcpServers"),格式为: + * { + * "mcp": { + * "server-name": { + * "type": "remote", + * "url": "https://...", + * "headers": { ... 
} + * } + * } + * } + */ + @SuppressWarnings("unchecked") + void mergeMcpServers( + Map root, List mcpServers) { + Map mcpMap = + root.containsKey("mcp") + ? (Map) root.get("mcp") + : new LinkedHashMap<>(); + + for (ResolvedSessionConfig.ResolvedMcpEntry entry : mcpServers) { + Map serverConfig = new LinkedHashMap<>(); + // OpenCode 使用 "remote" 类型表示远程 MCP 服务器 + serverConfig.put("type", "remote"); + serverConfig.put("url", entry.getUrl()); + if (entry.getHeaders() != null && !entry.getHeaders().isEmpty()) { + serverConfig.put("headers", entry.getHeaders()); + } + // 默认启用 + serverConfig.put("enabled", true); + mcpMap.put(entry.getName(), serverConfig); + } + + root.put("mcp", mcpMap); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/ProtocolTypeMapper.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/ProtocolTypeMapper.java new file mode 100644 index 000000000..85200898e --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/ProtocolTypeMapper.java @@ -0,0 +1,37 @@ +package com.alibaba.himarket.service.hicoding.cli; + +import java.util.List; + +/** + * 将 aiProtocols 列表映射为 CustomModelConfig 的 protocolType 的工具类。 + * + *

映射规则: + *

    + *
  • 列表为空或 null 时,返回 "openai"
  • + *
  • 第一个元素包含 "openai"(不区分大小写)时,返回 "openai"
  • + *
  • 第一个元素包含 "anthropic"(不区分大小写)时,返回 "anthropic"
  • + *
  • 其他情况默认返回 "openai"
  • + *
+ */ +public class ProtocolTypeMapper { + + /** + * 将 aiProtocols 列表映射为协议类型字符串。 + * + * @param aiProtocols AI 协议列表 + * @return 协议类型,"openai" 或 "anthropic" + */ + public static String map(List aiProtocols) { + if (aiProtocols == null || aiProtocols.isEmpty()) { + return "openai"; + } + String first = aiProtocols.get(0); + if (first.toLowerCase().contains("openai")) { + return "openai"; + } + if (first.toLowerCase().contains("anthropic")) { + return "anthropic"; + } + return "openai"; + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/QoderCliConfigGenerator.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/QoderCliConfigGenerator.java new file mode 100644 index 000000000..ceb82594a --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/QoderCliConfigGenerator.java @@ -0,0 +1,125 @@ +package com.alibaba.himarket.service.hicoding.cli; + +import com.alibaba.himarket.service.hicoding.session.CustomModelConfig; +import com.alibaba.himarket.service.hicoding.session.ResolvedSessionConfig; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * QoderCli 工具的配置文件生成器。 + * 生成 .qoder/settings.json 文件到工作目录下的 .qoder/ 子目录, + * 将 MCP Server 配置写入 mcpServers 字段。 + * 支持与已有 .qoder/settings.json 合并,保留用户已有的其他配置项。 + */ +public class QoderCliConfigGenerator implements CliConfigGenerator { + + private static final Logger logger = LoggerFactory.getLogger(QoderCliConfigGenerator.class); + + private static final String QODER_DIR = ".qoder"; + private static final String CONFIG_FILE_NAME = "settings.json"; + + private final ObjectMapper 
objectMapper; + + public QoderCliConfigGenerator(ObjectMapper objectMapper) { + this.objectMapper = objectMapper; + } + + @Override + public String supportedProvider() { + return "qodercli"; + } + + @Override + public Map generateConfig(String workingDirectory, CustomModelConfig config) + throws IOException { + // QoderCli 不支持自定义模型配置,返回空 map + return Map.of(); + } + + @Override + public void generateMcpConfig( + String workingDirectory, List mcpServers) + throws IOException { + if (mcpServers == null || mcpServers.isEmpty()) return; + + Path qoderDir = Path.of(workingDirectory, QODER_DIR); + Path configPath = qoderDir.resolve(CONFIG_FILE_NAME); + Files.createDirectories(qoderDir); + + Map root = readExistingConfig(configPath); + mergeMcpServers(root, mcpServers); + writeConfig(configPath, root); + } + + /** + * 将 MCP Server 列表合并到根配置的 mcpServers 段中。 + * 按 name 去重,新条目覆盖同名旧条目。 + */ + @SuppressWarnings("unchecked") + void mergeMcpServers( + Map root, List mcpServers) { + Map mcpServersMap = + root.containsKey("mcpServers") + ? (Map) root.get("mcpServers") + : new LinkedHashMap<>(); + + for (ResolvedSessionConfig.ResolvedMcpEntry entry : mcpServers) { + Map serverConfig = new LinkedHashMap<>(); + serverConfig.put("url", entry.getUrl()); + serverConfig.put("type", entry.getTransportType()); + if (entry.getHeaders() != null && !entry.getHeaders().isEmpty()) { + serverConfig.put("headers", entry.getHeaders()); + } + mcpServersMap.put(entry.getName(), serverConfig); + } + + root.put("mcpServers", mcpServersMap); + } + + /** + * 读取已有的 .qoder/settings.json 配置文件。 + * 如果文件不存在,返回空 map。 + * 如果文件内容不是合法 JSON,记录警告并返回空 map(后续会覆盖)。 + */ + Map readExistingConfig(Path configPath) { + if (!Files.exists(configPath)) { + return new LinkedHashMap<>(); + } + try { + String content = Files.readString(configPath); + Map existing = + objectMapper.readValue( + content, new TypeReference>() {}); + return existing != null ? 
existing : new LinkedHashMap<>(); + } catch (Exception e) { + logger.warn("已有 .qoder/settings.json 不是合法 JSON,将使用全新配置覆盖: {}", e.getMessage()); + return new LinkedHashMap<>(); + } + } + + /** + * 将配置写入 .qoder/settings.json 文件。 + */ + private void writeConfig(Path configPath, Map root) throws IOException { + String json = + objectMapper + .writer() + .with(SerializationFeature.INDENT_OUTPUT) + .writeValueAsString(root); + Files.writeString(configPath, json); + } + + @Override + public String skillsDirectory() { + return ".qoder/skills/"; + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/QwenCodeConfigGenerator.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/QwenCodeConfigGenerator.java new file mode 100644 index 000000000..a3e0940dc --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/cli/QwenCodeConfigGenerator.java @@ -0,0 +1,228 @@ +package com.alibaba.himarket.service.hicoding.cli; + +import com.alibaba.himarket.service.hicoding.session.CustomModelConfig; +import com.alibaba.himarket.service.hicoding.session.ResolvedSessionConfig; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Qwen Code CLI 工具的配置文件生成器。 + * 生成 .qwen/settings.json 文件到工作目录下的 .qwen/ 子目录, + * 包含 modelProviders 和 env 字段。 + * 支持与已有 .qwen/settings.json 合并,保留用户已有的其他配置项。 + */ +public class QwenCodeConfigGenerator implements CliConfigGenerator { + + private static final Logger logger = LoggerFactory.getLogger(QwenCodeConfigGenerator.class); + + private static final String QWEN_DIR = ".qwen"; + private static final String 
CONFIG_FILE_NAME = "settings.json"; + + /** 协议类型到环境变量名的映射 */ + private static final Map PROTOCOL_ENV_KEY_MAP = + Map.of( + "openai", "OPENAI_API_KEY", + "anthropic", "ANTHROPIC_API_KEY", + "gemini", "GOOGLE_API_KEY"); + + private final ObjectMapper objectMapper; + + public QwenCodeConfigGenerator(ObjectMapper objectMapper) { + this.objectMapper = objectMapper; + } + + @Override + public String supportedProvider() { + return "qwen-code"; + } + + @Override + public Map generateConfig(String workingDirectory, CustomModelConfig config) + throws IOException { + Path qwenDir = Path.of(workingDirectory, QWEN_DIR); + Path configPath = qwenDir.resolve(CONFIG_FILE_NAME); + + // 1. 创建 .qwen/ 目录 + Files.createDirectories(qwenDir); + + // 2. 读取已有 settings.json(如存在) + Map root = readExistingConfig(configPath); + + // 3. 合并自定义 modelProviders 配置 + mergeCustomModelProvider(root, config); + + // 4. 写入 .qwen/settings.json + writeConfig(configPath, root); + + // 5. 返回对应的环境变量 map + String envKey = getEnvKeyForProtocol(config.getProtocolType()); + Map envVars = new HashMap<>(); + envVars.put(envKey, config.getApiKey()); + return envVars; + } + + @Override + public void generateMcpConfig( + String workingDirectory, List mcpServers) + throws IOException { + if (mcpServers == null || mcpServers.isEmpty()) return; + + Path qwenDir = Path.of(workingDirectory, QWEN_DIR); + Path configPath = qwenDir.resolve(CONFIG_FILE_NAME); + Files.createDirectories(qwenDir); + + Map root = readExistingConfig(configPath); + mergeMcpServers(root, mcpServers); + writeConfig(configPath, root); + } + + @Override + public String skillsDirectory() { + return ".qwen/skills/"; + } + + /** + * 将 MCP Server 列表合并到根配置的 mcpServers 段中。 + * 按 name 去重,新条目覆盖同名旧条目。 + */ + @SuppressWarnings("unchecked") + void mergeMcpServers( + Map root, List mcpServers) { + Map mcpServersMap = + root.containsKey("mcpServers") + ? 
(Map) root.get("mcpServers") + : new LinkedHashMap<>(); + + for (ResolvedSessionConfig.ResolvedMcpEntry entry : mcpServers) { + Map serverConfig = new LinkedHashMap<>(); + serverConfig.put("url", entry.getUrl()); + serverConfig.put("type", entry.getTransportType()); + if (entry.getHeaders() != null && !entry.getHeaders().isEmpty()) { + serverConfig.put("headers", entry.getHeaders()); + } + mcpServersMap.put(entry.getName(), serverConfig); + } + + root.put("mcpServers", mcpServersMap); + } + + /** + * 读取已有的 .qwen/settings.json 配置文件。 + * 如果文件不存在,返回空 map。 + * 如果文件内容不是合法 JSON,记录警告并返回空 map(后续会覆盖)。 + */ + Map readExistingConfig(Path configPath) { + if (!Files.exists(configPath)) { + return new LinkedHashMap<>(); + } + try { + String content = Files.readString(configPath); + Map existing = + objectMapper.readValue( + content, new TypeReference>() {}); + return existing != null ? existing : new LinkedHashMap<>(); + } catch (Exception e) { + logger.warn("已有 .qwen/settings.json 不是合法 JSON,将使用全新配置覆盖: {}", e.getMessage()); + return new LinkedHashMap<>(); + } + } + + /** + * 将自定义模型配置合并到根配置的 modelProviders 和 env 中。 + * 保留已有的其他 provider 条目,在对应 protocolType 的 provider 列表中追加或替换模型。 + * 如果已存在相同 id 的模型,用新配置替换它(用户明确想用自定义接入点)。 + */ + @SuppressWarnings("unchecked") + void mergeCustomModelProvider(Map root, CustomModelConfig config) { + String protocolType = config.getProtocolType(); + String envKey = getEnvKeyForProtocol(protocolType); + String modelName = + (config.getModelName() != null && !config.getModelName().isBlank()) + ? config.getModelName() + : config.getModelId(); + + // 构建模型条目 + Map modelEntry = new LinkedHashMap<>(); + modelEntry.put("id", config.getModelId()); + modelEntry.put("name", modelName); + modelEntry.put("envKey", envKey); + modelEntry.put("baseUrl", config.getBaseUrl()); + + // 合并到 modelProviders(保留已有 provider) + Map modelProviders = + root.containsKey("modelProviders") + ? 
(Map) root.get("modelProviders") + : new LinkedHashMap<>(); + + // 获取或创建对应 protocolType 的 provider 列表 + List> providerList = + modelProviders.containsKey(protocolType) + ? (List>) modelProviders.get(protocolType) + : new java.util.ArrayList<>(); + + // 移除已有的相同 id 的模型条目(避免重复,用新配置替换) + providerList.removeIf(entry -> config.getModelId().equals(entry.get("id"))); + + providerList.add(modelEntry); + modelProviders.put(protocolType, providerList); + root.put("modelProviders", modelProviders); + + // 设置 security.auth.selectedType,告诉 qwen CLI 已选择认证方式,跳过登录交互 + Map security = + root.containsKey("security") + ? (Map) root.get("security") + : new LinkedHashMap<>(); + Map auth = + security.containsKey("auth") + ? (Map) security.get("auth") + : new LinkedHashMap<>(); + auth.put("selectedType", protocolType); + security.put("auth", auth); + root.put("security", security); + + // 设置 model.name,指定默认使用的模型 + Map model = + root.containsKey("model") + ? (Map) root.get("model") + : new LinkedHashMap<>(); + model.put("name", config.getModelId()); + root.put("model", model); + + // 沙箱环境设置 tools.approvalMode 为 yolo,自动批准所有工具调用,避免非交互模式下卡在确认提示 + Map tools = + root.containsKey("tools") + ? 
(Map) root.get("tools") + : new LinkedHashMap<>(); + tools.put("approvalMode", "yolo"); + root.put("tools", tools); + } + + /** + * 根据协议类型获取对应的环境变量名。 + */ + static String getEnvKeyForProtocol(String protocolType) { + return PROTOCOL_ENV_KEY_MAP.getOrDefault(protocolType, "OPENAI_API_KEY"); + } + + /** + * 将配置写入 .qwen/settings.json 文件。 + */ + private void writeConfig(Path configPath, Map root) throws IOException { + String json = + objectMapper + .writer() + .with(SerializationFeature.INDENT_OUTPUT) + .writeValueAsString(root); + Files.writeString(configPath, json); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/BaseUrlExtractor.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/BaseUrlExtractor.java new file mode 100644 index 000000000..c1a8680de --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/BaseUrlExtractor.java @@ -0,0 +1,88 @@ +package com.alibaba.himarket.service.hicoding.filesystem; + +import com.alibaba.himarket.dto.result.common.DomainResult; +import com.alibaba.himarket.dto.result.httpapi.HttpRouteResult; +import java.util.List; + +/** + * 从 MODEL_API 产品的路由配置中提取 baseUrl 的工具类。 + * + *

提取规则: + *

    + *
  • 从 routes[0].domains[0] 中获取 protocol、domain、port
  • + *
  • 从 routes[0].match.path.value 中获取 pathPrefix(去掉 /chat/completions 后缀)
  • + *
  • 端口处理:null 或标准端口(http:80, https:443)时省略,非标准端口时包含
  • + *
+ * + *

输出格式:{protocol}://{domain}[:{port}]{pathPrefix} + */ +public class BaseUrlExtractor { + + private static final String CHAT_COMPLETIONS_SUFFIX = "/chat/completions"; + private static final int HTTP_DEFAULT_PORT = 80; + private static final int HTTPS_DEFAULT_PORT = 443; + + /** + * 从产品的路由配置中提取 baseUrl。 + * + * @param routes 产品的路由列表 + * @return 提取的 baseUrl,如果路由数据不完整则返回 null + */ + public static String extract(List routes) { + if (routes == null || routes.isEmpty()) { + return null; + } + + HttpRouteResult firstRoute = routes.get(0); + + // 提取 domain 信息 + List domains = firstRoute.getDomains(); + if (domains == null || domains.isEmpty()) { + return null; + } + + DomainResult domain = domains.get(0); + if (domain.getDomain() == null || domain.getProtocol() == null) { + return null; + } + + // 提取 path + if (firstRoute.getMatch() == null + || firstRoute.getMatch().getPath() == null + || firstRoute.getMatch().getPath().getValue() == null) { + return null; + } + + String protocol = domain.getProtocol(); + String host = domain.getDomain(); + Integer port = domain.getPort(); + String pathValue = firstRoute.getMatch().getPath().getValue(); + + // 拼接 baseUrl + StringBuilder sb = new StringBuilder(); + sb.append(protocol).append("://").append(host); + + // 端口处理:null 或标准端口时省略 + if (port != null && !isStandardPort(protocol, port)) { + sb.append(":").append(port); + } + + // path 处理:去掉 /chat/completions 后缀 + String pathPrefix = stripChatCompletionsSuffix(pathValue); + sb.append(pathPrefix); + + return sb.toString(); + } + + private static boolean isStandardPort(String protocol, int port) { + return ("http".equalsIgnoreCase(protocol) && port == HTTP_DEFAULT_PORT) + || ("https".equalsIgnoreCase(protocol) && port == HTTPS_DEFAULT_PORT); + } + + private static String stripChatCompletionsSuffix(String path) { + if (path.endsWith(CHAT_COMPLETIONS_SUFFIX)) { + return path.substring(0, path.length() - CHAT_COMPLETIONS_SUFFIX.length()); + } + return path; + } +} diff --git 
a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/FileEntry.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/FileEntry.java new file mode 100644 index 000000000..55abb1c81 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/FileEntry.java @@ -0,0 +1,6 @@ +package com.alibaba.himarket.service.hicoding.filesystem; + +/** + * 文件目录条目。 + */ +public record FileEntry(String name, boolean isDirectory, long size, long lastModified) {} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/FileInfo.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/FileInfo.java new file mode 100644 index 000000000..1fd9664f8 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/FileInfo.java @@ -0,0 +1,12 @@ +package com.alibaba.himarket.service.hicoding.filesystem; + +/** + * 文件元信息。 + */ +public record FileInfo( + String path, + boolean isDirectory, + long size, + long lastModified, + boolean readable, + boolean writable) {} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/FileSystemAdapter.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/FileSystemAdapter.java new file mode 100644 index 000000000..3685f0df4 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/FileSystemAdapter.java @@ -0,0 +1,65 @@ +package com.alibaba.himarket.service.hicoding.filesystem; + +import java.io.IOException; +import java.util.List; + +/** + * 统一的文件操作接口。 + *

+ * 为不同运行时提供一致的文件访问能力,屏蔽底层文件系统差异 + * (本地文件系统、K8s Pod 文件系统)。 + */ +public interface FileSystemAdapter { + + /** + * 读取文件内容。 + * + * @param relativePath 相对于工作空间根目录的路径 + * @return 文件内容 + * @throws IOException 读取失败时抛出 + */ + String readFile(String relativePath) throws IOException; + + /** + * 写入文件内容。 + * + * @param relativePath 相对于工作空间根目录的路径 + * @param content 文件内容 + * @throws IOException 写入失败时抛出 + */ + void writeFile(String relativePath, String content) throws IOException; + + /** + * 列举目录内容。 + * + * @param relativePath 相对于工作空间根目录的路径 + * @return 目录条目列表 + * @throws IOException 列举失败时抛出 + */ + List listDirectory(String relativePath) throws IOException; + + /** + * 创建目录。 + * + * @param relativePath 相对于工作空间根目录的路径 + * @throws IOException 创建失败时抛出 + */ + void createDirectory(String relativePath) throws IOException; + + /** + * 删除文件或目录。 + * + * @param relativePath 相对于工作空间根目录的路径 + * @throws IOException 删除失败时抛出 + */ + void delete(String relativePath) throws IOException; + + /** + * 获取文件元信息。 + * + * @param relativePath 相对于工作空间根目录的路径 + * @return 文件元信息 + * @throws IOException 获取失败时抛出 + */ + FileInfo getFileInfo(String relativePath) throws IOException; +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/FileSystemException.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/FileSystemException.java new file mode 100644 index 000000000..65e6f895e --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/FileSystemException.java @@ -0,0 +1,54 @@ +package com.alibaba.himarket.service.hicoding.filesystem; + +import com.alibaba.himarket.service.hicoding.sandbox.SandboxType; +import java.io.IOException; + +/** + * 文件系统操作异常,包含结构化的错误信息。 + *

+ * 统一错误格式包含 errorType(错误类型)和 sandboxType(沙箱类型), + * 便于上层业务代码进行统一的错误处理和展示。 + */ +public class FileSystemException extends IOException { + + /** 文件系统错误类型枚举 */ + public enum ErrorType { + FILE_NOT_FOUND, + PERMISSION_DENIED, + PATH_TRAVERSAL, + DISK_FULL, + NOT_A_DIRECTORY, + NOT_A_FILE, + ALREADY_EXISTS, + IO_ERROR + } + + private final ErrorType errorType; + private final SandboxType sandboxType; + + public FileSystemException(ErrorType errorType, SandboxType sandboxType, String message) { + super(formatMessage(errorType, sandboxType, message)); + this.errorType = errorType; + this.sandboxType = sandboxType; + } + + public FileSystemException( + ErrorType errorType, SandboxType sandboxType, String message, Throwable cause) { + super(formatMessage(errorType, sandboxType, message), cause); + this.errorType = errorType; + this.sandboxType = sandboxType; + } + + public ErrorType getErrorType() { + return errorType; + } + + public SandboxType getSandboxType() { + return sandboxType; + } + + private static String formatMessage( + ErrorType errorType, SandboxType sandboxType, String message) { + return "[" + sandboxType + "][" + errorType + "] " + message; + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/PathValidator.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/PathValidator.java new file mode 100644 index 000000000..313426ba3 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/PathValidator.java @@ -0,0 +1,91 @@ +package com.alibaba.himarket.service.hicoding.filesystem; + +import java.nio.file.InvalidPathException; +import java.nio.file.Path; + +/** + * 路径安全校验工具类。 + *

+ * 防止路径遍历攻击,确保所有文件操作限制在用户工作空间范围内。 + */ +public final class PathValidator { + + private PathValidator() { + // 工具类,禁止实例化 + } + + /** + * 校验相对路径的安全性,确保解析后的路径不会逃逸出基础目录。 + * + * @param basePath 工作空间基础目录的绝对路径 + * @param relativePath 待校验的相对路径 + * @return 校验通过后的规范化绝对路径 + * @throws SecurityException 当路径不安全时抛出 + */ + public static Path validatePath(String basePath, String relativePath) { + if (basePath == null || basePath.isEmpty()) { + throw new SecurityException("基础路径不能为空"); + } + if (relativePath == null || relativePath.isEmpty()) { + throw new SecurityException("相对路径不能为空"); + } + + // 拒绝包含 null 字节的路径 + if (relativePath.indexOf('\0') >= 0) { + throw new SecurityException("路径包含非法的 null 字节: " + sanitize(relativePath)); + } + + // 拒绝绝对路径(Unix 风格 / 或 Windows 驱动器号如 C:\) + if (relativePath.startsWith("/") || relativePath.startsWith("\\")) { + throw new SecurityException("不允许使用绝对路径: " + sanitize(relativePath)); + } + if (relativePath.length() >= 2 + && Character.isLetter(relativePath.charAt(0)) + && (relativePath.charAt(1) == ':')) { + throw new SecurityException("不允许使用绝对路径: " + sanitize(relativePath)); + } + + // 拒绝包含路径遍历模式的路径 + if (containsTraversalPattern(relativePath)) { + throw new SecurityException("路径包含非法的遍历模式: " + sanitize(relativePath)); + } + + // 规范化并验证最终路径在基础目录内 + try { + Path base = Path.of(basePath).toAbsolutePath().normalize(); + Path resolved = base.resolve(relativePath).normalize(); + + if (!resolved.startsWith(base)) { + throw new SecurityException("路径逃逸出工作空间目录: " + sanitize(relativePath)); + } + return resolved; + } catch (InvalidPathException e) { + throw new SecurityException("路径格式无效: " + sanitize(relativePath), e); + } + } + + /** + * 检查路径是否包含遍历模式(../ 或 ..\)。 + */ + private static boolean containsTraversalPattern(String path) { + // 检查 ../ 和 ..\ 模式 + if (path.contains("../") || path.contains("..\\")) { + return true; + } + // 检查路径是否恰好是 ".." 或以 ".." 
结尾 + if (path.equals("..") || path.endsWith("/..") || path.endsWith("\\..")) { + return true; + } + // 检查以 ../ 或 ..\ 开头 + return path.startsWith("..") + && path.length() > 2 + && (path.charAt(2) == '/' || path.charAt(2) == '\\'); + } + + /** + * 清理路径字符串用于日志/异常消息,移除 null 字节等不可见字符。 + */ + private static String sanitize(String path) { + return path.replace("\0", "\\0"); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/SidecarFileSystemAdapter.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/SidecarFileSystemAdapter.java new file mode 100644 index 000000000..49aed9780 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/filesystem/SidecarFileSystemAdapter.java @@ -0,0 +1,196 @@ +package com.alibaba.himarket.service.hicoding.filesystem; + +import com.alibaba.himarket.service.hicoding.sandbox.SandboxType; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.IOException; +import java.net.ConnectException; +import java.net.URI; +import java.net.http.HttpClient; +import java.net.http.HttpConnectTimeoutException; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.time.Duration; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * 基于 Sidecar HTTP API 的文件系统适配器。 + *

+ * 替代已废弃的 PodFileSystemAdapter,通过 Pod 内 Sidecar 提供的 HTTP 端点 + * 实现文件读写操作,避免使用不稳定的 kubectl exec 通道。 + */ +public class SidecarFileSystemAdapter implements FileSystemAdapter { + + private static final Logger logger = LoggerFactory.getLogger(SidecarFileSystemAdapter.class); + private static final SandboxType SANDBOX_TYPE = SandboxType.REMOTE; + private static final int SIDECAR_PORT = 8080; + private static final Duration HTTP_TIMEOUT = Duration.ofSeconds(30); + private static final String DEFAULT_BASE_PATH = "/workspace"; + + private final String host; + private final String basePath; + private final HttpClient httpClient; + private final ObjectMapper objectMapper; + + /** + * @param host Sidecar 访问地址(Pod IP 或 Service IP) + * @param basePath 工作空间根目录(默认 "/workspace") + */ + public SidecarFileSystemAdapter(String host, String basePath) { + if (host == null || host.isBlank()) { + throw new IllegalArgumentException("host must not be null or blank"); + } + this.host = host; + this.basePath = basePath != null ? 
basePath : DEFAULT_BASE_PATH; + this.httpClient = + HttpClient.newBuilder() + .connectTimeout(HTTP_TIMEOUT) + .version(HttpClient.Version.HTTP_1_1) + .build(); + this.objectMapper = new ObjectMapper(); + } + + /** + * 使用默认基础路径构造适配器。 + */ + public SidecarFileSystemAdapter(String host) { + this(host, DEFAULT_BASE_PATH); + } + + @Override + public String readFile(String relativePath) throws IOException { + String fullPath = resolveAndValidate(relativePath); + String url = buildUrl("/files/read"); + String body = objectMapper.writeValueAsString(Map.of("path", fullPath)); + HttpResponse response = sendPost(url, body); + if (response.statusCode() == 404) { + throw new FileSystemException( + FileSystemException.ErrorType.FILE_NOT_FOUND, + SANDBOX_TYPE, + "文件不存在: " + relativePath); + } + if (response.statusCode() != 200) { + throw new FileSystemException( + FileSystemException.ErrorType.IO_ERROR, + SANDBOX_TYPE, + "读取文件失败: " + relativePath + " (status=" + response.statusCode() + ")"); + } + JsonNode json = objectMapper.readTree(response.body()); + return json.has("content") ? 
json.get("content").asText() : ""; + } + + @Override + public void writeFile(String relativePath, String content) throws IOException { + String fullPath = resolveAndValidate(relativePath); + String url = buildUrl("/files/write"); + String body = objectMapper.writeValueAsString(Map.of("path", fullPath, "content", content)); + HttpResponse response = sendPost(url, body); + if (response.statusCode() != 200) { + throw new FileSystemException( + FileSystemException.ErrorType.IO_ERROR, + SANDBOX_TYPE, + "写入文件失败: " + relativePath + " (status=" + response.statusCode() + ")"); + } + } + + @Override + public List listDirectory(String relativePath) throws IOException { + String fullPath = resolveAndValidate(relativePath); + String url = buildUrl("/files/list"); + String body = objectMapper.writeValueAsString(Map.of("path", fullPath, "depth", 1)); + HttpResponse response = sendPost(url, body); + if (response.statusCode() == 404) { + throw new FileSystemException( + FileSystemException.ErrorType.FILE_NOT_FOUND, + SANDBOX_TYPE, + "目录不存在: " + relativePath); + } + if (response.statusCode() != 200) { + throw new FileSystemException( + FileSystemException.ErrorType.IO_ERROR, + SANDBOX_TYPE, + "列举目录失败: " + relativePath + " (status=" + response.statusCode() + ")"); + } + List> items = + objectMapper.readValue(response.body(), new TypeReference<>() {}); + List entries = new ArrayList<>(); + for (Map item : items) { + String name = (String) item.get("name"); + boolean isDir = "dir".equals(item.get("type")); + long size = item.containsKey("size") ? 
((Number) item.get("size")).longValue() : 0L; + entries.add(new FileEntry(name, isDir, size, 0L)); + } + return entries; + } + + @Override + public void createDirectory(String relativePath) throws IOException { + String fullPath = resolveAndValidate(relativePath); + String url = buildUrl("/files/mkdir"); + String body = objectMapper.writeValueAsString(Map.of("path", fullPath)); + HttpResponse response = sendPost(url, body); + if (response.statusCode() != 200) { + throw new FileSystemException( + FileSystemException.ErrorType.IO_ERROR, + SANDBOX_TYPE, + "创建目录失败: " + relativePath + " (status=" + response.statusCode() + ")"); + } + } + + @Override + public void delete(String relativePath) throws IOException { + // Sidecar 可能没有 delete 端点,暂时抛出不支持异常 + throw new FileSystemException( + FileSystemException.ErrorType.IO_ERROR, SANDBOX_TYPE, "Sidecar 不支持删除操作"); + } + + @Override + public FileInfo getFileInfo(String relativePath) throws IOException { + // Sidecar 可能没有 stat 端点,暂时抛出不支持异常 + throw new FileSystemException( + FileSystemException.ErrorType.IO_ERROR, SANDBOX_TYPE, "Sidecar 不支持获取文件信息操作"); + } + + // ===== 内部辅助方法 ===== + + private String resolveAndValidate(String relativePath) throws FileSystemException { + try { + PathValidator.validatePath(basePath, relativePath); + } catch (SecurityException e) { + throw new FileSystemException( + FileSystemException.ErrorType.PATH_TRAVERSAL, SANDBOX_TYPE, e.getMessage()); + } + return basePath + "/" + relativePath; + } + + private String buildUrl(String endpoint) { + return "http://" + host + ":" + SIDECAR_PORT + endpoint; + } + + private HttpResponse sendPost(String url, String body) throws IOException { + try { + return httpClient.send( + HttpRequest.newBuilder(URI.create(url)) + .POST(HttpRequest.BodyPublishers.ofString(body)) + .header("Content-Type", "application/json") + .timeout(HTTP_TIMEOUT) + .build(), + HttpResponse.BodyHandlers.ofString()); + } catch (ConnectException | HttpConnectTimeoutException e) { + 
logger.error("Sidecar 不可达: {}", url, e); + throw new FileSystemException( + FileSystemException.ErrorType.IO_ERROR, + SANDBOX_TYPE, + "Sidecar 连接失败: " + url, + e); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new IOException("Sidecar 请求被中断: " + url, e); + } + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/runtime/RemoteRuntimeAdapter.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/runtime/RemoteRuntimeAdapter.java new file mode 100644 index 000000000..94ec0a2bb --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/runtime/RemoteRuntimeAdapter.java @@ -0,0 +1,520 @@ +package com.alibaba.himarket.service.hicoding.runtime; + +import com.alibaba.himarket.service.hicoding.filesystem.FileSystemAdapter; +import com.alibaba.himarket.service.hicoding.filesystem.SidecarFileSystemAdapter; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxType; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.IOException; +import java.net.URI; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.web.reactive.socket.WebSocketMessage; +import org.springframework.web.reactive.socket.client.ReactorNettyWebSocketClient; +import reactor.core.Disposable; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.core.publisher.Sinks; + +/** + * 远程 Sidecar 运行时适配器。 + * + *

通过 WebSocket 连接远程 Sidecar 服务与 CLI 进程通信。 不依赖 K8s API,健康状态完全基于 WebSocket + * 连接状态判断。 Sidecar 可以部署在 K8s、Docker、裸机等任意环境。 + * + *

支持 detach/reconnect 语义:WebSocket 断开时进入 DETACHED 状态, sidecar 端的 CLI + * 进程继续运行并缓冲输出,后续可通过 reconnect() 重新连接。 + */ +public class RemoteRuntimeAdapter implements RuntimeAdapter { + + private static final Logger logger = LoggerFactory.getLogger(RemoteRuntimeAdapter.class); + private static final ObjectMapper CONTROL_MSG_MAPPER = new ObjectMapper(); + + static final long WS_PING_INTERVAL_SECONDS = 10; + + private final String host; + private final int port; + + private final Sinks.Many stdoutSink = + Sinks.many().multicast().onBackpressureBuffer(256, false); + private Sinks.Many wsSendSink = Sinks.many().unicast().onBackpressureBuffer(); + private volatile RuntimeStatus status = RuntimeStatus.CREATING; + private volatile String sidecarSessionId; + private URI sidecarWsUri; + private SidecarFileSystemAdapter fileSystem; + private Disposable wsConnection; + private ScheduledFuture wsPingFuture; + private final AtomicReference + wsSessionRef = new AtomicReference<>(); + private final ScheduledExecutorService scheduler; + + private Consumer faultListener; + + public RemoteRuntimeAdapter(String host, int port) { + if (host == null || host.isBlank()) { + throw new IllegalArgumentException("host must not be null or blank"); + } + this.host = host; + this.port = port; + this.fileSystem = new SidecarFileSystemAdapter(host); + this.scheduler = + Executors.newSingleThreadScheduledExecutor( + r -> { + Thread t = new Thread(r, "remote-runtime-scheduler"); + t.setDaemon(true); + return t; + }); + } + + @Override + public SandboxType getType() { + return SandboxType.REMOTE; + } + + @Override + public String start(RuntimeConfig config) throws RuntimeException { + throw new UnsupportedOperationException("使用 connect(URI) 方法连接远程 Sidecar"); + } + + /** + * 连接到远程 Sidecar WebSocket 端点。 + */ + public void connect(URI wsUri) { + if (status != RuntimeStatus.CREATING) { + throw new RuntimeException("Cannot connect: current status is " + status); + } + this.sidecarWsUri = wsUri; + + try { + 
connectWebSocket(wsUri); + startWsPing(); + status = RuntimeStatus.RUNNING; + } catch (Exception e) { + status = RuntimeStatus.ERROR; + throw new RuntimeException("Failed to connect to remote sidecar: " + e.getMessage(), e); + } + } + + /** + * 获取 sidecar 分配的会话 ID。 通过 sidecar 的 session_meta 控制消息获取,首次连接后可用。 + */ + public String getSidecarSessionId() { + return sidecarSessionId; + } + + /** + * 将适配器从 RUNNING 状态切换到 DETACHED 状态。 关闭 WebSocket 连接和 ping,但保留 stdoutSink + * 以供后续 reattach。 + */ + public void detach() { + if (status != RuntimeStatus.RUNNING) { + logger.warn("Cannot detach: current status is {}", status); + return; + } + logger.info( + "Detaching RemoteRuntimeAdapter: host={}:{}, sidecarSessionId={}", + host, + port, + sidecarSessionId); + + // 先设置状态,使得 WS 关闭触发的 doOnError/doOnComplete 不会误判为异常 + status = RuntimeStatus.DETACHED; + + if (wsPingFuture != null) { + wsPingFuture.cancel(false); + wsPingFuture = null; + } + + wsSendSink.tryEmitComplete(); + + if (wsConnection != null) { + wsConnection.dispose(); + wsConnection = null; + } + var wsSession = wsSessionRef.getAndSet(null); + if (wsSession != null) { + wsSession.close().subscribe(); + } + // 注意:不 complete stdoutSink,保持它可用于 reattach + } + + /** + * 从 DETACHED 状态重新连接到 sidecar,使用已存储的 sidecarSessionId。 + */ + public void reconnect() { + if (sidecarSessionId == null) { + throw new RuntimeException("Cannot reconnect: no sidecarSessionId available"); + } + URI attachUri = + URI.create( + "ws://" + + host + + ":" + + port + + "/?sessionId=" + + URLEncoder.encode(sidecarSessionId, StandardCharsets.UTF_8)); + reconnect(attachUri); + } + + /** + * 从 DETACHED 状态重新连接到指定的 sidecar WebSocket URI。 + */ + public void reconnect(URI wsUri) { + if (status != RuntimeStatus.DETACHED) { + throw new RuntimeException("Cannot reconnect: current status is " + status); + } + logger.info("Reconnecting RemoteRuntimeAdapter to: {}", wsUri); + + this.sidecarWsUri = wsUri; + this.wsSendSink = Sinks.many().unicast().onBackpressureBuffer(); 
+ + try { + connectWebSocket(wsUri); + startWsPing(); + status = RuntimeStatus.RUNNING; + } catch (Exception e) { + status = RuntimeStatus.ERROR; + throw new RuntimeException( + "Failed to reconnect to remote sidecar: " + e.getMessage(), e); + } + } + + @Override + public void send(String jsonLine) throws IOException { + if (status != RuntimeStatus.RUNNING) { + throw new IOException("Remote runtime is not running, current status: " + status); + } + Sinks.EmitResult result = wsSendSink.tryEmitNext(jsonLine); + if (result.isFailure()) { + throw new IOException("Failed to send message to sidecar, emit result: " + result); + } + } + + @Override + public Flux stdout() { + return stdoutSink.asFlux(); + } + + @Override + public RuntimeStatus getStatus() { + if (status == RuntimeStatus.RUNNING) { + var session = wsSessionRef.get(); + if (session == null || !session.isOpen()) { + status = RuntimeStatus.ERROR; + } + } + return status; + } + + @Override + public boolean isAlive() { + if (status != RuntimeStatus.RUNNING) { + return false; + } + var session = wsSessionRef.get(); + return session != null && session.isOpen(); + } + + @Override + public void close() { + if (status == RuntimeStatus.STOPPED) { + return; + } + logger.info("Closing RemoteRuntimeAdapter: host={}:{}", host, port); + + if (wsPingFuture != null) { + wsPingFuture.cancel(false); + wsPingFuture = null; + } + + wsSendSink.tryEmitComplete(); + if (wsConnection != null) { + wsConnection.dispose(); + wsConnection = null; + } + var wsSession = wsSessionRef.getAndSet(null); + if (wsSession != null) { + wsSession.close().subscribe(); + } + + stdoutSink.tryEmitComplete(); + scheduler.shutdownNow(); + status = RuntimeStatus.STOPPED; + } + + @Override + public FileSystemAdapter getFileSystem() { + return fileSystem; + } + + // ===== 公共方法 ===== + + public void setFaultListener(Consumer listener) { + this.faultListener = listener; + } + + // ===== 内部方法 ===== + + private void connectWebSocket(URI wsUri) { + 
logger.info("Connecting to remote sidecar WebSocket: {}", wsUri); + ReactorNettyWebSocketClient wsClient = + new ReactorNettyWebSocketClient( + reactor.netty.http.client.HttpClient.create() + .responseTimeout(Duration.ofSeconds(30))); + wsClient.setHandlePing(true); + wsClient.setMaxFramePayloadLength(1024 * 1024); + CountDownLatch connectedLatch = new CountDownLatch(1); + + wsConnection = + wsClient.execute( + wsUri, + session -> { + wsSessionRef.set(session); + logger.info( + "[WS-Remote] Session established: host={}:{}," + + " sessionId={}", + host, + port, + session.getId()); + + Mono receive = + session.receive() + .doOnNext( + msg -> { + if (msg.getType() + == WebSocketMessage.Type + .PONG) { + return; + } + String text = + msg.getPayloadAsText(); + + // Sidecar 可能在单个 WebSocket + // 帧中发送多条 JSONL + // 消息(换行分隔), + // 需逐行拆分后分别处理, + // 否则前端 JSON.parse 会失败 + if (text.indexOf('\n') < 0) { + processReceivedLine(text); + } else { + for (String line : + text.split("\n")) { + if (!line.isBlank()) { + processReceivedLine( + line); + } + } + } + }) + .doOnError( + err -> { + // detach 期间 WS 关闭是预期行为 + if (status + == RuntimeStatus.DETACHED) { + logger.debug( + "[WS-Remote] WS error" + + " during detach" + + " (expected): {}", + err.getMessage()); + return; + } + logger.warn( + "[WS-Remote] Receive error:" + + " {}", + err.getMessage()); + status = RuntimeStatus.ERROR; + notifyFault( + RuntimeFaultNotification + .FAULT_CONNECTION_LOST, + RuntimeFaultNotification + .ACTION_RECONNECT); + }) + .doOnComplete( + () -> { + if (status + == RuntimeStatus.DETACHED) { + logger.debug( + "[WS-Remote] WS" + + " completed" + + " during detach" + + " (expected)"); + return; + } + logger.warn( + "[WS-Remote] Receive stream" + + " completed (sidecar" + + " closed)"); + status = RuntimeStatus.ERROR; + notifyFault( + RuntimeFaultNotification + .FAULT_CONNECTION_LOST, + RuntimeFaultNotification + .ACTION_RECONNECT); + }) + .then(); + + Mono send = + session.send( + wsSendSink + .asFlux() + 
.doOnNext( + msg -> + logger.info( + "[WS-Remote]" + + " Sending:" + + " {}", + msg)) + .map(session::textMessage)); + + connectedLatch.countDown(); + return Mono.when(receive, send); + }) + .subscribe( + unused -> logger.info("[WS-Remote] Connection completed normally"), + err -> { + logger.error( + "[WS-Remote] Connection failed: {}", err.getMessage()); + connectedLatch.countDown(); + }); + + try { + if (!connectedLatch.await(10, TimeUnit.SECONDS)) { + throw new RuntimeException( + "Timeout waiting for WebSocket connection to sidecar at " + wsUri); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new RuntimeException("Interrupted while connecting to sidecar WebSocket", e); + } + + if (wsSessionRef.get() == null) { + throw new RuntimeException( + "Failed to establish WebSocket connection to sidecar at " + wsUri); + } + logger.info("WebSocket connected to remote sidecar: {}", wsUri); + } + + /** + * 处理单条接收到的消息行:拦截控制消息或转发到 stdoutSink。 + */ + private void processReceivedLine(String line) { + if (isControlMessage(line)) { + handleControlMessage(line); + return; + } + logger.info("[WS-Remote] Received: {}", line); + stdoutSink.tryEmitNext(line); + } + + /** + * 判断是否为 sidecar 控制消息(session_meta、buffer_truncated、process_exited)。 + */ + private boolean isControlMessage(String text) { + return text.contains("\"type\":") + && (text.contains("\"session_meta\"") + || text.contains("\"buffer_truncated\"") + || text.contains("\"process_exited\"")); + } + + /** + * 处理 sidecar 控制消息,不转发到 stdoutSink(process_exited 除外)。 + */ + private void handleControlMessage(String text) { + try { + JsonNode node = CONTROL_MSG_MAPPER.readTree(text); + String type = node.has("type") ? node.get("type").asText() : null; + + if ("session_meta".equals(type)) { + if (node.has("sessionId")) { + sidecarSessionId = node.get("sessionId").asText(); + } + logger.info( + "[WS-Remote] session_meta: sidecarSessionId={}, state={}", + sidecarSessionId, + node.has("state") ? 
node.get("state").asText() : "unknown"); + return; + } + + if ("buffer_truncated".equals(type)) { + long dropped = node.has("droppedBytes") ? node.get("droppedBytes").asLong() : 0; + logger.warn("[WS-Remote] Buffer truncated: droppedBytes={}", dropped); + return; + } + + if ("process_exited".equals(type)) { + int code = node.has("code") ? node.get("code").asInt(-1) : -1; + String signal = node.has("signal") ? node.get("signal").asText(null) : null; + logger.info("[WS-Remote] Process exited: code={}, signal={}", code, signal); + // 转发给前端,让前端感知 CLI 进程退出 + stdoutSink.tryEmitNext(text); + return; + } + } catch (Exception e) { + logger.debug( + "Failed to parse control message, forwarding as stdout: {}", e.getMessage()); + } + // 无法解析为控制消息,作为普通 stdout 转发 + stdoutSink.tryEmitNext(text); + } + + private void startWsPing() { + wsPingFuture = + scheduler.scheduleAtFixedRate( + () -> { + try { + var session = wsSessionRef.get(); + if (session == null || !session.isOpen()) { + return; + } + session.send( + Mono.just( + session.pingMessage( + factory -> + factory.wrap( + "ping" + .getBytes( + StandardCharsets + .UTF_8))))) + .subscribe( + unused -> {}, + err -> + logger.warn( + "[WS-Ping] Failed: {}", + err.getMessage())); + } catch (Exception e) { + logger.warn("[WS-Ping] Error: {}", e.getMessage()); + } + }, + WS_PING_INTERVAL_SECONDS, + WS_PING_INTERVAL_SECONDS, + TimeUnit.SECONDS); + } + + private void notifyFault(String faultType, String suggestedAction) { + if (faultListener != null) { + try { + faultListener.accept( + new RuntimeFaultNotification( + faultType, SandboxType.REMOTE, suggestedAction)); + } catch (Exception e) { + logger.warn("Error notifying fault listener: {}", e.getMessage()); + } + } + } + + // ===== 用于测试的 Getter ===== + + URI getSidecarWsUri() { + return sidecarWsUri; + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeAdapter.java 
b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeAdapter.java new file mode 100644 index 000000000..31c96ca15 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeAdapter.java @@ -0,0 +1,72 @@ +package com.alibaba.himarket.service.hicoding.runtime; + +import com.alibaba.himarket.service.hicoding.filesystem.FileSystemAdapter; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxType; +import java.io.IOException; +import reactor.core.publisher.Flux; + +/** + * 运行时抽象层核心接口。 + *

+ * 定义所有运行时实现(Local、K8s)必须遵循的统一契约, + * 屏蔽底层运行时差异,使上层业务代码无需感知具体运行时类型。 + */ +public interface RuntimeAdapter { + + /** + * 获取运行时类型标识。 + */ + SandboxType getType(); + + /** + * 启动运行时实例。 + * + * @param config 运行时配置 + * @return 实例 ID + * @throws RuntimeException 启动失败时抛出 + */ + String start(RuntimeConfig config) throws RuntimeException; + + /** + * 发送 JSON-RPC 消息到 CLI 进程。 + * + * @param jsonLine JSON-RPC 消息字符串 + * @throws IOException 发送失败时抛出 + */ + void send(String jsonLine) throws IOException; + + /** + * 获取 CLI 进程输出的响应式流。 + *

+ * 每个元素是一行 JSON-RPC 响应消息。 + * + * @return stdout 响应式流 + */ + Flux stdout(); + + /** + * 查询运行时实例当前状态。 + * + * @return 运行时状态 + */ + RuntimeStatus getStatus(); + + /** + * 检查运行时实例是否存活。 + * + * @return true 表示存活 + */ + boolean isAlive(); + + /** + * 优雅关闭运行时实例,释放相关资源。 + */ + void close(); + + /** + * 获取文件系统适配器,用于操作运行时工作空间中的文件。 + * + * @return 文件系统适配器 + */ + FileSystemAdapter getFileSystem(); +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeConfig.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeConfig.java new file mode 100644 index 000000000..63080e1eb --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeConfig.java @@ -0,0 +1,65 @@ +package com.alibaba.himarket.service.hicoding.runtime; + +import java.util.List; +import java.util.Map; + +/** + * 运行时配置数据类,封装创建运行时实例所需的全部参数。 + */ +public class RuntimeConfig { + + private String userId; + private String providerKey; + private String command; + private List args; + private String cwd; + private Map env; + + public String getUserId() { + return userId; + } + + public void setUserId(String userId) { + this.userId = userId; + } + + public String getProviderKey() { + return providerKey; + } + + public void setProviderKey(String providerKey) { + this.providerKey = providerKey; + } + + public String getCommand() { + return command; + } + + public void setCommand(String command) { + this.command = command; + } + + public List getArgs() { + return args; + } + + public void setArgs(List args) { + this.args = args; + } + + public String getCwd() { + return cwd; + } + + public void setCwd(String cwd) { + this.cwd = cwd; + } + + public Map getEnv() { + return env; + } + + public void setEnv(Map env) { + this.env = env; + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeFaultNotification.java 
b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeFaultNotification.java new file mode 100644 index 000000000..e8b69f02a --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeFaultNotification.java @@ -0,0 +1,40 @@ +package com.alibaba.himarket.service.hicoding.runtime; + +import com.alibaba.himarket.service.hicoding.sandbox.SandboxType; + +/** + * 统一的运行时异常通知格式。 + *

+ * 当运行时实例发生异常(健康检查失败、进程崩溃、通信中断等)时, + * 通过此记录向上层发送标准化的故障通知。 + * + * @param faultType 故障类型(如 PROCESS_CRASHED、HEALTH_CHECK_FAILURE、IDLE_TIMEOUT、CONNECTION_LOST) + * @param sandboxType 沙箱类型(LOCAL、K8S) + * @param suggestedAction 建议操作(如 RECONNECT、RESTART、DESTROY) + * + * Requirements: 8.5 + */ +public record RuntimeFaultNotification( + String faultType, SandboxType sandboxType, String suggestedAction) { + + /** 进程崩溃 */ + public static final String FAULT_PROCESS_CRASHED = "PROCESS_CRASHED"; + + /** 健康检查失败 */ + public static final String FAULT_HEALTH_CHECK_FAILURE = "HEALTH_CHECK_FAILURE"; + + /** 空闲超时 */ + public static final String FAULT_IDLE_TIMEOUT = "IDLE_TIMEOUT"; + + /** 连接丢失 */ + public static final String FAULT_CONNECTION_LOST = "CONNECTION_LOST"; + + /** 建议重新连接 */ + public static final String ACTION_RECONNECT = "RECONNECT"; + + /** 建议重新启动 */ + public static final String ACTION_RESTART = "RESTART"; + + /** 已销毁,需重新创建 */ + public static final String ACTION_RECREATE = "RECREATE"; +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeStatus.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeStatus.java new file mode 100644 index 000000000..bd34af527 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeStatus.java @@ -0,0 +1,22 @@ +package com.alibaba.himarket.service.hicoding.runtime; + +/** + * 运行时实例状态枚举。 + */ +public enum RuntimeStatus { + + /** 创建中 */ + CREATING, + + /** 运行中 */ + RUNNING, + + /** WS 已断开,但 sidecar session 可能仍存活 */ + DETACHED, + + /** 已停止 */ + STOPPED, + + /** 异常 */ + ERROR +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/ConfigFile.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/ConfigFile.java new file mode 100644 index 000000000..c895456c1 --- /dev/null +++ 
b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/ConfigFile.java @@ -0,0 +1,11 @@ +package com.alibaba.himarket.service.hicoding.sandbox; + +public record ConfigFile(String relativePath, String content, String contentHash, ConfigType type) { + + public enum ConfigType { + MODEL_SETTINGS, + MCP_CONFIG, + SKILL_CONFIG, + CUSTOM + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/E2BSandboxProvider.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/E2BSandboxProvider.java new file mode 100644 index 000000000..ac6d2a469 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/E2BSandboxProvider.java @@ -0,0 +1,54 @@ +package com.alibaba.himarket.service.hicoding.sandbox; + +import com.alibaba.himarket.service.hicoding.runtime.RuntimeAdapter; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeConfig; +import java.io.IOException; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.stereotype.Component; + +/** + * E2B 云沙箱提供者(空实现)。 + * + *

预留 E2B 对接接口,当前所有操作方法抛出 {@link UnsupportedOperationException}。 + * 仅在 {@code acp.e2b.enabled=true} 时注册到 Spring 容器。 + */ +@Component +@ConditionalOnProperty(name = "acp.e2b.enabled", havingValue = "true") +public class E2BSandboxProvider implements SandboxProvider { + + @Override + public SandboxType getType() { + return SandboxType.E2B; + } + + @Override + public SandboxInfo acquire(SandboxConfig config) { + throw new UnsupportedOperationException("E2B 尚未实现"); + } + + @Override + public void release(SandboxInfo info) { + // 空实现 + } + + @Override + public boolean healthCheck(SandboxInfo info) { + return false; + } + + @Override + public void writeFile(SandboxInfo info, String relativePath, String content) + throws IOException { + throw new UnsupportedOperationException("E2B 尚未实现"); + } + + @Override + public String readFile(SandboxInfo info, String relativePath) throws IOException { + throw new UnsupportedOperationException("E2B 尚未实现"); + } + + @Override + public RuntimeAdapter connectSidecar(SandboxInfo info, RuntimeConfig config) { + throw new UnsupportedOperationException("E2B 尚未实现"); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/ExecResult.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/ExecResult.java new file mode 100644 index 000000000..679270db5 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/ExecResult.java @@ -0,0 +1,6 @@ +package com.alibaba.himarket.service.hicoding.sandbox; + +/** + * 沙箱命令执行结果。 + */ +public record ExecResult(int exitCode, String stdout, String stderr) {} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/OpenSandboxProvider.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/OpenSandboxProvider.java new file mode 100644 index 000000000..d6a3137c5 --- /dev/null +++ 
b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/OpenSandboxProvider.java @@ -0,0 +1,54 @@ +package com.alibaba.himarket.service.hicoding.sandbox; + +import com.alibaba.himarket.service.hicoding.runtime.RuntimeAdapter; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeConfig; +import java.io.IOException; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.stereotype.Component; + +/** + * OpenSandbox 沙箱提供者(空实现)。 + * + *

预留 OpenSandbox 对接接口,当前所有操作方法抛出 {@link UnsupportedOperationException}。 + * 仅在 {@code acp.open-sandbox.enabled=true} 时注册到 Spring 容器。 + */ +@Component +@ConditionalOnProperty(name = "acp.open-sandbox.enabled", havingValue = "true") +public class OpenSandboxProvider implements SandboxProvider { + + @Override + public SandboxType getType() { + return SandboxType.OPEN_SANDBOX; + } + + @Override + public SandboxInfo acquire(SandboxConfig config) { + throw new UnsupportedOperationException("OpenSandbox 尚未实现"); + } + + @Override + public void release(SandboxInfo info) { + // 空实现 + } + + @Override + public boolean healthCheck(SandboxInfo info) { + return false; + } + + @Override + public void writeFile(SandboxInfo info, String relativePath, String content) + throws IOException { + throw new UnsupportedOperationException("OpenSandbox 尚未实现"); + } + + @Override + public String readFile(SandboxInfo info, String relativePath) throws IOException { + throw new UnsupportedOperationException("OpenSandbox 尚未实现"); + } + + @Override + public RuntimeAdapter connectSidecar(SandboxInfo info, RuntimeConfig config) { + throw new UnsupportedOperationException("OpenSandbox 尚未实现"); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/PodInfo.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/PodInfo.java new file mode 100644 index 000000000..e25b396e3 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/PodInfo.java @@ -0,0 +1,15 @@ +package com.alibaba.himarket.service.hicoding.sandbox; + +import java.net.URI; + +/** + * Pod 信息传递对象,作为 acquirePod 的返回值。 + * + * @param podName Pod 名称 + * @param podIp Pod IP 地址 + * @param serviceIp Service ClusterIP/LoadBalancer IP(可为 null,仅在创建了 Service 时有值) + * @param sidecarWsUri Sidecar WebSocket 端点 URI + * @param reused 是否为复用的已有 Pod + */ +public record PodInfo( + String podName, String podIp, String serviceIp, URI sidecarWsUri, boolean reused) 
{} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/RemoteSandboxProvider.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/RemoteSandboxProvider.java new file mode 100644 index 000000000..097405a01 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/RemoteSandboxProvider.java @@ -0,0 +1,163 @@ +package com.alibaba.himarket.service.hicoding.sandbox; + +import com.alibaba.himarket.config.AcpProperties; +import com.alibaba.himarket.service.hicoding.runtime.RemoteRuntimeAdapter; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeAdapter; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeConfig; +import java.io.IOException; +import java.net.URI; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Component; + +/** + * 远程沙箱提供者。 + * + *

连接远程 Sidecar 服务,不依赖 K8s API。 Sidecar 可以部署在 K8s Pod、Docker 容器或裸机上,只要地址可达即可。 + * 所有用户共用同一个 Sidecar,通过 {@code /workspace/{userId}} 实现工作目录隔离。 + * + *

文件操作使用绝对路径(参考 OpenSandbox execd 设计),由本 Provider 负责将相对路径 + * 转换为基于 {@code workspacePath} 的绝对路径,Sidecar 端不再需要知道用户上下文。 + * + *

文件操作委托给 {@link SandboxHttpClient},WebSocket 连接复用 {@link RemoteRuntimeAdapter}。 + */ +@Component +public class RemoteSandboxProvider implements SandboxProvider { + + private static final Logger logger = LoggerFactory.getLogger(RemoteSandboxProvider.class); + + private final SandboxHttpClient sandboxHttpClient; + private final AcpProperties acpProperties; + + public RemoteSandboxProvider(SandboxHttpClient sandboxHttpClient, AcpProperties acpProperties) { + this.sandboxHttpClient = sandboxHttpClient; + this.acpProperties = acpProperties; + } + + @Override + public SandboxType getType() { + return SandboxType.REMOTE; + } + + @Override + public SandboxInfo acquire(SandboxConfig config) { + if (config.userId() == null || config.userId().isBlank()) { + throw new IllegalArgumentException("userId 不能为空"); + } + String userId = config.userId(); + if (userId.contains("..") || userId.contains("/")) { + throw new IllegalArgumentException("userId 包含非法字符: " + userId); + } + + AcpProperties.RemoteConfig remoteConfig = acpProperties.getRemote(); + String host = remoteConfig.getHost(); + int port = remoteConfig.getPort(); + + String workspacePath = "/workspace/" + userId; + + logger.info( + "[RemoteSandboxProvider] acquire: userId={}, host={}:{}, workspacePath={}", + userId, + host, + port, + workspacePath); + + return new SandboxInfo( + SandboxType.REMOTE, "sandbox-remote", host, port, workspacePath, true, Map.of()); + } + + @Override + public void release(SandboxInfo info) { + // 空操作:远程 Sidecar 生命周期由外部管理 + } + + @Override + public boolean healthCheck(SandboxInfo info) { + return sandboxHttpClient.healthCheckWithLog(sidecarBaseUrl(info), info.sandboxId()); + } + + /** + * 写入文件到沙箱。将相对路径转换为基于 workspacePath 的绝对路径。 + * + *

例如:relativePath=".qwen/settings.json", workspacePath="/workspace/dev-xxx" + * → 实际写入 "/workspace/dev-xxx/.qwen/settings.json" + */ + @Override + public void writeFile(SandboxInfo info, String relativePath, String content) + throws IOException { + String absolutePath = toAbsolutePath(info, relativePath); + sandboxHttpClient.writeFile(sidecarBaseUrl(info), info.sandboxId(), absolutePath, content); + } + + /** + * 从沙箱读取文件。将相对路径转换为基于 workspacePath 的绝对路径。 + */ + @Override + public String readFile(SandboxInfo info, String relativePath) throws IOException { + String absolutePath = toAbsolutePath(info, relativePath); + return sandboxHttpClient.readFile(sidecarBaseUrl(info), info.sandboxId(), absolutePath); + } + + @Override + public ExecResult exec( + SandboxInfo info, + String command, + java.util.List args, + java.time.Duration timeout) + throws IOException { + return sandboxHttpClient.exec( + sidecarBaseUrl(info), info.sandboxId(), command, args, timeout); + } + + @Override + public RuntimeAdapter connectSidecar(SandboxInfo info, RuntimeConfig config) { + RemoteRuntimeAdapter adapter = new RemoteRuntimeAdapter(info.host(), info.sidecarPort()); + + String command = config.getCommand(); + String args = config.getArgs() != null ? String.join(" ", config.getArgs()) : null; + + URI wsUri = + info.sidecarWsUri( + command, args != null ? args : "", config.getEnv(), info.workspacePath()); + + adapter.connect(wsUri); + return adapter; + } + + private String sidecarBaseUrl(SandboxInfo info) { + return "http://" + info.host() + ":" + info.sidecarPort(); + } + + /** + * 将相对路径转换为基于 workspacePath 的绝对路径。 + * + *

参考 OpenSandbox execd 设计:文件操作使用绝对路径,由调用方负责构建。 + * Sidecar 端不再依赖 WORKSPACE_ROOT 做路径解析。 + */ + private String toAbsolutePath(SandboxInfo info, String relativePath) { + String wp = info.workspacePath(); + if (wp == null || wp.isEmpty()) { + return relativePath; + } + String cleaned = relativePath; + if (cleaned.startsWith("./")) { + cleaned = cleaned.substring(2); + } else if (cleaned.startsWith("/")) { + // 绝对路径:校验是否在 workspacePath 范围内 + Path normalized = Paths.get(cleaned).normalize(); + if (!normalized.startsWith(Paths.get(wp).normalize())) { + throw new SecurityException("路径越界: " + relativePath); + } + return normalized.toString(); + } + String full = wp.endsWith("/") ? wp + cleaned : wp + "/" + cleaned; + Path normalized = Paths.get(full).normalize(); + if (!normalized.startsWith(Paths.get(wp).normalize())) { + throw new SecurityException("路径越界: " + relativePath); + } + return normalized.toString(); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxConfig.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxConfig.java new file mode 100644 index 000000000..96437228c --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxConfig.java @@ -0,0 +1,16 @@ +package com.alibaba.himarket.service.hicoding.sandbox; + +import java.util.Map; + +/** + * 沙箱创建/获取配置。 + * 统一各沙箱类型的配置参数。 + */ +public record SandboxConfig( + String userId, + SandboxType type, + String workspacePath, + Map env, + Map resources, + // E2B 特有配置(未来) + String e2bTemplate) {} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxHttpClient.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxHttpClient.java new file mode 100644 index 000000000..3c5a73c8d --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxHttpClient.java @@ -0,0 +1,229 @@ 
+package com.alibaba.himarket.service.hicoding.sandbox; + +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.IOException; +import java.net.URI; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.time.Duration; +import java.util.List; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Component; + +/** + * 沙箱 Sidecar HTTP 客户端。 + * + *

统一封装对 Sidecar HTTP API 的调用(writeFile / readFile / healthCheck / exec), + * 消除 RemoteSandboxProvider 和 LocalSandboxProvider 中的代码重复。 + * + *

可复用性说明

+ *

本组件设计为跨 Provider 可复用。OpenSandbox 的 execd 组件提供与 HiMarket Sidecar + * 兼容的 /files/* HTTP API(/files/write、/files/read、/files/extract), + * 因此未来 {@code OpenSandboxProvider} 可直接注入本客户端完成文件操作, + * 无需重复实现 HTTP 调用逻辑。 + * + *

唯一差异在于 baseUrl 的构造方式: + *

    + *
  • HiMarket Sidecar:{@code http://:}
  • + *
  • OpenSandbox execd:{@code http://:} + * (端口默认 8080,可通过 OpenSandbox Server API 获取)
  • + *
+ * + * @see RemoteSandboxProvider + * @see LocalSandboxProvider + * @see SandboxProvider OpenSandbox 对接说明 + */ +@Component +public class SandboxHttpClient { + + private static final Logger logger = LoggerFactory.getLogger(SandboxHttpClient.class); + private static final Duration DEFAULT_TIMEOUT = Duration.ofSeconds(10); + private static final Duration HEALTH_CHECK_TIMEOUT = Duration.ofSeconds(5); + private final HttpClient httpClient; + private final ObjectMapper objectMapper; + + public SandboxHttpClient(ObjectMapper objectMapper) { + this.objectMapper = objectMapper; + this.httpClient = + HttpClient.newBuilder() + .connectTimeout(DEFAULT_TIMEOUT) + .version(HttpClient.Version.HTTP_1_1) + .build(); + } + + /** + * 写入文件到沙箱。 + * + *

调用 Sidecar POST /files/write,请求体为 {@code {"path": relativePath, "content": content}}。 + * + * @param baseUrl Sidecar 基础 URL(如 http://host:port) + * @param sandboxId 沙箱标识(用于异常信息) + * @param relativePath 文件相对路径 + * @param content 文件内容 + * @throws IOException 当 HTTP 响应非 200 或请求失败时 + */ + public void writeFile(String baseUrl, String sandboxId, String relativePath, String content) + throws IOException { + String url = baseUrl + "/files/write"; + String body = + objectMapper.writeValueAsString(Map.of("path", relativePath, "content", content)); + HttpResponse response = doPost(url, body, DEFAULT_TIMEOUT); + if (response.statusCode() != 200) { + throw new IOException( + "Sidecar writeFile 失败 (sandbox: " + sandboxId + "): " + response.body()); + } + } + + /** + * 从沙箱读取文件。 + * + *

调用 Sidecar POST /files/read,请求体为 {@code {"path": relativePath}}, + * 从响应 JSON 中解析 {@code content} 字段。 + * + * @param baseUrl Sidecar 基础 URL + * @param sandboxId 沙箱标识(用于异常信息) + * @param relativePath 文件相对路径 + * @return 文件内容 + * @throws IOException 当 HTTP 响应非 200 或请求失败时 + */ + public String readFile(String baseUrl, String sandboxId, String relativePath) + throws IOException { + String url = baseUrl + "/files/read"; + String body = objectMapper.writeValueAsString(Map.of("path", relativePath)); + HttpResponse response = doPost(url, body, DEFAULT_TIMEOUT); + if (response.statusCode() != 200) { + throw new IOException( + "Sidecar readFile 失败 (sandbox: " + sandboxId + "): " + response.body()); + } + return objectMapper.readTree(response.body()).get("content").asText(); + } + + /** + * 健康检查。 + * + *

调用 Sidecar GET /health,超时 5 秒。任何异常均返回 false。 + * + * @param baseUrl Sidecar 基础 URL + * @return true 表示健康,false 表示不可达或异常 + */ + public boolean healthCheck(String baseUrl) { + try { + HttpResponse response = doGet(baseUrl + "/health", HEALTH_CHECK_TIMEOUT); + return response.statusCode() == 200; + } catch (Exception e) { + return false; + } + } + + /** + * 带日志的健康检查。 + * + *

与 {@link #healthCheck(String)} 相同逻辑,但在失败时记录 warn 日志, + * 包含 sandboxId、host、status、body 等诊断信息。 + * + * @param baseUrl Sidecar 基础 URL + * @param sandboxId 沙箱标识(用于日志) + * @return true 表示健康,false 表示不可达或异常 + */ + public boolean healthCheckWithLog(String baseUrl, String sandboxId) { + String url = baseUrl + "/health"; + try { + HttpResponse response = doGet(url, HEALTH_CHECK_TIMEOUT); + if (response.statusCode() == 200) { + return true; + } + logger.warn( + "[SandboxHttpClient] healthCheck HTTP 非 200 (sandbox: {}, host: {}, status: {}," + + " body: {})", + sandboxId, + URI.create(baseUrl).getHost(), + response.statusCode(), + response.body()); + return false; + } catch (Exception e) { + logger.warn( + "[SandboxHttpClient] healthCheck HTTP 请求失败 (sandbox: {}, host: {}): {} - {}", + sandboxId, + URI.create(baseUrl).getHost(), + e.getClass().getSimpleName(), + e.getMessage() != null ? e.getMessage() : e.toString()); + return false; + } + } + + /** + * 在沙箱内执行命令。 + * + *

调用 Sidecar POST /exec,请求体为 {"command": command, "args": args}。 + * 使用调用方传入的 timeout 作为 HTTP 请求超时时间。 + * + * @param baseUrl Sidecar 基础 URL + * @param sandboxId 沙箱标识(用于异常信息) + * @param command 要执行的命令 + * @param args 命令参数列表 + * @param timeout HTTP 请求超时时间 + * @return 命令执行结果 + * @throws IOException 当 HTTP 响应非 200 或请求失败时 + */ + public ExecResult exec( + String baseUrl, String sandboxId, String command, List args, Duration timeout) + throws IOException { + String url = baseUrl + "/exec"; + String body = objectMapper.writeValueAsString(Map.of("command", command, "args", args)); + HttpResponse response = doPost(url, body, timeout); + if (response.statusCode() != 200) { + throw new IOException( + "Sidecar exec 失败 (sandbox: " + + sandboxId + + ", status: " + + response.statusCode() + + "): " + + response.body()); + } + var tree = objectMapper.readTree(response.body()); + return new ExecResult( + tree.get("exitCode").asInt(), + tree.get("stdout").asText(), + tree.get("stderr").asText()); + } + + // ===== 内部 HTTP 调用方法 ===== + + /** + * 发送 POST 请求(JSON body)。 + * 统一处理 InterruptedException:恢复中断标志 + 包装为 IOException。 + */ + private HttpResponse doPost(String url, String body, Duration timeout) + throws IOException { + try { + return httpClient.send( + HttpRequest.newBuilder(URI.create(url)) + .POST(HttpRequest.BodyPublishers.ofString(body)) + .header("Content-Type", "application/json") + .timeout(timeout) + .build(), + HttpResponse.BodyHandlers.ofString()); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new IOException("HTTP 请求被中断: " + url, e); + } + } + + /** + * 发送 GET 请求。 + * 统一处理 InterruptedException:恢复中断标志 + 包装为 IOException。 + */ + private HttpResponse doGet(String url, Duration timeout) throws IOException { + try { + return httpClient.send( + HttpRequest.newBuilder(URI.create(url)).GET().timeout(timeout).build(), + HttpResponse.BodyHandlers.ofString()); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new 
IOException("HTTP 请求被中断: " + url, e); + } + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxInfo.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxInfo.java new file mode 100644 index 000000000..e6c958340 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxInfo.java @@ -0,0 +1,104 @@ +package com.alibaba.himarket.service.hicoding.sandbox; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.net.URI; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.util.Map; + +/** + * 沙箱实例信息,由 SandboxProvider.acquire() 返回。 + * 包含连接沙箱所需的所有信息。 + */ +public record SandboxInfo( + SandboxType type, + String sandboxId, + String host, + int sidecarPort, + String workspacePath, + boolean reused, + Map metadata) { + + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + /** + * 构建 Sidecar WebSocket URI。 + * + * @param command 要执行的命令 + * @param args 命令参数(可为 null 或空白) + * @return 完整的 WebSocket URI,args 部分经过 URL 编码 + */ + public URI sidecarWsUri(String command, String args) { + return sidecarWsUri(command, args, null); + } + + /** + * 构建 Sidecar WebSocket URI,支持传递环境变量和工作目录。 + * + * @param command 要执行的命令 + * @param args 命令参数(可为 null 或空白) + * @param env 环境变量 Map(可为 null) + * @param cwd 工作目录路径(可为 null 或空白) + * @return 完整的 WebSocket URI,args、env 和 cwd 部分经过 URL 编码 + */ + public URI sidecarWsUri(String command, String args, Map env, String cwd) { + return sidecarWsUri(command, args, env, cwd, null); + } + + /** + * 构建 Sidecar WebSocket URI,支持传递环境变量、工作目录和 sessionId。 + * + *

当 sessionId 非空时,生成 attach URI(不含 command 等参数), + * sidecar 将 attach 到已有 session 而非创建新进程。 + * + * @param command 要执行的命令(新建时必需,attach 时忽略) + * @param args 命令参数(可为 null 或空白) + * @param env 环境变量 Map(可为 null) + * @param cwd 工作目录路径(可为 null 或空白) + * @param sessionId sidecar session ID(非空则为 attach 模式) + * @return 完整的 WebSocket URI + */ + public URI sidecarWsUri( + String command, String args, Map env, String cwd, String sessionId) { + // Attach 模式:只需 sessionId + if (sessionId != null && !sessionId.isBlank()) { + return URI.create( + "ws://" + + host + + ":" + + sidecarPort + + "/?sessionId=" + + URLEncoder.encode(sessionId, StandardCharsets.UTF_8)); + } + // 新建模式:需要 command 等参数 + String query = "command=" + command; + if (args != null && !args.isBlank()) { + query += "&args=" + URLEncoder.encode(args, StandardCharsets.UTF_8); + } + if (env != null && !env.isEmpty()) { + try { + String envJson = OBJECT_MAPPER.writeValueAsString(env); + query += "&env=" + URLEncoder.encode(envJson, StandardCharsets.UTF_8); + } catch (JsonProcessingException e) { + throw new RuntimeException("Failed to serialize env to JSON", e); + } + } + if (cwd != null && !cwd.isBlank()) { + query += "&cwd=" + URLEncoder.encode(cwd, StandardCharsets.UTF_8); + } + return URI.create("ws://" + host + ":" + sidecarPort + "/?" 
+ query); + } + + /** + * 构建 Sidecar WebSocket URI,支持传递环境变量。 + * + * @param command 要执行的命令 + * @param args 命令参数(可为 null 或空白) + * @param env 环境变量 Map(可为 null) + * @return 完整的 WebSocket URI,args 和 env 部分经过 URL 编码 + */ + public URI sidecarWsUri(String command, String args, Map env) { + return sidecarWsUri(command, args, env, null); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxProvider.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxProvider.java new file mode 100644 index 000000000..971bdc565 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxProvider.java @@ -0,0 +1,95 @@ +package com.alibaba.himarket.service.hicoding.sandbox; + +import com.alibaba.himarket.service.hicoding.runtime.RuntimeAdapter; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeConfig; +import java.io.IOException; +import java.net.URI; + +/** + * 统一沙箱提供者接口。 + * + *

抽象不同沙箱环境(本地 Mac、K8s Pod、E2B)的差异, 为 SandboxInitPipeline 提供统一的操作契约。 + * + *

OpenSandbox 对接说明

+ *

若需对接 OpenSandbox, + * 创建 {@code OpenSandboxProvider} 实现本接口,关键适配点: + *

    + *
  • acquire():调用 OpenSandbox Python FastAPI Server(POST /sandboxes)创建沙箱实例, + * 返回的 sandboxId 和 host 封装为 {@link SandboxInfo}
  • + *
  • release():调用 DELETE /sandboxes/{id} 销毁沙箱
  • + *
  • writeFile / readFile / healthCheck / exec: + * OpenSandbox 的 execd 组件提供兼容的 /files/* HTTP API, + * 可直接复用 {@link SandboxHttpClient},无需重复实现
  • + *
  • connectSidecar():OpenSandbox 使用 HTTP + SSE 而非 WebSocket 桥接 CLI, + * 需要适配 {@link RuntimeAdapter} 的 stdout() 流为 SSE 事件流, + * send() 方法改为 HTTP POST /command 调用
  • + *
+ * + * @see SandboxHttpClient 可复用的 HTTP 客户端(兼容 OpenSandbox execd /files/* API) + * @see SandboxType 沙箱类型枚举(需新增 OPEN_SANDBOX 值) + */ +public interface SandboxProvider { + + /** 沙箱类型标识 */ + SandboxType getType(); + + /** + * 获取或创建沙箱实例。 + */ + SandboxInfo acquire(SandboxConfig config); + + /** + * 释放沙箱资源。 + */ + void release(SandboxInfo info); + + /** + * 文件系统健康检查。 通过 Sidecar HTTP API 验证沙箱文件系统可读写。 + */ + boolean healthCheck(SandboxInfo info); + + /** + * 写入文件到沙箱工作空间。 所有沙箱类型统一通过 Sidecar HTTP API(POST /files/write)写入。 + */ + void writeFile(SandboxInfo info, String relativePath, String content) throws IOException; + + /** + * 从沙箱工作空间读取文件。 所有沙箱类型统一通过 Sidecar HTTP API(POST /files/read)读取。 + */ + String readFile(SandboxInfo info, String relativePath) throws IOException; + + /** + * 在沙箱内执行命令。 + * 默认抛出 UnsupportedOperationException。 + * + * @param info 沙箱信息 + * @param command 要执行的命令 + * @param args 命令参数列表 + * @param timeout 超时时间 + * @return 命令执行结果 + */ + default ExecResult exec( + SandboxInfo info, + String command, + java.util.List args, + java.time.Duration timeout) + throws java.io.IOException { + throw new UnsupportedOperationException("exec not implemented"); + } + + /** + * 建立到 Sidecar 的 WebSocket 连接。 所有沙箱类型都通过 Sidecar WebSocket 桥接 CLI。 + */ + RuntimeAdapter connectSidecar(SandboxInfo info, RuntimeConfig config); + + /** 获取 Sidecar WebSocket URI(不带环境变量)。 */ + default URI getSidecarUri(SandboxInfo info, String command, String args) { + return getSidecarUri(info, command, args, null); + } + + /** 获取 Sidecar WebSocket URI(支持环境变量)。 */ + default URI getSidecarUri( + SandboxInfo info, String command, String args, java.util.Map env) { + return info.sidecarWsUri(command, args, env); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxProviderRegistry.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxProviderRegistry.java new file mode 100644 index 000000000..8905ac22d --- /dev/null +++ 
b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxProviderRegistry.java @@ -0,0 +1,36 @@ +package com.alibaba.himarket.service.hicoding.sandbox; + +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import org.springframework.stereotype.Component; + +/** + * SandboxProvider 注册中心。 + * 根据 SandboxType 查找对应的 Provider 实现。 + */ +@Component +public class SandboxProviderRegistry { + + private final Map providers; + + public SandboxProviderRegistry(List providerList) { + this.providers = + providerList.stream() + .collect(Collectors.toMap(SandboxProvider::getType, Function.identity())); + } + + public SandboxProvider getProvider(SandboxType type) { + SandboxProvider provider = providers.get(type); + if (provider == null) { + throw new IllegalArgumentException("不支持的沙箱类型: " + type); + } + return provider; + } + + public Set supportedTypes() { + return providers.keySet(); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxType.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxType.java new file mode 100644 index 000000000..6f01333ab --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxType.java @@ -0,0 +1,42 @@ +package com.alibaba.himarket.service.hicoding.sandbox; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonValue; + +/** + * 沙箱类型枚举。 + * 统一标识 CLI Agent 运行在哪种沙箱环境中。 + * JSON 序列化值分别为 "remote"、"open-sandbox"、"e2b",与前端类型定义一致。 + */ +public enum SandboxType { + + /** 远程沙箱:连接远程 Sidecar 服务(K8s / Docker / 裸机均可) */ + REMOTE("remote"), + + /** OpenSandbox 沙箱:通过 OpenSandbox Server API 管理 */ + OPEN_SANDBOX("open-sandbox"), + + /** E2B 云沙箱:通过 E2B SDK 管理 */ + E2B("e2b"); + + private final String value; + + SandboxType(String value) { + this.value = value; + } + + 
@JsonValue + public String getValue() { + return value; + } + + @JsonCreator + public static SandboxType fromValue(String value) { + for (SandboxType type : values()) { + if (type.value.equalsIgnoreCase(value) || type.name().equalsIgnoreCase(value)) { + return type; + } + } + throw new IllegalArgumentException("未知的沙箱类型: " + value); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/ConfigInjectionPhase.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/ConfigInjectionPhase.java new file mode 100644 index 000000000..a3ef8aee8 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/ConfigInjectionPhase.java @@ -0,0 +1,109 @@ +package com.alibaba.himarket.service.hicoding.sandbox.init; + +import com.alibaba.himarket.service.hicoding.cli.ConfigFileBuilder; +import com.alibaba.himarket.service.hicoding.sandbox.ConfigFile; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxInfo; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxProvider; +import java.io.IOException; +import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * 将配置文件注入到沙箱内部。 + * + *

逐个 writeFile 注入配置文件。Skill 文件改由 nacos-cli 在沙箱内下载, + * 剩余配置文件数量很少,无需压缩解压。 + */ +public class ConfigInjectionPhase implements InitPhase { + + private static final Logger logger = LoggerFactory.getLogger(ConfigInjectionPhase.class); + + private final ConfigFileBuilder configFileBuilder; + + /** 无参构造函数,保持向后兼容。 */ + public ConfigInjectionPhase() { + this.configFileBuilder = null; + } + + /** 带 ConfigFileBuilder 的构造函数,支持从 ResolvedSessionConfig 动态生成配置文件。 */ + public ConfigInjectionPhase(ConfigFileBuilder configFileBuilder) { + this.configFileBuilder = configFileBuilder; + } + + @Override + public String name() { + return "config-injection"; + } + + @Override + public int order() { + return 300; + } + + @Override + public boolean shouldExecute(InitContext context) { + return context.getSessionConfig() != null + && context.getProviderConfig() != null + && context.getProviderConfig().isSupportsCustomModel(); + } + + @Override + public void execute(InitContext context) throws InitPhaseException { + List pendingConfigs = context.getInjectedConfigs(); + + // 如果 injectedConfigs 未被外部预填充,尝试从 resolvedSessionConfig 动态生成 + if ((pendingConfigs == null || pendingConfigs.isEmpty()) + && configFileBuilder != null + && context.getResolvedSessionConfig() != null) { + pendingConfigs = + configFileBuilder.build( + context.getResolvedSessionConfig(), + context.getRuntimeConfig().getProviderKey(), + context.getProviderConfig(), + context.getRuntimeConfig()); + context.setInjectedConfigs(pendingConfigs); + } + + if (pendingConfigs == null || pendingConfigs.isEmpty()) { + logger.info("[ConfigInjection] 无配置文件需要注入"); + return; + } + + SandboxProvider provider = context.getProvider(); + SandboxInfo info = context.getSandboxInfo(); + + try { + for (ConfigFile config : pendingConfigs) { + provider.writeFile(info, config.relativePath(), config.content()); + } + logger.info("[ConfigInjection] 逐个写入完成: {} 个文件已注入", pendingConfigs.size()); + + // 统计各类型文件数量 + long skillCount = 
pendingConfigs.stream().filter(c -> "skill".equals(c.type())).count(); + long mcpCount = pendingConfigs.stream().filter(c -> "mcp".equals(c.type())).count(); + long modelCount = pendingConfigs.stream().filter(c -> "model".equals(c.type())).count(); + long otherCount = pendingConfigs.size() - skillCount - mcpCount - modelCount; + + logger.info( + "[ConfigInjection] 配置注入完成: 共 {} 个文件 (skill={}, mcp={}, model={}, other={})", + pendingConfigs.size(), + skillCount, + mcpCount, + modelCount, + otherCount); + } catch (IOException e) { + throw new InitPhaseException("config-injection", "配置注入失败: " + e.getMessage(), e, true); + } + } + + @Override + public boolean verify(InitContext context) { + return true; + } + + @Override + public RetryPolicy retryPolicy() { + return RetryPolicy.none(); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/FileSystemReadyPhase.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/FileSystemReadyPhase.java new file mode 100644 index 000000000..f9f0d43f6 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/FileSystemReadyPhase.java @@ -0,0 +1,77 @@ +package com.alibaba.himarket.service.hicoding.sandbox.init; + +import com.alibaba.himarket.service.hicoding.sandbox.SandboxInfo; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * 验证沙箱文件系统可访问。 + * 通过 SandboxProvider.healthCheck() 统一验证。 + * + *

当健康检查失败时,提供包含 host:port 的友好错误信息, + * 帮助用户快速定位沙箱连接问题。使用快速失败策略(仅重试 1 次), + * 避免沙箱不可达时长时间无意义等待。 + */ +public class FileSystemReadyPhase implements InitPhase { + + private static final Logger logger = LoggerFactory.getLogger(FileSystemReadyPhase.class); + + @Override + public String name() { + return "filesystem-ready"; + } + + @Override + public int order() { + return 200; + } + + @Override + public boolean shouldExecute(InitContext context) { + return true; + } + + @Override + public void execute(InitContext context) throws InitPhaseException { + try { + SandboxProvider provider = context.getProvider(); + SandboxInfo info = context.getSandboxInfo(); + boolean healthy = provider.healthCheck(info); + if (!healthy) { + String detail = buildConnectivityErrorMessage(info); + throw new InitPhaseException("filesystem-ready", detail, true); + } + logger.info("[FileSystemReady] 通过: sandboxId={}", info.sandboxId()); + } catch (InitPhaseException e) { + throw e; + } catch (Exception e) { + throw new InitPhaseException( + "filesystem-ready", "文件系统健康检查异常: " + e.getMessage(), e, true); + } + } + + @Override + public boolean verify(InitContext context) { + return true; + } + + /** + * 不重试,失败直接返回。 + */ + @Override + public RetryPolicy retryPolicy() { + return RetryPolicy.none(); + } + + /** + * 构建包含连接信息的友好错误消息。 + */ + private String buildConnectivityErrorMessage(SandboxInfo info) { + String host = info.host(); + int port = info.sidecarPort(); + return String.format( + "沙箱 %s (%s:%d) 不可达,请检查: 1) sidecar 服务是否已启动 2) 地址和端口是否正确", + info.sandboxId(), host, port); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitConfig.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitConfig.java new file mode 100644 index 000000000..521100b3e --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitConfig.java @@ -0,0 +1,14 @@ +package 
com.alibaba.himarket.service.hicoding.sandbox.init; + +import java.time.Duration; + +public record InitConfig( + Duration totalTimeout, + boolean failFast, + boolean enableVerification, + boolean enableProgressNotify) { + + public static InitConfig defaults() { + return new InitConfig(Duration.ofSeconds(120), true, false, true); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitContext.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitContext.java new file mode 100644 index 000000000..bd940386f --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitContext.java @@ -0,0 +1,159 @@ +package com.alibaba.himarket.service.hicoding.sandbox.init; + +import com.alibaba.himarket.config.AcpProperties.CliProviderConfig; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeAdapter; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeConfig; +import com.alibaba.himarket.service.hicoding.sandbox.ConfigFile; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxConfig; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxInfo; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxProvider; +import com.alibaba.himarket.service.hicoding.session.CliSessionConfig; +import com.alibaba.himarket.service.hicoding.session.ResolvedSessionConfig; +import java.time.Instant; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CopyOnWriteArrayList; +import org.springframework.web.socket.WebSocketSession; + +/** + * 初始化上下文,各阶段通过此对象共享数据。 核心改动:持有 SandboxProvider 引用,各阶段通过 provider 执行操作。 + */ +public class InitContext { + + // 核心:沙箱提供者 + private final SandboxProvider provider; + + // 输入参数 + private final String userId; + private final SandboxConfig sandboxConfig; + private final RuntimeConfig runtimeConfig; + private final CliProviderConfig providerConfig; + private final 
CliSessionConfig sessionConfig; + private final WebSocketSession frontendSession; + + // 阶段产出 + private SandboxInfo sandboxInfo; + private RuntimeAdapter runtimeAdapter; + private List injectedConfigs = new ArrayList<>(); + private ResolvedSessionConfig resolvedSessionConfig; + + // 状态追踪 + private final Map phaseStatuses = new ConcurrentHashMap<>(); + private final List events = new CopyOnWriteArrayList<>(); + private String lastError; + + public InitContext( + SandboxProvider provider, + String userId, + SandboxConfig sandboxConfig, + RuntimeConfig runtimeConfig, + CliProviderConfig providerConfig, + CliSessionConfig sessionConfig, + WebSocketSession frontendSession) { + this.provider = provider; + this.userId = userId; + this.sandboxConfig = sandboxConfig; + this.runtimeConfig = runtimeConfig; + this.providerConfig = providerConfig; + this.sessionConfig = sessionConfig; + this.frontendSession = frontendSession; + } + + // ========== Getters ========== + + public SandboxProvider getProvider() { + return provider; + } + + public String getUserId() { + return userId; + } + + public SandboxConfig getSandboxConfig() { + return sandboxConfig; + } + + public RuntimeConfig getRuntimeConfig() { + return runtimeConfig; + } + + public CliProviderConfig getProviderConfig() { + return providerConfig; + } + + public CliSessionConfig getSessionConfig() { + return sessionConfig; + } + + public WebSocketSession getFrontendSession() { + return frontendSession; + } + + public SandboxInfo getSandboxInfo() { + return sandboxInfo; + } + + public RuntimeAdapter getRuntimeAdapter() { + return runtimeAdapter; + } + + public List getInjectedConfigs() { + return injectedConfigs; + } + + public ResolvedSessionConfig getResolvedSessionConfig() { + return resolvedSessionConfig; + } + + public Map getPhaseStatuses() { + return phaseStatuses; + } + + public List getEvents() { + return Collections.unmodifiableList(events); + } + + public String getLastError() { + return lastError; + } + + // 
========== Setters(阶段产出 + 状态) ========== + + public void setSandboxInfo(SandboxInfo sandboxInfo) { + this.sandboxInfo = sandboxInfo; + } + + public void setRuntimeAdapter(RuntimeAdapter runtimeAdapter) { + this.runtimeAdapter = runtimeAdapter; + } + + public void setInjectedConfigs(List injectedConfigs) { + this.injectedConfigs = injectedConfigs; + } + + public void setResolvedSessionConfig(ResolvedSessionConfig resolvedSessionConfig) { + this.resolvedSessionConfig = resolvedSessionConfig; + } + + public void setLastError(String lastError) { + this.lastError = lastError; + } + + // ========== 辅助方法 ========== + + /** 记录初始化事件。 */ + public void recordEvent(String phase, InitEvent.EventType type, String message) { + events.add(new InitEvent(Instant.now(), phase, type, message)); + } + + /** 返回所有已完成阶段的名称列表。 */ + public List completedPhases() { + List completed = new ArrayList<>(); + for (Map.Entry entry : phaseStatuses.entrySet()) { + if (entry.getValue() == PhaseStatus.COMPLETED) { + completed.add(entry.getKey()); + } + } + return completed; + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitErrorCode.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitErrorCode.java new file mode 100644 index 000000000..7b7790380 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitErrorCode.java @@ -0,0 +1,56 @@ +package com.alibaba.himarket.service.hicoding.sandbox.init; + +/** + * 初始化错误码枚举。 + * 提供细粒度的错误分类,便于前端展示和运维排查。 + * + * 每个错误码对应 {@link SandboxInitPipeline} 中某个阶段的失败场景, + * 可通过 {@link #fromPhaseName(String)} 从 Pipeline 失败阶段名称自动映射。 + */ +public enum InitErrorCode { + SANDBOX_ACQUIRE_FAILED("SANDBOX_ACQUIRE_FAILED", "沙箱获取失败"), + FILESYSTEM_NOT_READY("FILESYSTEM_NOT_READY", "文件系统未就绪"), + CONFIG_RESOLVE_FAILED("CONFIG_RESOLVE_FAILED", "配置解析失败"), + CONFIG_INJECTION_FAILED("CONFIG_INJECTION_FAILED", "配置注入失败"), + 
SIDECAR_CONNECT_FAILED("SIDECAR_CONNECT_FAILED", "Sidecar 连接失败"), + CLI_NOT_READY("CLI_NOT_READY", "CLI 工具未就绪"), + PIPELINE_TIMEOUT("PIPELINE_TIMEOUT", "初始化超时"), + UNKNOWN_ERROR("UNKNOWN_ERROR", "未知错误"); + + private final String code; + private final String defaultMessage; + + InitErrorCode(String code, String defaultMessage) { + this.code = code; + this.defaultMessage = defaultMessage; + } + + public String getCode() { + return code; + } + + public String getDefaultMessage() { + return defaultMessage; + } + + /** + * 根据 Pipeline 失败阶段名称映射到错误码。 + * + * @param phaseName Pipeline 阶段名称(如 "sandbox-acquire"、"cli-ready"), + * 参见各 {@link InitPhase#name()} 实现 + * @return 对应的错误码,无法匹配时返回 {@link #UNKNOWN_ERROR} + */ + public static InitErrorCode fromPhaseName(String phaseName) { + if (phaseName == null) { + return UNKNOWN_ERROR; + } + return switch (phaseName) { + case "sandbox-acquire" -> SANDBOX_ACQUIRE_FAILED; + case "filesystem-ready" -> FILESYSTEM_NOT_READY; + case "config-injection" -> CONFIG_INJECTION_FAILED; + case "sidecar-connect" -> SIDECAR_CONNECT_FAILED; + case "cli-ready" -> CLI_NOT_READY; + default -> UNKNOWN_ERROR; + }; + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitEvent.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitEvent.java new file mode 100644 index 000000000..63b83a0da --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitEvent.java @@ -0,0 +1,17 @@ +package com.alibaba.himarket.service.hicoding.sandbox.init; + +import java.time.Instant; + +public record InitEvent(Instant timestamp, String phase, EventType type, String message) { + + public enum EventType { + PHASE_START, + PHASE_COMPLETE, + PHASE_SKIP, + PHASE_RETRY, + PHASE_FAIL, + VERIFY_PASS, + VERIFY_FAIL, + WARNING + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitPhase.java 
b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitPhase.java new file mode 100644 index 000000000..adda87828 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitPhase.java @@ -0,0 +1,25 @@ +package com.alibaba.himarket.service.hicoding.sandbox.init; + +/** + * 初始化阶段接口。 每个阶段通过 InitContext.getProvider() 获取 SandboxProvider, 执行沙箱类型无关的初始化逻辑。 + */ +public interface InitPhase { + + /** 阶段名称,用于日志和事件追踪。 */ + String name(); + + /** 执行顺序,值越小越先执行。 */ + int order(); + + /** 判断当前阶段是否需要执行。返回 false 时跳过并记录 PHASE_SKIP 事件。 */ + boolean shouldExecute(InitContext context); + + /** 执行阶段逻辑。 */ + void execute(InitContext context) throws InitPhaseException; + + /** 验证阶段执行结果是否就绪。verify 返回 true 后才执行下一阶段。 */ + boolean verify(InitContext context); + + /** 该阶段的重试策略。 */ + RetryPolicy retryPolicy(); +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitPhaseException.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitPhaseException.java new file mode 100644 index 000000000..2f4dff452 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitPhaseException.java @@ -0,0 +1,28 @@ +package com.alibaba.himarket.service.hicoding.sandbox.init; + +public class InitPhaseException extends RuntimeException { + + private final String phaseName; + private final boolean retryable; + + public InitPhaseException(String phaseName, String message, boolean retryable) { + super(message); + this.phaseName = phaseName; + this.retryable = retryable; + } + + public InitPhaseException( + String phaseName, String message, Throwable cause, boolean retryable) { + super(message, cause); + this.phaseName = phaseName; + this.retryable = retryable; + } + + public String getPhaseName() { + return phaseName; + } + + public boolean isRetryable() { + return retryable; + } +} diff --git 
a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitResult.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitResult.java new file mode 100644 index 000000000..88b31c0ff --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitResult.java @@ -0,0 +1,28 @@ +package com.alibaba.himarket.service.hicoding.sandbox.init; + +import java.time.Duration; +import java.util.List; +import java.util.Map; + +public record InitResult( + boolean success, + String failedPhase, + String errorMessage, + Duration totalDuration, + Map phaseDurations, + List events) { + + public static InitResult success( + Duration duration, Map phases, List events) { + return new InitResult(true, null, null, duration, phases, events); + } + + public static InitResult failure( + String phase, + String error, + Duration duration, + Map phases, + List events) { + return new InitResult(false, phase, error, duration, phases, events); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/PhaseStatus.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/PhaseStatus.java new file mode 100644 index 000000000..dc010c822 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/PhaseStatus.java @@ -0,0 +1,11 @@ +package com.alibaba.himarket.service.hicoding.sandbox.init; + +public enum PhaseStatus { + PENDING, + EXECUTING, + VERIFYING, + COMPLETED, + SKIPPED, + FAILED, + RETRYING +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/RetryPolicy.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/RetryPolicy.java new file mode 100644 index 000000000..a808042dd --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/RetryPolicy.java @@ -0,0 +1,24 @@ 
+package com.alibaba.himarket.service.hicoding.sandbox.init; + +import java.time.Duration; + +public record RetryPolicy( + int maxRetries, Duration initialDelay, double backoffMultiplier, Duration maxDelay) { + + public static RetryPolicy none() { + return new RetryPolicy(0, Duration.ZERO, 1.0, Duration.ZERO); + } + + public static RetryPolicy defaultPolicy() { + return new RetryPolicy(3, Duration.ofSeconds(1), 2.0, Duration.ofSeconds(10)); + } + + public static RetryPolicy fileOperation() { + return new RetryPolicy(2, Duration.ofMillis(500), 2.0, Duration.ofSeconds(3)); + } + + /** 适用于 LB 规则下发场景:最多重试 10 次,初始 3s,指数退避,最大间隔 10s,总等待约 60s。 */ + public static RetryPolicy lbWarmup() { + return new RetryPolicy(10, Duration.ofSeconds(3), 1.5, Duration.ofSeconds(10)); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/SandboxAcquirePhase.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/SandboxAcquirePhase.java new file mode 100644 index 000000000..851b171a2 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/SandboxAcquirePhase.java @@ -0,0 +1,49 @@ +package com.alibaba.himarket.service.hicoding.sandbox.init; + +import com.alibaba.himarket.service.hicoding.sandbox.SandboxInfo; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxProvider; + +/** + * 获取沙箱实例。 + * 通过 SandboxProvider.acquire() 统一处理,不直接依赖任何具体实现。 + */ +public class SandboxAcquirePhase implements InitPhase { + + @Override + public String name() { + return "sandbox-acquire"; + } + + @Override + public int order() { + return 100; + } + + @Override + public boolean shouldExecute(InitContext context) { + return true; + } + + @Override + public void execute(InitContext context) throws InitPhaseException { + try { + SandboxProvider provider = context.getProvider(); + SandboxInfo info = provider.acquire(context.getSandboxConfig()); + context.setSandboxInfo(info); + } catch 
(Exception e) { + throw new InitPhaseException("sandbox-acquire", "沙箱获取失败: " + e.getMessage(), e, false); + } + } + + @Override + public boolean verify(InitContext context) { + return context.getSandboxInfo() != null + && context.getSandboxInfo().host() != null + && !context.getSandboxInfo().host().isBlank(); + } + + @Override + public RetryPolicy retryPolicy() { + return RetryPolicy.none(); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/SandboxInitPipeline.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/SandboxInitPipeline.java new file mode 100644 index 000000000..69ac5410b --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/SandboxInitPipeline.java @@ -0,0 +1,175 @@ +package com.alibaba.himarket.service.hicoding.sandbox.init; + +import java.time.Duration; +import java.time.Instant; +import java.util.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * 沙箱初始化流水线。 按顺序执行注册的 InitPhase,每个阶段有前置检查、执行逻辑和就绪验证。 对所有沙箱类型(Local/K8s/E2B)通用。 + */ +public class SandboxInitPipeline { + + private static final Logger logger = LoggerFactory.getLogger(SandboxInitPipeline.class); + + private final List phases; + private final InitConfig initConfig; + + public SandboxInitPipeline(List phases, InitConfig initConfig) { + this.phases = new ArrayList<>(phases); + this.phases.sort(Comparator.comparingInt(InitPhase::order)); + this.initConfig = initConfig; + } + + /** 执行完整的初始化流水线。 */ + public InitResult execute(InitContext context) { + return executeFromIndex(context, 0); + } + + /** 从指定阶段恢复执行。 */ + public InitResult resumeFrom(InitContext context, String fromPhase) { + int startIndex = 0; + for (int i = 0; i < phases.size(); i++) { + if (phases.get(i).name().equals(fromPhase)) { + startIndex = i; + break; + } + } + return executeFromIndex(context, startIndex); + } + + private InitResult executeFromIndex(InitContext context, int 
startIndex) { + Instant start = Instant.now(); + Map phaseDurations = new LinkedHashMap<>(); + + for (int i = startIndex; i < phases.size(); i++) { + InitPhase phase = phases.get(i); + + // 检查总超时 + Duration elapsed = Duration.between(start, Instant.now()); + if (elapsed.compareTo(initConfig.totalTimeout()) > 0) { + context.setLastError( + "总超时: 已耗时 " + + elapsed.toSeconds() + + "s,超过限制 " + + initConfig.totalTimeout().toSeconds() + + "s"); + context.recordEvent(phase.name(), InitEvent.EventType.PHASE_FAIL, "总超时终止"); + return InitResult.failure( + phase.name(), + context.getLastError(), + Duration.between(start, Instant.now()), + phaseDurations, + context.getEvents()); + } + + // 检查 shouldExecute + if (!phase.shouldExecute(context)) { + context.getPhaseStatuses().put(phase.name(), PhaseStatus.SKIPPED); + context.recordEvent(phase.name(), InitEvent.EventType.PHASE_SKIP, "条件不满足,跳过"); + logger.info("[Pipeline] 跳过阶段: {}", phase.name()); + continue; + } + + context.getPhaseStatuses().put(phase.name(), PhaseStatus.EXECUTING); + context.recordEvent(phase.name(), InitEvent.EventType.PHASE_START, "开始执行"); + logger.info("[Pipeline] 开始阶段: {}", phase.name()); + Instant phaseStart = Instant.now(); + + boolean success = executeWithRetry(phase, context); + phaseDurations.put(phase.name(), Duration.between(phaseStart, Instant.now())); + + if (!success) { + context.getPhaseStatuses().put(phase.name(), PhaseStatus.FAILED); + context.recordEvent( + phase.name(), InitEvent.EventType.PHASE_FAIL, context.getLastError()); + logger.error("[Pipeline] 阶段失败: {} - {}", phase.name(), context.getLastError()); + return InitResult.failure( + phase.name(), + context.getLastError(), + Duration.between(start, Instant.now()), + phaseDurations, + context.getEvents()); + } + + // 验证阶段 + context.getPhaseStatuses().put(phase.name(), PhaseStatus.VERIFYING); + if (initConfig.enableVerification() && !phase.verify(context)) { + context.getPhaseStatuses().put(phase.name(), PhaseStatus.FAILED); + String error = 
"阶段 " + phase.name() + " 验证失败"; + context.setLastError(error); + context.recordEvent(phase.name(), InitEvent.EventType.VERIFY_FAIL, error); + logger.error("[Pipeline] 阶段验证失败: {}", phase.name()); + return InitResult.failure( + phase.name(), + error, + Duration.between(start, Instant.now()), + phaseDurations, + context.getEvents()); + } + + context.getPhaseStatuses().put(phase.name(), PhaseStatus.COMPLETED); + context.recordEvent(phase.name(), InitEvent.EventType.PHASE_COMPLETE, "执行完成"); + logger.info( + "[Pipeline] 阶段完成: {} (耗时 {}ms)", + phase.name(), + phaseDurations.get(phase.name()).toMillis()); + } + + return InitResult.success( + Duration.between(start, Instant.now()), phaseDurations, context.getEvents()); + } + + /** 按 RetryPolicy 执行重试逻辑。 */ + private boolean executeWithRetry(InitPhase phase, InitContext context) { + RetryPolicy policy = phase.retryPolicy(); + int maxAttempts = policy.maxRetries() + 1; + + for (int attempt = 1; attempt <= maxAttempts; attempt++) { + try { + phase.execute(context); + return true; + } catch (InitPhaseException e) { + context.setLastError(e.getMessage()); + + if (attempt < maxAttempts && e.isRetryable()) { + context.getPhaseStatuses().put(phase.name(), PhaseStatus.RETRYING); + context.recordEvent( + phase.name(), + InitEvent.EventType.PHASE_RETRY, + "第 " + attempt + " 次失败,准备重试: " + e.getMessage()); + logger.warn( + "[Pipeline] 阶段 {} 第 {}/{} 次失败,准备重试: {}", + phase.name(), + attempt, + maxAttempts, + e.getMessage()); + + long delayMs = + (long) + (policy.initialDelay().toMillis() + * Math.pow(policy.backoffMultiplier(), attempt - 1)); + long maxDelayMs = policy.maxDelay().toMillis(); + if (maxDelayMs > 0) { + delayMs = Math.min(delayMs, maxDelayMs); + } + + try { + Thread.sleep(delayMs); + } catch (InterruptedException ie) { + Thread.currentThread().interrupt(); + context.setLastError("重试等待被中断"); + return false; + } + } else { + return false; + } + } catch (Exception e) { + context.setLastError(e.getMessage()); + return false; + } 
+ } + return false; + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/SidecarConnectPhase.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/SidecarConnectPhase.java new file mode 100644 index 000000000..b26149d72 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/SidecarConnectPhase.java @@ -0,0 +1,51 @@ +package com.alibaba.himarket.service.hicoding.sandbox.init; + +import com.alibaba.himarket.service.hicoding.runtime.RuntimeAdapter; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeStatus; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxProvider; + +/** + * 建立到 Sidecar Server 的 WebSocket 连接。 + * 所有沙箱类型都通过 Sidecar WebSocket 桥接 CLI,逻辑完全一致。 + */ +public class SidecarConnectPhase implements InitPhase { + + @Override + public String name() { + return "sidecar-connect"; + } + + @Override + public int order() { + return 400; + } + + @Override + public boolean shouldExecute(InitContext context) { + return true; + } + + @Override + public void execute(InitContext context) throws InitPhaseException { + try { + SandboxProvider provider = context.getProvider(); + RuntimeAdapter adapter = + provider.connectSidecar(context.getSandboxInfo(), context.getRuntimeConfig()); + context.setRuntimeAdapter(adapter); + } catch (Exception e) { + throw new InitPhaseException( + "sidecar-connect", "Sidecar 连接失败: " + e.getMessage(), e, true); + } + } + + @Override + public boolean verify(InitContext context) { + RuntimeAdapter adapter = context.getRuntimeAdapter(); + return adapter != null && adapter.getStatus() == RuntimeStatus.RUNNING; + } + + @Override + public RetryPolicy retryPolicy() { + return RetryPolicy.none(); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/SkillDownloadPhase.java 
b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/SkillDownloadPhase.java new file mode 100644 index 000000000..d137cb0c0 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/sandbox/init/SkillDownloadPhase.java @@ -0,0 +1,170 @@ +package com.alibaba.himarket.service.hicoding.sandbox.init; + +import com.alibaba.himarket.service.hicoding.sandbox.ExecResult; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxInfo; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxProvider; +import com.alibaba.himarket.service.hicoding.session.ResolvedSessionConfig; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.time.Duration; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Skill 下载阶段。 + * 在 ConfigInjectionPhase(300)之后、SidecarConnectPhase(400)之前执行。 + * 按 nacosId 分组,通过 sidecar exec API 调用 nacos-cli skill-get 批量下载 Skill 文件。 + */ +public class SkillDownloadPhase implements InitPhase { + + private static final Logger logger = LoggerFactory.getLogger(SkillDownloadPhase.class); + private static final Duration EXEC_TIMEOUT = Duration.ofSeconds(60); + private static final String NACOS_ENV_DIR = ".nacos"; + + private static final Map PROVIDER_SKILLS_DIR = + Map.of( + "qodercli", ".qoder/skills/", + "claude-code", ".claude/skills/", + "qwen-code", ".qwen/skills/", + "opencode", ".opencode/skills/"); + + @Override + public String name() { + return "skill-download"; + } + + @Override + public int order() { + return 350; + } + + @Override + public boolean shouldExecute(InitContext context) { + ResolvedSessionConfig resolved = context.getResolvedSessionConfig(); + return resolved != null && resolved.getSkills() != null && !resolved.getSkills().isEmpty(); + } + + @Override + public void execute(InitContext context) throws 
InitPhaseException { + ResolvedSessionConfig resolved = context.getResolvedSessionConfig(); + List skills = resolved.getSkills(); + String providerKey = context.getRuntimeConfig().getProviderKey(); + String skillsDir = PROVIDER_SKILLS_DIR.getOrDefault(providerKey, "skills/"); + SandboxProvider provider = context.getProvider(); + SandboxInfo info = context.getSandboxInfo(); + + // exec 不会自动转换相对路径,需要使用绝对路径(writeFile 会做转换,exec 不会) + String workspacePath = info.workspacePath(); + + // 按 nacosId 分组 + Map> byNacosId = + skills.stream() + .collect( + Collectors.groupingBy( + ResolvedSessionConfig.ResolvedSkillEntry::getNacosId, + LinkedHashMap::new, + Collectors.toList())); + + logger.info( + "[SkillDownload] 开始下载 {} 个 Skill ({} 个 Nacos 实例), provider={}, skillsDir={}", + skills.size(), + byNacosId.size(), + providerKey, + skillsDir); + + int successGroups = 0; + for (var entry : byNacosId.entrySet()) { + String nacosId = entry.getKey(); + List skillNames = + entry.getValue().stream() + .map(ResolvedSessionConfig.ResolvedSkillEntry::getSkillName) + .toList(); + // 使用绝对路径,因为 exec 的 cwd 可能不是用户的 workspace 目录 + String nacosEnvPath = + toAbsolutePath( + workspacePath, NACOS_ENV_DIR + "/nacos-env-" + nacosId + ".yaml"); + String absoluteSkillsDir = toAbsolutePath(workspacePath, skillsDir); + + // 构建参数: skill-get skill1 skill2 ... 
--config path -o dir + List args = new ArrayList<>(); + args.add("skill-get"); + args.addAll(skillNames); + args.add("--config"); + args.add(nacosEnvPath); + args.add("-o"); + args.add(absoluteSkillsDir); + + try { + ExecResult result = provider.exec(info, "nacos-cli", args, EXEC_TIMEOUT); + + if (result.exitCode() != 0) { + logger.warn( + "[SkillDownload] Skill 下载失败: nacosId={}, skills={}, exitCode={}," + + " stderr={}", + nacosId, + skillNames, + result.exitCode(), + result.stderr()); + } else { + successGroups++; + logger.info( + "[SkillDownload] Skill 下载成功: nacosId={}, skills={}", + nacosId, + skillNames); + } + } catch (Exception e) { + logger.warn( + "[SkillDownload] Skill 下载异常: nacosId={}, skills={}, error={}", + nacosId, + skillNames, + e.getMessage()); + } + } + + logger.info("[SkillDownload] 下载完成: {}/{} 分组成功", successGroups, byNacosId.size()); + } + + @Override + public boolean verify(InitContext context) { + return true; + } + + @Override + public RetryPolicy retryPolicy() { + return RetryPolicy.none(); + } + + /** + * 将相对路径转换为基于 workspacePath 的绝对路径。 + * 与 RemoteSandboxProvider.toAbsolutePath 逻辑一致。 + */ + private static String toAbsolutePath(String workspacePath, String relativePath) { + if (workspacePath == null || workspacePath.isEmpty()) { + return relativePath; + } + String cleaned = relativePath; + if (cleaned.startsWith("./")) { + cleaned = cleaned.substring(2); + } else if (cleaned.startsWith("/")) { + Path normalized = Paths.get(cleaned).normalize(); + if (!normalized.startsWith(Paths.get(workspacePath).normalize())) { + throw new SecurityException("路径越界: " + relativePath); + } + return normalized.toString(); + } + String full = + workspacePath.endsWith("/") + ? 
workspacePath + cleaned + : workspacePath + "/" + cleaned; + Path normalized = Paths.get(full).normalize(); + if (!normalized.startsWith(Paths.get(workspacePath).normalize())) { + throw new SecurityException("路径越界: " + relativePath); + } + return normalized.toString(); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/CliSessionConfig.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/CliSessionConfig.java new file mode 100644 index 000000000..996eb9012 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/CliSessionConfig.java @@ -0,0 +1,43 @@ +package com.alibaba.himarket.service.hicoding.session; + +import java.util.List; +import lombok.Data; + +/** + * CLI 会话配置,前端传入的纯标识符 DTO。 + * 承载 modelProductId、mcpServers[].productId、skills[].productId、认证凭据等, + * 通过 WebSocket 消息传递给后端。 + */ +@Data +public class CliSessionConfig { + + /** 市场模型产品 ID(可选) */ + private String modelProductId; + + /** 选中的 MCP Server 列表(简化为标识符) */ + private List mcpServers; + + /** 选中的 Skill 列表(简化为标识符) */ + private List skills; + + /** 认证凭据(PAT / API Key),用于注入到 CLI 进程环境变量中(可选) */ + private String authToken; + + @Data + public static class McpServerEntry { + /** MCP 产品 ID */ + private String productId; + + /** MCP 服务名称 */ + private String name; + } + + @Data + public static class SkillEntry { + /** Skill 产品 ID */ + private String productId; + + /** Skill 名称 */ + private String name; + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/CustomModelConfig.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/CustomModelConfig.java new file mode 100644 index 000000000..f365dee11 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/CustomModelConfig.java @@ -0,0 +1,63 @@ +package com.alibaba.himarket.service.hicoding.session; + +import java.util.List; +import java.util.Set; 
+import lombok.Data; + +/** + * 自定义模型配置,包含模型接入点 URL、API Key、模型 ID、显示名称和协议类型。 + */ +@Data +public class CustomModelConfig { + + private static final Set ALLOWED_PROTOCOL_TYPES = + Set.of("openai", "anthropic", "gemini"); + + /** 模型接入点 URL */ + private String baseUrl; + + /** API Key */ + private String apiKey; + + /** 模型 ID */ + private String modelId; + + /** 模型显示名称 */ + private String modelName; + + /** 协议类型: openai | anthropic | gemini,默认 openai */ + private String protocolType = "openai"; + + /** + * 校验配置的合法性。 + * + * @return 校验错误信息列表,为空表示校验通过 + */ + public List validate() { + java.util.ArrayList errors = new java.util.ArrayList<>(); + + // 校验 baseUrl:非空且为合法 URL(以 http:// 或 https:// 开头) + if (baseUrl == null || baseUrl.isBlank()) { + errors.add("baseUrl 不能为空"); + } else if (!baseUrl.startsWith("http://") && !baseUrl.startsWith("https://")) { + errors.add("baseUrl 格式不合法,必须以 http:// 或 https:// 开头"); + } + + // 校验 apiKey:非空 + if (apiKey == null || apiKey.isBlank()) { + errors.add("apiKey 不能为空,缺少凭证"); + } + + // 校验 modelId:非空 + if (modelId == null || modelId.isBlank()) { + errors.add("modelId 不能为空,缺少模型标识"); + } + + // 校验 protocolType:必须在允许范围内 + if (protocolType == null || !ALLOWED_PROTOCOL_TYPES.contains(protocolType)) { + errors.add("protocolType 必须是 openai、anthropic、gemini 之一"); + } + + return errors; + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/McpConfigResolver.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/McpConfigResolver.java new file mode 100644 index 000000000..537e95221 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/McpConfigResolver.java @@ -0,0 +1,142 @@ +package com.alibaba.himarket.service.hicoding.session; + +import com.alibaba.himarket.core.security.ContextHolder; +import com.alibaba.himarket.dto.result.consumer.CredentialContext; +import com.alibaba.himarket.dto.result.product.ProductResult; +import 
com.alibaba.himarket.service.ConsumerService; +import com.alibaba.himarket.service.ProductService; +import com.alibaba.himarket.support.chat.mcp.MCPTransportConfig; +import com.alibaba.himarket.support.enums.MCPTransportMode; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Service; + +/** + * 根据 MCP 产品 ID 列表解析完整 MCP 连接配置的服务。 + * + *

<p>复用 {@code CliProviderController.buildMarketMcpInfo()} 和 {@code extractAuthHeaders()} 中的逻辑:
 + *
 + * <ol>
 + *   <li>批量获取产品详情</li>
 + *   <li>通过 {@code product.getMcpConfig().toTransportConfig()} 提取 url 和 transportType</li>
 + *   <li>通过 {@code ConsumerService.getDefaultCredential()} 提取认证请求头</li>
 + *   <li>组装 ResolvedMcpEntry</li>
 + * </ol>
+ */ +@Service +@RequiredArgsConstructor +@Slf4j +public class McpConfigResolver { + + private final ConsumerService consumerService; + private final ProductService productService; + private final ContextHolder contextHolder; + + /** + * 根据 MCP 产品 ID 列表解析完整 MCP 连接配置。 + * + * @param mcpEntries 前端传入的 MCP 标识符列表 + * @return 解析后的 ResolvedMcpEntry 列表(解析失败的条目被跳过) + */ + public List resolve( + List mcpEntries) { + if (mcpEntries == null || mcpEntries.isEmpty()) { + return Collections.emptyList(); + } + + // 1. 批量获取产品详情 + List productIds = + mcpEntries.stream() + .map(CliSessionConfig.McpServerEntry::getProductId) + .collect(Collectors.toList()); + Map productMap = productService.getProducts(productIds); + + // 2. 获取认证头(所有 MCP 共用同一个开发者的认证信息) + Map authHeaders = extractAuthHeaders(); + + // 3. 逐个解析 MCP 配置 + List result = new ArrayList<>(); + for (CliSessionConfig.McpServerEntry entry : mcpEntries) { + ResolvedSessionConfig.ResolvedMcpEntry resolved = + resolveEntry(entry, productMap, authHeaders); + if (resolved != null) { + result.add(resolved); + } + } + return result; + } + + private ResolvedSessionConfig.ResolvedMcpEntry resolveEntry( + CliSessionConfig.McpServerEntry entry, + Map productMap, + Map authHeaders) { + String productId = entry.getProductId(); + ProductResult product = productMap.get(productId); + + if (product == null) { + log.warn("MCP product not found, skipping: productId={}", productId); + return null; + } + + if (product.getMcpConfig() == null) { + log.warn( + "Product mcpConfig is incomplete, skipping: productId={}, name={}", + productId, + product.getName()); + return null; + } + + try { + MCPTransportConfig transportConfig = product.getMcpConfig().toTransportConfig(); + if (transportConfig == null) { + log.warn( + "Failed to extract transport config from product, skipping: productId={}," + + " name={}", + productId, + product.getName()); + return null; + } + + String transportType = + transportConfig.getTransportMode() == 
MCPTransportMode.STREAMABLE_HTTP + ? "streamable-http" + : "sse"; + + ResolvedSessionConfig.ResolvedMcpEntry resolved = + new ResolvedSessionConfig.ResolvedMcpEntry(); + resolved.setName(entry.getName()); + resolved.setUrl(transportConfig.getUrl()); + resolved.setTransportType(transportType); + resolved.setHeaders(authHeaders); + return resolved; + } catch (Exception e) { + log.warn( + "Error processing mcpConfig for product, skipping: productId={}, name={}," + + " error={}", + productId, + product.getName(), + e.getMessage()); + return null; + } + } + + /** + * 提取当前开发者的认证请求头。 + * 复用 CliProviderController.extractAuthHeaders() 逻辑。 + */ + private Map extractAuthHeaders() { + try { + CredentialContext credentialContext = + consumerService.getDefaultCredential(contextHolder.getUser()); + Map headers = credentialContext.copyHeaders(); + return headers.isEmpty() ? null : headers; + } catch (Exception e) { + log.debug("Failed to get auth headers: {}", e.getMessage()); + return null; + } + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/ModelConfigResolver.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/ModelConfigResolver.java new file mode 100644 index 000000000..1620d6e04 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/ModelConfigResolver.java @@ -0,0 +1,179 @@ +package com.alibaba.himarket.service.hicoding.session; + +import com.alibaba.himarket.dto.result.consumer.ConsumerCredentialResult; +import com.alibaba.himarket.dto.result.consumer.ConsumerResult; +import com.alibaba.himarket.dto.result.model.ModelConfigResult; +import com.alibaba.himarket.dto.result.product.ProductResult; +import com.alibaba.himarket.dto.result.product.SubscriptionResult; +import com.alibaba.himarket.service.ConsumerService; +import com.alibaba.himarket.service.ProductService; +import com.alibaba.himarket.service.hicoding.cli.ProtocolTypeMapper; +import 
com.alibaba.himarket.service.hicoding.filesystem.BaseUrlExtractor; +import com.alibaba.himarket.support.consumer.ApiKeyConfig; +import com.alibaba.himarket.support.enums.SubscriptionStatus; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Service; + +/** + * 根据市场产品 ID 解析完整模型配置的服务。 + * + *

解析流程: + *

    + *
  1. 获取当前开发者的 Primary Consumer
  2. + *
  3. 获取订阅列表并筛选 APPROVED 状态
  4. + *
  5. 获取产品详情,提取 baseUrl、protocolType、modelId
  6. + *
  7. 获取 apiKey
  8. + *
  9. 组装 CustomModelConfig
  10. + *
+ */ +@Service +@RequiredArgsConstructor +@Slf4j +public class ModelConfigResolver { + + private final ConsumerService consumerService; + private final ProductService productService; + + /** + * 根据市场产品 ID 解析完整模型配置。 + * + * @param modelProductId 市场产品 ID + * @param userId 开发者 ID(用于异步线程上下文,避免依赖 SecurityContextHolder) + * @return 解析后的 CustomModelConfig,解析失败时返回 null + */ + public CustomModelConfig resolve(String modelProductId, String userId) { + log.info("[ModelConfigResolver] ===== 开始解析 ===== modelProductId={}", modelProductId); + + // 1. 获取 Primary Consumer + ConsumerResult consumer; + try { + consumer = consumerService.getPrimaryConsumer(userId); + log.info( + "[ModelConfigResolver] Primary Consumer 获取成功: consumerId={}", + consumer.getConsumerId()); + } catch (Exception e) { + log.warn("[ModelConfigResolver] 无法获取 Primary Consumer: {}", e.getMessage()); + return null; + } + + String consumerId = consumer.getConsumerId(); + + // 2. 获取订阅列表,筛选 APPROVED 状态 + List subscriptions = + consumerService.listConsumerSubscriptions(consumerId); + List approvedProductIds = + subscriptions.stream() + .filter(s -> SubscriptionStatus.APPROVED.name().equals(s.getStatus())) + .map(SubscriptionResult::getProductId) + .collect(Collectors.toList()); + + log.info( + "[ModelConfigResolver] 订阅列表: total={}, approved={}", + subscriptions.size(), + approvedProductIds.size()); + + if (!approvedProductIds.contains(modelProductId)) { + log.warn("[ModelConfigResolver] 产品未订阅或订阅未批准: modelProductId={}", modelProductId); + return null; + } + + // 3. 获取产品详情 + Map productMap = productService.getProducts(List.of(modelProductId)); + ProductResult product = productMap.get(modelProductId); + if (product == null) { + log.warn("[ModelConfigResolver] 产品不存在: modelProductId={}", modelProductId); + return null; + } + + log.info( + "[ModelConfigResolver] 产品获取成功: name={}, type={}", + product.getName(), + product.getType()); + + // 4. 
提取 baseUrl + ModelConfigResult modelConfig = product.getModelConfig(); + if (modelConfig == null || modelConfig.getModelAPIConfig() == null) { + log.warn( + "[ModelConfigResolver] 产品 modelConfig 不完整: modelProductId={}, name={}", + modelProductId, + product.getName()); + return null; + } + + String baseUrl = BaseUrlExtractor.extract(modelConfig.getModelAPIConfig().getRoutes()); + if (baseUrl == null) { + log.warn( + "[ModelConfigResolver] 无法从路由中提取 baseUrl: modelProductId={}, name={}", + modelProductId, + product.getName()); + return null; + } + + log.info("[ModelConfigResolver] baseUrl 提取成功: {}", baseUrl); + + // 5. 提取 protocolType + String protocolType = + ProtocolTypeMapper.map(modelConfig.getModelAPIConfig().getAiProtocols()); + + // 6. 提取 modelId + String modelId = null; + if (product.getFeature() != null + && product.getFeature().getModelFeature() != null + && product.getFeature().getModelFeature().getModel() != null) { + modelId = product.getFeature().getModelFeature().getModel(); + } + + // 7. 提取 apiKey + String apiKey = extractApiKey(consumerId); + if (apiKey == null) { + log.warn( + "Failed to extract apiKey: modelProductId={}, consumerId={}", + modelProductId, + consumerId); + return null; + } + + // 8. 
组装 CustomModelConfig + CustomModelConfig config = new CustomModelConfig(); + config.setBaseUrl(baseUrl); + config.setApiKey(apiKey); + config.setModelId(modelId); + config.setModelName(product.getName()); + config.setProtocolType(protocolType); + return config; + } + + private String extractApiKey(String consumerId) { + log.info("[ModelConfigResolver] 开始提取 apiKey: consumerId={}", consumerId); + try { + ConsumerCredentialResult credential = consumerService.getCredential(consumerId); + if (credential == null) { + log.warn("[ModelConfigResolver] credential 为 null: consumerId={}", consumerId); + return null; + } + if (credential.getApiKeyConfig() == null) { + log.warn("[ModelConfigResolver] apiKeyConfig 为 null: consumerId={}", consumerId); + return null; + } + ApiKeyConfig apiKeyConfig = credential.getApiKeyConfig(); + if (apiKeyConfig.getCredentials() == null || apiKeyConfig.getCredentials().isEmpty()) { + log.warn("[ModelConfigResolver] credentials 为空: consumerId={}", consumerId); + return null; + } + String apiKey = apiKeyConfig.getCredentials().get(0).getApiKey(); + log.info("[ModelConfigResolver] apiKey 提取成功: consumerId={}", consumerId); + return apiKey; + } catch (Exception e) { + log.warn( + "[ModelConfigResolver] 提取 apiKey 失败: consumerId={}, error={}", + consumerId, + e.getMessage(), + e); + return null; + } + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/ResolvedSessionConfig.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/ResolvedSessionConfig.java new file mode 100644 index 000000000..55940b24a --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/ResolvedSessionConfig.java @@ -0,0 +1,58 @@ +package com.alibaba.himarket.service.hicoding.session; + +import java.util.List; +import java.util.Map; +import lombok.Data; + +/** + * 后端解析后的完整会话配置 DTO。 + * 由 CliSessionConfig(纯标识符)经后端解析服务填充而成,供 CliConfigGenerator 使用。 + */ +@Data +public class 
ResolvedSessionConfig { + + /** 解析后的完整模型配置(可能为 null) */ + private CustomModelConfig customModelConfig; + + /** 解析后的 MCP Server 列表(含完整连接信息) */ + private List mcpServers; + + /** 解析后的 Skill 列表(含坐标+凭证) */ + private List skills; + + /** 认证凭据(直接透传) */ + private String authToken; + + @Data + public static class ResolvedMcpEntry { + /** MCP 服务名称 */ + private String name; + + /** MCP 端点 URL */ + private String url; + + /** 传输协议类型:sse 或 streamable-http */ + private String transportType; + + /** 认证请求头(可能为 null) */ + private Map headers; + } + + @Data + public static class ResolvedSkillEntry { + /** Skill 名称 */ + private String name; + + // Skill 坐标 + private String nacosId; + private String namespace; + private String skillName; + + // Nacos 凭证 + private String serverAddr; + private String username; + private String password; + private String accessKey; + private String secretKey; + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/SessionConfigResolver.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/SessionConfigResolver.java new file mode 100644 index 000000000..f71f0481d --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/SessionConfigResolver.java @@ -0,0 +1,146 @@ +package com.alibaba.himarket.service.hicoding.session; + +import com.alibaba.himarket.entity.NacosInstance; +import com.alibaba.himarket.entity.Product; +import com.alibaba.himarket.repository.ProductRepository; +import com.alibaba.himarket.service.NacosService; +import com.alibaba.himarket.support.product.ProductFeature; +import com.alibaba.himarket.support.product.SkillConfig; +import java.util.ArrayList; +import java.util.List; +import lombok.RequiredArgsConstructor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Service; + +/** + * 会话配置解析服务。 + * + *

将前端传入的标识符({@link CliSessionConfig})解析为完整的配置信息({@link ResolvedSessionConfig})。 + */ +@Service +@RequiredArgsConstructor +public class SessionConfigResolver { + + private static final Logger logger = LoggerFactory.getLogger(SessionConfigResolver.class); + + private final ModelConfigResolver modelConfigResolver; + private final McpConfigResolver mcpConfigResolver; + private final ProductRepository productRepository; + private final NacosService nacosService; + + public ResolvedSessionConfig resolve(CliSessionConfig sessionConfig, String userId) { + ResolvedSessionConfig resolved = new ResolvedSessionConfig(); + resolved.setAuthToken(sessionConfig.getAuthToken()); + resolveModelConfig(sessionConfig, userId, resolved); + resolveMcpConfig(sessionConfig, resolved); + resolveSkillConfig(sessionConfig, resolved); + return resolved; + } + + private void resolveModelConfig( + CliSessionConfig config, String userId, ResolvedSessionConfig resolved) { + if (config.getModelProductId() == null || config.getModelProductId().isBlank()) { + logger.info("[Sandbox-Config] 未提供 modelProductId,跳过模型配置解析"); + return; + } + logger.info("[Sandbox-Config] 开始解析模型配置: modelProductId={}", config.getModelProductId()); + try { + CustomModelConfig customModelConfig = + modelConfigResolver.resolve(config.getModelProductId(), userId); + if (customModelConfig != null) { + resolved.setCustomModelConfig(customModelConfig); + logger.info( + "[Sandbox-Config] 模型配置解析成功: modelProductId={}, baseUrl={}, hasApiKey={}", + config.getModelProductId(), + customModelConfig.getBaseUrl(), + customModelConfig.getApiKey() != null); + } else { + logger.warn( + "[Sandbox-Config] 模型配置解析返回 null: modelProductId={}", + config.getModelProductId()); + } + } catch (Exception e) { + logger.error( + "[Sandbox-Config] 模型配置解析失败: modelProductId={}, error={}", + config.getModelProductId(), + e.getMessage(), + e); + } + } + + private void resolveMcpConfig(CliSessionConfig config, ResolvedSessionConfig resolved) { + if 
(config.getMcpServers() == null || config.getMcpServers().isEmpty()) { + return; + } + try { + List resolvedMcpServers = + mcpConfigResolver.resolve(config.getMcpServers()); + resolved.setMcpServers(resolvedMcpServers); + } catch (Exception e) { + logger.error("[Sandbox-Config] MCP 配置解析失败: error={}", e.getMessage(), e); + } + } + + /** + * 解析 Skill 配置:从 Product 读取 SkillConfig 坐标 + 从 NacosInstance 提取凭证。 + */ + private void resolveSkillConfig(CliSessionConfig config, ResolvedSessionConfig resolved) { + if (config.getSkills() == null || config.getSkills().isEmpty()) { + return; + } + List resolvedSkills = new ArrayList<>(); + for (CliSessionConfig.SkillEntry skillEntry : config.getSkills()) { + if (skillEntry.getProductId() == null || skillEntry.getProductId().isBlank()) { + continue; + } + try { + Product product = + productRepository.findByProductId(skillEntry.getProductId()).orElse(null); + if (product == null) { + logger.warn( + "[Sandbox-Config] Skill Product 不存在, 跳过: productId={}", + skillEntry.getProductId()); + continue; + } + ProductFeature feature = product.getFeature(); + if (feature == null || feature.getSkillConfig() == null) { + logger.warn( + "[Sandbox-Config] Skill Product 无 SkillConfig, 跳过: productId={}", + skillEntry.getProductId()); + continue; + } + SkillConfig skillConfig = feature.getSkillConfig(); + if (skillConfig.getNacosId() == null || skillConfig.getSkillName() == null) { + logger.warn( + "[Sandbox-Config] SkillConfig 坐标不完整, 跳过: productId={}", + skillEntry.getProductId()); + continue; + } + + NacosInstance nacos = nacosService.findNacosInstanceById(skillConfig.getNacosId()); + + ResolvedSessionConfig.ResolvedSkillEntry resolvedSkill = + new ResolvedSessionConfig.ResolvedSkillEntry(); + resolvedSkill.setName(skillEntry.getName()); + resolvedSkill.setNacosId(skillConfig.getNacosId()); + resolvedSkill.setNamespace(skillConfig.getNamespace()); + resolvedSkill.setSkillName(skillConfig.getSkillName()); + 
resolvedSkill.setServerAddr(nacos.getServerUrl()); + resolvedSkill.setUsername(nacos.getUsername()); + resolvedSkill.setPassword(nacos.getPassword()); + resolvedSkill.setAccessKey(nacos.getAccessKey()); + resolvedSkill.setSecretKey(nacos.getSecretKey()); + resolvedSkills.add(resolvedSkill); + } catch (Exception e) { + logger.error( + "[Sandbox-Config] Skill 坐标解析失败, 跳过: productId={}, name={}, error={}", + skillEntry.getProductId(), + skillEntry.getName(), + e.getMessage(), + e); + } + } + resolved.setSkills(resolvedSkills); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/SessionInitializer.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/SessionInitializer.java new file mode 100644 index 000000000..076bf19b6 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/session/SessionInitializer.java @@ -0,0 +1,225 @@ +package com.alibaba.himarket.service.hicoding.session; + +import com.alibaba.himarket.config.AcpProperties.CliProviderConfig; +import com.alibaba.himarket.service.hicoding.cli.ConfigFileBuilder; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeAdapter; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeConfig; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxConfig; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxInfo; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxProvider; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxProviderRegistry; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxType; +import com.alibaba.himarket.service.hicoding.sandbox.init.ConfigInjectionPhase; +import com.alibaba.himarket.service.hicoding.sandbox.init.FileSystemReadyPhase; +import com.alibaba.himarket.service.hicoding.sandbox.init.InitConfig; +import com.alibaba.himarket.service.hicoding.sandbox.init.InitContext; +import 
com.alibaba.himarket.service.hicoding.sandbox.init.InitErrorCode; +import com.alibaba.himarket.service.hicoding.sandbox.init.InitResult; +import com.alibaba.himarket.service.hicoding.sandbox.init.SandboxAcquirePhase; +import com.alibaba.himarket.service.hicoding.sandbox.init.SandboxInitPipeline; +import com.alibaba.himarket.service.hicoding.sandbox.init.SidecarConnectPhase; +import com.alibaba.himarket.service.hicoding.sandbox.init.SkillDownloadPhase; +import java.time.Duration; +import java.util.List; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Component; +import org.springframework.web.socket.WebSocketSession; + +/** + * 会话初始化器。 + * + *

编排沙箱初始化的完整流程:获取 Provider → 注入 authToken → 解析配置 → + * 构建 SandboxConfig/InitContext → 执行 Pipeline → 返回结果。 + * + *

从 {@code HiCodingWebSocketHandler.initSandboxAsync()} 中提取, + * 不包含 WebSocket 消息发送、stdout 订阅、连接状态管理等职责。 + */ +@Component +public class SessionInitializer { + + private static final Logger logger = LoggerFactory.getLogger(SessionInitializer.class); + + private final SessionConfigResolver configResolver; + private final ConfigFileBuilder configFileBuilder; + private final SandboxProviderRegistry providerRegistry; + + public SessionInitializer( + SessionConfigResolver configResolver, + ConfigFileBuilder configFileBuilder, + SandboxProviderRegistry providerRegistry) { + this.configResolver = configResolver; + this.configFileBuilder = configFileBuilder; + this.providerRegistry = providerRegistry; + } + + /** + * 初始化结果。 + */ + public record InitializationResult( + boolean success, + RuntimeAdapter adapter, + SandboxInfo sandboxInfo, + InitErrorCode errorCode, + String errorMessage, + String failedPhase, + boolean retryable, + Duration totalDuration) {} + + /** + * 执行沙箱初始化。 + * + * @param userId 用户 ID + * @param providerKey CLI 提供者标识 + * @param providerConfig CLI 提供者配置 + * @param runtimeConfig 运行时配置 + * @param sessionConfig 前端传入的会话配置(可为 null) + * @param sandboxType 沙箱类型 + * @param frontendSession 前端 WebSocket session(传递给 InitContext,用于阶段内推送进度) + * @return 初始化结果 + */ + public InitializationResult initialize( + String userId, + String providerKey, + CliProviderConfig providerConfig, + RuntimeConfig runtimeConfig, + CliSessionConfig sessionConfig, + SandboxType sandboxType, + WebSocketSession frontendSession) { + + try { + // 1. 获取 Provider + SandboxProvider provider = providerRegistry.getProvider(sandboxType); + + // 2. 注入 authToken 到 CLI 进程环境变量 + injectAuthToken(sessionConfig, providerConfig, runtimeConfig, providerKey); + + // 3. 解析配置(仅在 sessionConfig 非空且 provider 支持自定义模型时) + ResolvedSessionConfig resolved = null; + if (sessionConfig != null && providerConfig.isSupportsCustomModel()) { + resolved = configResolver.resolve(sessionConfig, userId); + } + + // 4. 
构建 SandboxConfig + SandboxConfig sandboxConfig = + new SandboxConfig( + userId, + sandboxType, + runtimeConfig.getCwd(), + runtimeConfig.getEnv() != null ? runtimeConfig.getEnv() : Map.of(), + Map.of(), + null); + + // 5. 构建 InitContext(设置 resolvedSessionConfig) + InitContext context = + new InitContext( + provider, + userId, + sandboxConfig, + runtimeConfig, + providerConfig, + sessionConfig, + frontendSession); + context.setResolvedSessionConfig(resolved); + + // 6. 构建 Pipeline(传入 ConfigFileBuilder 给 ConfigInjectionPhase) + SandboxInitPipeline pipeline = + new SandboxInitPipeline( + List.of( + new SandboxAcquirePhase(), + new FileSystemReadyPhase(), + new ConfigInjectionPhase(configFileBuilder), + new SkillDownloadPhase(), + new SidecarConnectPhase()), + InitConfig.defaults()); + + // 7. 执行 Pipeline + InitResult result = pipeline.execute(context); + + // 8. 转换为 InitializationResult + return toInitializationResult(result, context); + + } catch (Exception e) { + logger.error( + "[SessionInitializer] 初始化异常: userId={}, provider={}, error={}", + userId, + providerKey, + e.getMessage(), + e); + return new InitializationResult( + false, + null, + null, + InitErrorCode.UNKNOWN_ERROR, + e.getMessage(), + null, + false, + Duration.ZERO); + } + } + + /** + * 注入 authToken 到 CLI 进程环境变量。 + */ + private void injectAuthToken( + CliSessionConfig sessionConfig, + CliProviderConfig providerConfig, + RuntimeConfig runtimeConfig, + String providerKey) { + if (sessionConfig == null || sessionConfig.getAuthToken() == null) { + return; + } + if (providerConfig.getAuthEnvVar() != null) { + if (runtimeConfig.getEnv() != null) { + runtimeConfig + .getEnv() + .put(providerConfig.getAuthEnvVar(), sessionConfig.getAuthToken()); + logger.info( + "[SessionInitializer] authToken injected to env var '{}' for provider" + + " '{}', current env size: {}", + providerConfig.getAuthEnvVar(), + providerKey, + runtimeConfig.getEnv().size()); + } else { + logger.error( + "[SessionInitializer] 
runtimeConfig.getEnv() is null, cannot inject" + + " authToken"); + } + } else { + logger.warn( + "[SessionInitializer] Received authToken but authEnvVar is not configured" + + " for provider: {}, ignoring authToken", + providerKey); + } + } + + /** + * 将 Pipeline 的 InitResult 转换为 InitializationResult。 + */ + private InitializationResult toInitializationResult(InitResult result, InitContext context) { + if (result.success()) { + return new InitializationResult( + true, + context.getRuntimeAdapter(), + context.getSandboxInfo(), + null, + null, + null, + false, + result.totalDuration()); + } + + InitErrorCode errorCode = InitErrorCode.fromPhaseName(result.failedPhase()); + + return new InitializationResult( + false, + null, + context.getSandboxInfo(), + errorCode, + result.errorMessage(), + result.failedPhase(), + false, + result.totalDuration()); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/terminal/RemoteTerminalBackend.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/terminal/RemoteTerminalBackend.java new file mode 100644 index 000000000..7d4f26d6f --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/terminal/RemoteTerminalBackend.java @@ -0,0 +1,292 @@ +package com.alibaba.himarket.service.hicoding.terminal; + +import java.io.IOException; +import java.net.URI; +import java.nio.charset.StandardCharsets; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Executors; +import java.util.concurrent.RejectedExecutionException; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.web.reactive.socket.WebSocketMessage; +import 
org.springframework.web.reactive.socket.client.ReactorNettyWebSocketClient; +import reactor.core.Disposable; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.core.publisher.Sinks; + +/** + * 远程终端后端。 + * 通过 WebSocket 连接 Sidecar 的 /terminal 端点, + * Sidecar 端使用 node-pty 提供交互式 PTY shell。 + * 不依赖 K8s API,适用于任意可达的 Sidecar 服务。 + */ +public class RemoteTerminalBackend implements TerminalBackend { + + private static final Logger logger = LoggerFactory.getLogger(RemoteTerminalBackend.class); + private static final long HEARTBEAT_INTERVAL_SECONDS = 30; + private static final int MAX_RECONNECT_ATTEMPTS = 5; + private static final long MAX_BACKOFF_MS = 30_000; + + private final String host; + private final int port; + private final String cwd; + + private final Sinks.Many outputSink = Sinks.many().multicast().onBackpressureBuffer(); + private final Sinks.Many sendSink = Sinks.many().unicast().onBackpressureBuffer(); + private final AtomicReference + wsSessionRef = new AtomicReference<>(); + private Disposable wsConnection; + private volatile boolean closed = false; + + // 心跳保活 + private final ScheduledExecutorService scheduler = + Executors.newScheduledThreadPool( + 1, + r -> { + Thread t = new Thread(r, "remote-terminal-scheduler"); + t.setDaemon(true); + return t; + }); + private ScheduledFuture pingFuture; + + // 断连重连 + private final AtomicInteger reconnectAttempts = new AtomicInteger(0); + private volatile boolean reconnecting = false; + private volatile int lastCols; + private volatile int lastRows; + + public RemoteTerminalBackend(String host, int port, String cwd) { + this.host = host; + this.port = port; + this.cwd = cwd; + } + + @Override + public void start(int cols, int rows) throws IOException { + this.lastCols = cols; + this.lastRows = rows; + doConnect(cols, rows, true); + logger.info("[RemoteTerminal] Connected to sidecar terminal"); + startHeartbeat(); + } + + private void doConnect(int cols, int rows, boolean blocking) 
throws IOException { + String uriStr = + String.format( + "ws://%s:%d/terminal?cols=%d&rows=%d&cwd=%s", + host, + port, + cols, + rows, + java.net.URLEncoder.encode(cwd, StandardCharsets.UTF_8)); + URI wsUri = URI.create(uriStr); + + logger.info("[RemoteTerminal] Connecting to {}", wsUri); + + ReactorNettyWebSocketClient wsClient = new ReactorNettyWebSocketClient(); + CountDownLatch connectedLatch = blocking ? new CountDownLatch(1) : null; + + wsConnection = + wsClient.execute( + wsUri, + session -> { + wsSessionRef.set(session); + if (connectedLatch != null) { + connectedLatch.countDown(); + } + + Flux outgoing = + sendSink.asFlux().map(session::textMessage); + + return session.send(outgoing) + .and( + session.receive() + .doOnNext( + msg -> { + var buf = msg.getPayload(); + byte[] data = + new byte + [buf + .readableByteCount()]; + buf.read(data); + outputSink.tryEmitNext( + data); + }) + .doOnComplete( + () -> { + if (!closed) { + logger.info( + "[RemoteTerminal]" + + " Connection" + + " closed" + + " by sidecar"); + scheduleReconnect(); + } + }) + .doOnError( + err -> { + if (!closed) { + logger.error( + "[RemoteTerminal]" + + " Connection" + + " error", + err); + scheduleReconnect(); + } + }) + .then()); + }) + .subscribe(); + + if (blocking) { + try { + if (!connectedLatch.await(10, TimeUnit.SECONDS)) { + throw new IOException("连接远程终端超时: " + wsUri); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new IOException("连接远程终端被中断", e); + } + } + } + + private static final String HEARTBEAT_MSG = "{\"type\":\"heartbeat\"}"; + + private void startHeartbeat() { + stopHeartbeat(); + try { + pingFuture = + scheduler.scheduleAtFixedRate( + () -> { + try { + var session = wsSessionRef.get(); + if (session == null || !session.isOpen()) { + return; + } + session.send(Mono.just(session.textMessage(HEARTBEAT_MSG))) + .subscribe( + unused -> {}, + err -> + logger.warn( + "[RemoteTerminal] Heartbeat" + + " failed: {}", + err.getMessage())); 
+ } catch (Exception e) { + logger.warn( + "[RemoteTerminal] Heartbeat error: {}", e.getMessage()); + } + }, + HEARTBEAT_INTERVAL_SECONDS, + HEARTBEAT_INTERVAL_SECONDS, + TimeUnit.SECONDS); + } catch (RejectedExecutionException e) { + logger.debug("[RemoteTerminal] Scheduler already shutdown, skip heartbeat"); + } + } + + private void stopHeartbeat() { + if (pingFuture != null) { + pingFuture.cancel(false); + pingFuture = null; + } + } + + private void scheduleReconnect() { + if (closed || reconnecting) return; + reconnecting = true; + stopHeartbeat(); + + int attempt = reconnectAttempts.get(); + if (attempt >= MAX_RECONNECT_ATTEMPTS) { + logger.warn( + "[RemoteTerminal] Max reconnect attempts ({}) reached, giving up", + MAX_RECONNECT_ATTEMPTS); + reconnecting = false; + outputSink.tryEmitComplete(); + return; + } + + long delayMs = Math.min(1000L * (1L << attempt), MAX_BACKOFF_MS); + logger.info( + "[RemoteTerminal] Scheduling reconnect attempt {} in {}ms", attempt + 1, delayMs); + + try { + scheduler.schedule(this::doReconnect, delayMs, TimeUnit.MILLISECONDS); + } catch (RejectedExecutionException e) { + logger.debug("[RemoteTerminal] Scheduler shutdown, cannot reconnect"); + reconnecting = false; + outputSink.tryEmitComplete(); + } + } + + private void doReconnect() { + if (closed) { + reconnecting = false; + return; + } + + // 清理旧连接 + if (wsConnection != null && !wsConnection.isDisposed()) { + wsConnection.dispose(); + } + + try { + doConnect(lastCols, lastRows, true); + reconnectAttempts.set(0); + reconnecting = false; + startHeartbeat(); + logger.info("[RemoteTerminal] Reconnected successfully"); + } catch (IOException e) { + logger.warn("[RemoteTerminal] Reconnect failed: {}", e.getMessage()); + reconnectAttempts.incrementAndGet(); + reconnecting = false; + scheduleReconnect(); + } + } + + @Override + public void write(String data) throws IOException { + if (closed) throw new IOException("Remote terminal is closed"); + sendSink.tryEmitNext(data); + } + + 
@Override + public void resize(int cols, int rows) { + if (closed) return; + this.lastCols = cols; + this.lastRows = rows; + String resizeMsg = + String.format("{\"type\":\"resize\",\"cols\":%d,\"rows\":%d}", cols, rows); + sendSink.tryEmitNext(resizeMsg); + } + + @Override + public Flux output() { + return outputSink.asFlux(); + } + + @Override + public boolean isAlive() { + return !closed && wsSessionRef.get() != null; + } + + @Override + public void close() { + if (closed) return; + closed = true; + logger.info("[RemoteTerminal] Closing"); + stopHeartbeat(); + scheduler.shutdownNow(); + outputSink.tryEmitComplete(); + sendSink.tryEmitComplete(); + if (wsConnection != null && !wsConnection.isDisposed()) { + wsConnection.dispose(); + } + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/terminal/TerminalBackend.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/terminal/TerminalBackend.java new file mode 100644 index 000000000..d9f61e9a8 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/terminal/TerminalBackend.java @@ -0,0 +1,55 @@ +package com.alibaba.himarket.service.hicoding.terminal; + +import java.io.IOException; +import reactor.core.publisher.Flux; + +/** + * 终端后端抽象接口。 + * 统一本地 PTY 和 K8s Exec 两种终端实现的调用方式。 + */ +public interface TerminalBackend { + + /** + * 启动终端。 + * + * @param cols 终端列数 + * @param rows 终端行数 + * @throws IOException 启动失败时抛出 + */ + void start(int cols, int rows) throws IOException; + + /** + * 写入用户输入。 + * + * @param data 用户输入数据 + * @throws IOException 写入失败时抛出 + */ + void write(String data) throws IOException; + + /** + * 调整终端大小。 + * + * @param cols 新的列数 + * @param rows 新的行数 + */ + void resize(int cols, int rows); + + /** + * 终端输出的响应式流。 + * + * @return 终端输出字节数组的 Flux 流 + */ + Flux output(); + + /** + * 终端是否存活。 + * + * @return true 表示终端进程仍在运行 + */ + boolean isAlive(); + + /** + * 关闭终端,释放相关资源。 + */ + void close(); +} diff --git 
a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/terminal/TerminalProcess.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/terminal/TerminalProcess.java new file mode 100644 index 000000000..66255839c --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/terminal/TerminalProcess.java @@ -0,0 +1,166 @@ +package com.alibaba.himarket.service.hicoding.terminal; + +import com.pty4j.PtyProcess; +import com.pty4j.PtyProcessBuilder; +import com.pty4j.WinSize; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.charset.StandardCharsets; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Sinks; +import reactor.core.scheduler.Scheduler; +import reactor.core.scheduler.Schedulers; + +/** + * Manages an interactive shell process with PTY support. + * Uses pty4j to provide a proper pseudo-terminal so that interactive + * programs (tab completion, colors, Ctrl+C, etc.) work correctly. + */ +public class TerminalProcess { + + private static final Logger logger = LoggerFactory.getLogger(TerminalProcess.class); + + private final String cwd; + private PtyProcess process; + private OutputStream stdin; + private volatile boolean closed = false; + + private final Sinks.Many outputSink = Sinks.many().multicast().onBackpressureBuffer(); + private Scheduler readerScheduler; + + public TerminalProcess(String cwd) { + this.cwd = cwd; + } + + /** + * Start the shell process with PTY. 
+ */ + public void start(int initialCols, int initialRows) throws IOException { + String shell = "/bin/zsh"; + + Map env = new HashMap<>(System.getenv()); + env.put("TERM", "xterm-256color"); + env.put("COLORTERM", "truecolor"); + + PtyProcessBuilder builder = + new PtyProcessBuilder(new String[] {shell, "-l"}) + .setDirectory(cwd) + .setEnvironment(env) + .setInitialColumns(initialCols) + .setInitialRows(initialRows); + + this.process = builder.start(); + this.stdin = process.getOutputStream(); + + logger.info( + "Terminal process started: shell={}, cwd={}, pid={}, size={}x{}", + shell, + cwd, + process.pid(), + initialCols, + initialRows); + + // Background thread to read PTY output + this.readerScheduler = + Schedulers.fromExecutorService( + Executors.newSingleThreadExecutor( + r -> { + Thread t = new Thread(r, "terminal-reader"); + t.setDaemon(true); + return t; + })); + + readerScheduler.schedule( + () -> { + byte[] buffer = new byte[4096]; + try (InputStream is = process.getInputStream()) { + int bytesRead; + while (!closed && (bytesRead = is.read(buffer)) != -1) { + byte[] data = new byte[bytesRead]; + System.arraycopy(buffer, 0, data, 0, bytesRead); + outputSink.tryEmitNext(data); + } + } catch (IOException e) { + if (!closed) { + logger.error("Error reading terminal output", e); + } + } finally { + outputSink.tryEmitComplete(); + } + }); + } + + /** + * Write user input to the shell stdin. + */ + public synchronized void write(String data) throws IOException { + if (closed || stdin == null) { + throw new IOException("Terminal process is closed"); + } + stdin.write(data.getBytes(StandardCharsets.UTF_8)); + stdin.flush(); + } + + /** + * Resize the terminal. 
+ */ + public void resize(int cols, int rows) { + if (process == null || !process.isAlive()) return; + try { + process.setWinSize(new WinSize(cols, rows)); + logger.trace("Terminal resized to {}x{}", cols, rows); + } catch (Exception e) { + logger.warn("Failed to resize terminal: {}", e.getMessage()); + } + } + + /** + * Reactive stream of terminal output (raw bytes). + */ + public Flux output() { + return outputSink.asFlux(); + } + + /** + * Check if the shell process is still alive. + */ + public boolean isAlive() { + return process != null && process.isAlive(); + } + + /** + * Gracefully close the shell process. + */ + public void close() { + if (closed) return; + closed = true; + logger.info("Closing terminal process"); + + outputSink.tryEmitComplete(); + + if (process != null) { + process.destroy(); + try { + boolean exited = process.waitFor(5, TimeUnit.SECONDS); + if (!exited) { + logger.warn("Terminal process did not exit in time, force killing"); + process.destroyForcibly(); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + logger.info( + "Terminal process stopped (exit={})", + process.isAlive() ? 
"still running" : process.exitValue()); + } + + if (readerScheduler != null) readerScheduler.dispose(); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/terminal/TerminalWebSocketHandler.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/terminal/TerminalWebSocketHandler.java new file mode 100644 index 000000000..9b10d75e5 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/terminal/TerminalWebSocketHandler.java @@ -0,0 +1,179 @@ +package com.alibaba.himarket.service.hicoding.terminal; + +import com.alibaba.himarket.config.AcpProperties; +import com.alibaba.himarket.service.hicoding.websocket.WebSocketPingScheduler; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.IOException; +import java.util.Base64; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Component; +import org.springframework.web.socket.CloseStatus; +import org.springframework.web.socket.TextMessage; +import org.springframework.web.socket.WebSocketSession; +import org.springframework.web.socket.handler.TextWebSocketHandler; +import reactor.core.Disposable; + +/** + * Terminal WebSocket handler。 + * 通过 WebSocket 连接远程 Sidecar 的 /terminal 端点提供终端功能。 + */ +@Component +public class TerminalWebSocketHandler extends TextWebSocketHandler { + + private static final Logger logger = LoggerFactory.getLogger(TerminalWebSocketHandler.class); + + private final AcpProperties acpProperties; + private final ObjectMapper objectMapper; + private final WebSocketPingScheduler pingScheduler; + private final Map backendMap = new ConcurrentHashMap<>(); + private final Map subscriptionMap = new ConcurrentHashMap<>(); + + public TerminalWebSocketHandler( + AcpProperties acpProperties, + ObjectMapper objectMapper, + WebSocketPingScheduler 
pingScheduler) { + this.acpProperties = acpProperties; + this.objectMapper = objectMapper; + this.pingScheduler = pingScheduler; + } + + @Override + public void afterConnectionEstablished(WebSocketSession session) throws Exception { + String userId = (String) session.getAttributes().get("userId"); + if (userId == null) { + logger.error("No userId in session attributes, closing terminal connection"); + session.close(CloseStatus.POLICY_VIOLATION); + return; + } + + String runtimeParam = (String) session.getAttributes().get("runtime"); + + logger.info( + "Terminal WebSocket connected: id={}, userId={}, runtime={}", + session.getId(), + userId, + runtimeParam); + + if (!acpProperties.getRemote().isConfigured()) { + logger.error("Remote sandbox not configured, cannot create terminal"); + session.close(CloseStatus.SERVER_ERROR); + return; + } + + String host = acpProperties.getRemote().getHost(); + int port = acpProperties.getRemote().getPort(); + String cwd = "/workspace/" + userId; + + logger.info("Creating RemoteTerminalBackend: host={}:{}, cwd={}", host, port, cwd); + TerminalBackend backend = new RemoteTerminalBackend(host, port, cwd); + + try { + backend.start(80, 24); + } catch (Exception e) { + logger.error("Failed to start remote terminal for user {}", userId, e); + session.close(CloseStatus.SERVER_ERROR); + return; + } + + backendMap.put(session.getId(), backend); + + // 启动 WebSocket 协议级 ping 定时器,保持前端连接活跃 + pingScheduler.startPing(session); + + Disposable subscription = + backend.output() + .subscribe( + data -> { + try { + if (session.isOpen()) { + String encoded = + Base64.getEncoder().encodeToString(data); + String json = + objectMapper.writeValueAsString( + Map.of( + "type", "output", "data", + encoded)); + synchronized (session) { + session.sendMessage(new TextMessage(json)); + } + } + } catch (IOException e) { + logger.error("Error sending terminal output", e); + } + }, + error -> + logger.error( + "Terminal output error for session {}", + 
session.getId(), + error), + () -> { + logger.info("Terminal exited for session {}", session.getId()); + try { + if (session.isOpen()) { + int exitCode = 0; + String json = + objectMapper.writeValueAsString( + Map.of( + "type", "exit", "code", + exitCode)); + synchronized (session) { + session.sendMessage(new TextMessage(json)); + } + } + } catch (IOException e) { + logger.debug("Error sending exit message", e); + } + }); + + subscriptionMap.put(session.getId(), subscription); + } + + @Override + protected void handleTextMessage(WebSocketSession session, TextMessage message) + throws Exception { + TerminalBackend backend = backendMap.get(session.getId()); + if (backend == null) return; + + String payload = message.getPayload(); + if (payload.isBlank()) return; + + JsonNode root = objectMapper.readTree(payload); + String type = root.has("type") ? root.get("type").asText() : ""; + + switch (type) { + case "input" -> { + String data = root.has("data") ? root.get("data").asText() : ""; + if (!data.isEmpty()) backend.write(data); + } + case "resize" -> { + int cols = root.has("cols") ? root.get("cols").asInt(80) : 80; + int rows = root.has("rows") ? 
root.get("rows").asInt(24) : 24; + backend.resize(cols, rows); + } + } + } + + @Override + public void afterConnectionClosed(WebSocketSession session, CloseStatus status) { + logger.info("Terminal closed: id={}, status={}", session.getId(), status); + cleanup(session.getId()); + } + + @Override + public void handleTransportError(WebSocketSession session, Throwable exception) { + logger.error("Terminal transport error for session {}", session.getId(), exception); + cleanup(session.getId()); + } + + private void cleanup(String sessionId) { + pingScheduler.stopPing(sessionId); + Disposable subscription = subscriptionMap.remove(sessionId); + if (subscription != null && !subscription.isDisposed()) subscription.dispose(); + TerminalBackend backend = backendMap.remove(sessionId); + if (backend != null) backend.close(); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/CliProcess.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/CliProcess.java new file mode 100644 index 000000000..e9330f6b1 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/CliProcess.java @@ -0,0 +1,213 @@ +package com.alibaba.himarket.service.hicoding.websocket; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.OutputStream; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Executors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Sinks; +import reactor.core.scheduler.Scheduler; +import reactor.core.scheduler.Schedulers; + +/** + * Manages an ACP-compatible CLI subprocess (qodercli, kiro-cli, claude-code, codex, qwen-code, etc.). + * Provides reactive stdin/stdout access for transparent proxying. 
+ */ +public class CliProcess { + + private static final Logger logger = LoggerFactory.getLogger(CliProcess.class); + + private final String command; + private final List args; + private final String cwd; + private final Map extraEnv; + private Process process; + private OutputStream stdin; + private volatile boolean closed = false; + + private final Sinks.Many stdoutSink = Sinks.many().multicast().onBackpressureBuffer(); + private final Sinks.Many stderrSink = Sinks.many().multicast().onBackpressureBuffer(); + + private Scheduler stdoutScheduler; + private Scheduler stderrScheduler; + + public CliProcess(String command, List args, String cwd) { + this(command, args, cwd, Collections.emptyMap()); + } + + public CliProcess(String command, List args, String cwd, Map extraEnv) { + this.command = command; + this.args = args; + this.cwd = cwd; + this.extraEnv = extraEnv != null ? extraEnv : Collections.emptyMap(); + } + + /** + * Start the ACP CLI subprocess. + */ + public void start() throws IOException { + List fullCommand = new ArrayList<>(); + fullCommand.add(command); + fullCommand.addAll(args); + + logger.info("Starting ACP process: {}", String.join(" ", fullCommand)); + + ProcessBuilder pb = new ProcessBuilder(fullCommand); + pb.redirectErrorStream(false); + if (cwd != null) { + pb.directory(new java.io.File(cwd)); + } + // Merge extra environment variables (e.g. 
API keys for specific CLI providers) + if (!extraEnv.isEmpty()) { + pb.environment().putAll(extraEnv); + } + this.process = pb.start(); + this.stdin = process.getOutputStream(); + + // Dedicated daemon threads for reading stdout/stderr + this.stdoutScheduler = + Schedulers.fromExecutorService( + Executors.newSingleThreadExecutor( + r -> { + Thread t = new Thread(r, "cli-stdout"); + t.setDaemon(true); + return t; + })); + this.stderrScheduler = + Schedulers.fromExecutorService( + Executors.newSingleThreadExecutor( + r -> { + Thread t = new Thread(r, "cli-stderr"); + t.setDaemon(true); + return t; + })); + + // Read stdout lines → stdoutSink + stdoutScheduler.schedule( + () -> { + try (BufferedReader reader = + new BufferedReader( + new InputStreamReader( + process.getInputStream(), StandardCharsets.UTF_8))) { + String line; + while (!closed && (line = reader.readLine()) != null) { + logger.trace("STDOUT: {}", line); + stdoutSink.tryEmitNext(line); + } + } catch (IOException e) { + if (!closed) { + logger.error("Error reading stdout", e); + } + } finally { + stdoutSink.tryEmitComplete(); + } + }); + + // Read stderr lines → log + stderrScheduler.schedule( + () -> { + try (BufferedReader reader = + new BufferedReader( + new InputStreamReader( + process.getErrorStream(), StandardCharsets.UTF_8))) { + String line; + while (!closed && (line = reader.readLine()) != null) { + logger.debug("STDERR: {}", line); + stderrSink.tryEmitNext(line); + } + } catch (IOException e) { + if (!closed) { + logger.error("Error reading stderr", e); + } + } finally { + stderrSink.tryEmitComplete(); + } + }); + + logger.info("ACP process started (pid={})", process.pid()); + } + + /** + * Send a raw JSON line to the subprocess stdin. 
+ */ + public synchronized void send(String jsonLine) throws IOException { + if (closed || stdin == null) { + throw new IOException("Process is closed"); + } + // Escape embedded newlines per ACP spec + String escaped = jsonLine.replace("\r\n", "\\n").replace("\n", "\\n").replace("\r", "\\n"); + logger.trace("STDIN: {}", escaped); + stdin.write(escaped.getBytes(StandardCharsets.UTF_8)); + stdin.write('\n'); + stdin.flush(); + } + + /** + * Reactive stream of stdout lines (each line is a JSON-RPC message from the ACP CLI). + */ + public Flux stdout() { + return stdoutSink.asFlux(); + } + + /** + * Reactive stream of stderr lines (for debugging). + */ + public Flux stderr() { + return stderrSink.asFlux(); + } + + /** + * Check if the underlying process is still alive. + */ + public boolean isAlive() { + return process != null && process.isAlive(); + } + + /** + * Return the PID of the underlying OS process. + * + * @return process id, or -1 if the process has not been started + */ + public long pid() { + return process != null ? process.pid() : -1; + } + + /** + * Gracefully close the subprocess. + */ + public void close() { + if (closed) return; + closed = true; + logger.info("Closing ACP process"); + + stdoutSink.tryEmitComplete(); + stderrSink.tryEmitComplete(); + + if (process != null) { + process.destroy(); + try { + boolean exited = process.waitFor(5, java.util.concurrent.TimeUnit.SECONDS); + if (!exited) { + logger.warn("Process did not exit in time, force killing"); + process.destroyForcibly(); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + logger.info( + "ACP process stopped (exit={})", + process.isAlive() ? 
"still running" : process.exitValue()); + } + + if (stdoutScheduler != null) stdoutScheduler.dispose(); + if (stderrScheduler != null) stderrScheduler.dispose(); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingConnectionManager.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingConnectionManager.java new file mode 100644 index 000000000..48359c69b --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingConnectionManager.java @@ -0,0 +1,304 @@ +package com.alibaba.himarket.service.hicoding.websocket; + +import com.alibaba.himarket.config.AcpProperties; +import com.alibaba.himarket.service.hicoding.runtime.RemoteRuntimeAdapter; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeAdapter; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeConfig; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxType; +import jakarta.annotation.PostConstruct; +import jakarta.annotation.PreDestroy; +import java.util.Queue; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Component; +import reactor.core.Disposable; + +/** + * WebSocket 连接状态和资源管理器。 + * + *

从 HiCodingWebSocketHandler 中提取连接级别的状态管理和资源清理逻辑, 使 Handler 不再直接持有 + * ConcurrentHashMap 字段。 + * + *

管理的状态包括: + * + *

    + *
  • runtimeMap — session → RuntimeAdapter(CLI 运行时) + *
  • subscriptionMap — session → Disposable(stdout 订阅) + *
  • cwdMap — session → 工作目录 + *
  • userIdMap — session → 用户 ID + *
  • sandboxModeMap — session → 沙箱模式 + *
  • pendingMessageMap — session → 待转发消息队列(初始化期间缓存) + *
  • deferredInitMap — session → 延迟初始化参数 + *
  • detachedSessionMap — userId → DetachedSessionInfo(已 detach 但仍可 reattach 的会话) + *
+ */ +@Component +public class HiCodingConnectionManager { + + private static final Logger logger = LoggerFactory.getLogger(HiCodingConnectionManager.class); + private static final long DETACH_TTL_MILLIS = 10 * 60 * 1000L; // 10 分钟 + + private final ConcurrentHashMap runtimeMap = new ConcurrentHashMap<>(); + private final ConcurrentHashMap subscriptionMap = new ConcurrentHashMap<>(); + private final ConcurrentHashMap cwdMap = new ConcurrentHashMap<>(); + private final ConcurrentHashMap userIdMap = new ConcurrentHashMap<>(); + private final ConcurrentHashMap sandboxModeMap = new ConcurrentHashMap<>(); + private final ConcurrentHashMap> pendingMessageMap = + new ConcurrentHashMap<>(); + private final ConcurrentHashMap deferredInitMap = + new ConcurrentHashMap<>(); + + /** userId → 已 detach 但可 reattach 的会话信息。 */ + private final ConcurrentHashMap detachedSessionMap = + new ConcurrentHashMap<>(); + + /** 定期清理过期 detached 会话的调度器。 */ + private final ScheduledExecutorService cleanupScheduler = + Executors.newSingleThreadScheduledExecutor( + r -> { + Thread t = new Thread(r, "detach-cleanup"); + t.setDaemon(true); + return t; + }); + + /** + * 延迟初始化上下文:当 WebSocket 握手时 URL 中没有 cliSessionConfig, 等待前端通过 session/config + * 消息发送配置后再启动 pipeline。 存储 afterConnectionEstablished 中解析好的参数,供 session/config + * 消息到达时使用。 + */ + public record DeferredInitParams( + String userId, + String providerKey, + RuntimeConfig config, + AcpProperties.CliProviderConfig providerConfig, + SandboxType sandboxType) {} + + /** + * 已 detach 的会话信息,存储在 detachedSessionMap 中。 当前端 WebSocket 重连时,可通过 userId + * 查找并 reattach。 + */ + public record DetachedSessionInfo( + String sidecarSessionId, + RuntimeAdapter adapter, + String cwd, + String sandboxMode, + long detachedAtMillis) {} + + @PostConstruct + void startCleanup() { + cleanupScheduler.scheduleAtFixedRate( + this::cleanupExpiredDetachedSessions, 1, 1, TimeUnit.MINUTES); + } + + @PreDestroy + void shutdown() { + cleanupScheduler.shutdownNow(); + } + + private 
void cleanupExpiredDetachedSessions() { + long now = System.currentTimeMillis(); + detachedSessionMap.forEach( + (userId, info) -> { + if (now - info.detachedAtMillis() > DETACH_TTL_MILLIS) { + DetachedSessionInfo removed = detachedSessionMap.remove(userId); + if (removed != null && removed.adapter() != null) { + removed.adapter().close(); + logger.info( + "Cleaned up expired detached session: userId={}, age={}s", + userId, + (now - removed.detachedAtMillis()) / 1000); + } + } + }); + } + + /** + * 注册新连接。在 WebSocket 连接建立时调用,初始化该 session 的所有状态映射。 + * + * @param sessionId WebSocket session ID + * @param userId 用户 ID + * @param cwd 工作目录 + * @param sandboxMode 沙箱模式(可为 null) + */ + public void registerConnection( + String sessionId, String userId, String cwd, String sandboxMode) { + userIdMap.put(sessionId, userId); + cwdMap.put(sessionId, cwd); + sandboxModeMap.put(sessionId, sandboxMode != null ? sandboxMode : ""); + pendingMessageMap.put(sessionId, new ConcurrentLinkedQueue<>()); + } + + /** + * 注册初始化成功后的运行时资源。在沙箱初始化完成后调用。 + * + * @param sessionId WebSocket session ID + * @param adapter CLI 运行时适配器 + * @param subscription stdout 订阅句柄 + */ + public void registerRuntime(String sessionId, RuntimeAdapter adapter, Disposable subscription) { + runtimeMap.put(sessionId, adapter); + subscriptionMap.put(sessionId, subscription); + } + + /** + * 清理连接资源。在 WebSocket 连接关闭时调用。 + * + *

如果 RuntimeAdapter 是 RemoteRuntimeAdapter 且已获取到 sidecarSessionId, + * 则执行 detach 操作(保留 sidecar 进程),而非直接 close。 detach 后的会话存储在 detachedSessionMap + * 中,等待前端重连。 + * + *

清理顺序:pendingMessages → deferredInit → subscription(dispose) → runtime(detach 或 close) + * → cwd → userId → sandboxMode + * + * @param sessionId WebSocket session ID + */ + public void cleanup(String sessionId) { + pendingMessageMap.remove(sessionId); + deferredInitMap.remove(sessionId); + + Disposable subscription = subscriptionMap.remove(sessionId); + if (subscription != null && !subscription.isDisposed()) { + subscription.dispose(); + } + + RuntimeAdapter runtime = runtimeMap.remove(sessionId); + String userId = userIdMap.remove(sessionId); + String cwd = cwdMap.remove(sessionId); + String sandboxMode = sandboxModeMap.remove(sessionId); + + // 如果是 RemoteRuntimeAdapter 且有 sidecarSessionId,执行 detach 而非 close + if (runtime instanceof RemoteRuntimeAdapter remoteAdapter + && remoteAdapter.getSidecarSessionId() != null + && userId != null) { + remoteAdapter.detach(); + detachedSessionMap.put( + userId, + new DetachedSessionInfo( + remoteAdapter.getSidecarSessionId(), + runtime, + cwd, + sandboxMode != null ? 
sandboxMode : "", + System.currentTimeMillis())); + logger.info( + "Session detached for userId={}, sidecarSessionId={}", + userId, + remoteAdapter.getSidecarSessionId()); + return; + } + + // 非 Remote 或无 sidecarSessionId,直接 close + if (runtime != null) { + runtime.close(); + } + } + + /** + * 原子地获取并移除指定用户的 detached 会话。 如果存在则返回 DetachedSessionInfo,否则返回 null。 + * + * @param userId 用户 ID + * @return DetachedSessionInfo,不存在时返回 null + */ + public DetachedSessionInfo takeDetachedSession(String userId) { + return detachedSessionMap.remove(userId); + } + + /** + * 查看指定用户是否有 detached 会话(不移除)。 + * + * @param userId 用户 ID + * @return DetachedSessionInfo,不存在时返回 null + */ + public DetachedSessionInfo peekDetachedSession(String userId) { + return detachedSessionMap.get(userId); + } + + /** + * 强制销毁指定用户的 detached 会话。 + * + * @param userId 用户 ID + */ + public void destroyDetachedSession(String userId) { + DetachedSessionInfo detached = detachedSessionMap.remove(userId); + if (detached != null && detached.adapter() != null) { + detached.adapter().close(); + logger.info( + "Destroyed detached session for userId={}, sidecarSessionId={}", + userId, + detached.sidecarSessionId()); + } + } + + /** + * 获取指定 session 的 RuntimeAdapter。 + * + * @param sessionId WebSocket session ID + * @return RuntimeAdapter,未初始化时返回 null + */ + public RuntimeAdapter getRuntime(String sessionId) { + return runtimeMap.get(sessionId); + } + + /** + * 获取指定 session 的用户 ID。 + * + * @param sessionId WebSocket session ID + * @return 用户 ID,未注册时返回 null + */ + public String getUserId(String sessionId) { + return userIdMap.get(sessionId); + } + + /** + * 获取指定 session 的工作目录。 + * + * @param sessionId WebSocket session ID + * @return 工作目录路径,未注册时返回 null + */ + public String getCwd(String sessionId) { + return cwdMap.get(sessionId); + } + + /** + * 获取指定 session 的待转发消息队列。 + * + * @param sessionId WebSocket session ID + * @return 消息队列,未注册时返回 null + */ + public Queue getPendingMessages(String sessionId) { + return 
pendingMessageMap.get(sessionId); + } + + /** + * 移除指定 session 的待转发消息队列。 在沙箱初始化完成后调用,标记该 session 不再需要缓存消息。 + * + * @param sessionId WebSocket session ID + */ + public void removePendingMessages(String sessionId) { + pendingMessageMap.remove(sessionId); + } + + /** + * 原子地获取并移除指定 session 的延迟初始化参数。 使用 remove() 而非 get(),确保参数只被消费一次。 + * + * @param sessionId WebSocket session ID + * @return 延迟初始化参数,不存在时返回 null + */ + public DeferredInitParams getDeferredInit(String sessionId) { + return deferredInitMap.remove(sessionId); + } + + /** + * 设置指定 session 的延迟初始化参数。 + * + * @param sessionId WebSocket session ID + * @param params 延迟初始化参数 + */ + public void setDeferredInit(String sessionId, DeferredInitParams params) { + deferredInitMap.put(sessionId, params); + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingHandshakeInterceptor.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingHandshakeInterceptor.java new file mode 100644 index 000000000..a4cc3ea0b --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingHandshakeInterceptor.java @@ -0,0 +1,87 @@ +package com.alibaba.himarket.service.hicoding.websocket; + +import cn.hutool.core.util.StrUtil; +import com.alibaba.himarket.core.utils.TokenUtil; +import com.alibaba.himarket.support.common.User; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.http.server.ServerHttpRequest; +import org.springframework.http.server.ServerHttpResponse; +import org.springframework.http.server.ServletServerHttpRequest; +import org.springframework.stereotype.Component; +import org.springframework.web.socket.WebSocketHandler; +import org.springframework.web.socket.server.HandshakeInterceptor; +import org.springframework.web.util.UriComponentsBuilder; + +@Component +public class HiCodingHandshakeInterceptor implements HandshakeInterceptor { + + 
private static final Logger logger = + LoggerFactory.getLogger(HiCodingHandshakeInterceptor.class); + + @Override + public boolean beforeHandshake( + ServerHttpRequest request, + ServerHttpResponse response, + WebSocketHandler wsHandler, + Map attributes) + throws Exception { + String token = null; + + // 1. Try query param: ?token=xxx (primary for WebSocket since browsers can't set headers) + try { + var params = UriComponentsBuilder.fromUri(request.getURI()).build().getQueryParams(); + token = params.getFirst("token"); + + // Extract CLI provider selection from query param: ?provider=kiro-cli + String provider = params.getFirst("provider"); + if (StrUtil.isNotBlank(provider)) { + attributes.put("provider", provider); + } + + // Extract runtime type from query param: ?runtime=local|k8s + String runtime = params.getFirst("runtime"); + if (StrUtil.isNotBlank(runtime)) { + attributes.put("runtime", runtime); + } + + // Extract sandbox mode from query param: ?sandboxMode=user|session + String sandboxMode = params.getFirst("sandboxMode"); + if (StrUtil.isNotBlank(sandboxMode)) { + attributes.put("sandboxMode", sandboxMode); + } + } catch (Exception e) { + logger.debug("Failed to parse token from query param", e); + } + + // 2. 
Fallback to Authorization header / cookie (via TokenUtil) + if (StrUtil.isBlank(token) && request instanceof ServletServerHttpRequest servletRequest) { + token = TokenUtil.getTokenFromRequest(servletRequest.getServletRequest()); + } + + if (StrUtil.isBlank(token)) { + logger.warn("WebSocket handshake rejected: missing token"); + return false; + } + + try { + User user = TokenUtil.parseUser(token); + attributes.put("userId", user.getUserId()); + logger.info("WebSocket handshake authenticated: userId={}", user.getUserId()); + return true; + } catch (Exception e) { + logger.warn("WebSocket handshake rejected: invalid token - {}", e.getMessage()); + return false; + } + } + + @Override + public void afterHandshake( + ServerHttpRequest request, + ServerHttpResponse response, + WebSocketHandler wsHandler, + Exception exception) { + // No-op + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingMessageRouter.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingMessageRouter.java new file mode 100644 index 000000000..912b07c78 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingMessageRouter.java @@ -0,0 +1,116 @@ +package com.alibaba.himarket.service.hicoding.websocket; + +import com.alibaba.himarket.service.hicoding.runtime.RuntimeAdapter; +import java.io.IOException; +import java.util.Queue; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Component; +import org.springframework.web.socket.CloseStatus; +import org.springframework.web.socket.TextMessage; +import org.springframework.web.socket.WebSocketSession; +import reactor.core.Disposable; + +/** + * 前端 ↔ CLI 双向消息路由器。 + * + *

从 HiCodingWebSocketHandler 中提取消息转发逻辑,职责包括: + * + *

    + *
  • 订阅 CLI stdout 流并转发到前端 WebSocket + *
  • 将前端消息转发到 CLI 进程 + *
  • 回放初始化期间缓存的待转发消息 + *
+ */ +@Component +public class HiCodingMessageRouter { + + private static final Logger logger = LoggerFactory.getLogger(HiCodingMessageRouter.class); + + /** + * 订阅 CLI stdout 并转发到前端 WebSocket。 + * + *

从 HiCodingWebSocketHandler.initSandboxAsync() 中提取的 stdout 订阅逻辑。 当 stdout + * 流完成时,会自动关闭对应的 WebSocket session。 + * + * @param adapter CLI 运行时适配器 + * @param session 前端 WebSocket session + * @return Disposable 订阅句柄(用于取消订阅) + */ + public Disposable subscribeAndForward(RuntimeAdapter adapter, WebSocketSession session) { + return adapter.stdout() + .subscribe( + line -> sendToFrontend(session, line), + error -> + logger.error( + "Stdout stream error for session {}", + session.getId(), + error), + () -> { + logger.info("Stdout stream completed for session {}", session.getId()); + try { + if (session.isOpen()) { + session.close(CloseStatus.NORMAL); + } + } catch (IOException e) { + logger.debug("Error closing WebSocket after stdout completion", e); + } + }); + } + + /** + * 将前端消息转发到 CLI 进程。 + * + * @param adapter CLI 运行时适配器 + * @param message 前端发送的消息内容 + * @throws IOException 发送失败时抛出 + */ + public void forwardToCliAgent(RuntimeAdapter adapter, String message) throws IOException { + adapter.send(message); + } + + /** + * 回放初始化期间缓存的待转发消息。 + * + *

从 HiCodingWebSocketHandler.replayPendingMessages() 迁移。 注意:本方法直接回放消息,不做消息变换。 + * 如需对消息进行重写(如 rewriteSessionNewCwd),由调用方在入队前或调用前处理。 + * + * @param session 前端 WebSocket session(用于日志记录) + * @param adapter CLI 运行时适配器 + * @param pending 待回放的消息队列 + */ + public void replayPendingMessages( + WebSocketSession session, RuntimeAdapter adapter, Queue pending) { + if (pending == null) { + return; + } + String queued; + while ((queued = pending.poll()) != null) { + try { + adapter.send(queued); + } catch (IOException e) { + logger.error("Error replaying queued message for session {}", session.getId(), e); + } + } + } + + /** + * 将消息发送到前端 WebSocket session。 + * + *

使用 synchronized 保证同一 session 的消息发送顺序。 + * + * @param session 前端 WebSocket session + * @param message 要发送的消息内容 + */ + private void sendToFrontend(WebSocketSession session, String message) { + try { + if (session.isOpen()) { + synchronized (session) { + session.sendMessage(new TextMessage(message)); + } + } + } catch (IOException e) { + logger.error("Error sending message to WebSocket", e); + } + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingWebSocketHandler.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingWebSocketHandler.java new file mode 100644 index 000000000..0efe94972 --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingWebSocketHandler.java @@ -0,0 +1,715 @@ +package com.alibaba.himarket.service.hicoding.websocket; + +import com.alibaba.himarket.config.AcpProperties; +import com.alibaba.himarket.config.AcpProperties.CliProviderConfig; +import com.alibaba.himarket.service.hicoding.runtime.RemoteRuntimeAdapter; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeAdapter; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeConfig; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxInfo; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxType; +import com.alibaba.himarket.service.hicoding.session.CliSessionConfig; +import com.alibaba.himarket.service.hicoding.session.SessionInitializer; +import com.alibaba.himarket.service.hicoding.session.SessionInitializer.InitializationResult; +import com.alibaba.himarket.service.hicoding.websocket.HiCodingConnectionManager.DeferredInitParams; +import com.alibaba.himarket.service.hicoding.websocket.HiCodingConnectionManager.DetachedSessionInfo; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import jakarta.annotation.PreDestroy; 
+import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Component; +import org.springframework.web.socket.CloseStatus; +import org.springframework.web.socket.TextMessage; +import org.springframework.web.socket.WebSocketSession; +import org.springframework.web.socket.handler.TextWebSocketHandler; +import reactor.core.Disposable; + +/** + * WebSocket 事件入口。 + * 仅负责接收 WebSocket 事件并委托给专门的组件处理。 + * + *

职责: + *

    + *
  • afterConnectionEstablished → 委托 HiCodingConnectionManager + SessionInitializer + *
  • handleTextMessage → 委托 HiCodingMessageRouter + *
  • afterConnectionClosed / handleTransportError → 委托 HiCodingConnectionManager.cleanup() + *
+ */ +@Component +public class HiCodingWebSocketHandler extends TextWebSocketHandler { + + private static final Logger logger = LoggerFactory.getLogger(HiCodingWebSocketHandler.class); + private static final String SESSION_NEW_METHOD = "session/new"; + private static final String SESSION_LOAD_METHOD = "session/load"; + + private final AcpProperties properties; + private final ObjectMapper objectMapper; + private final SessionInitializer sessionInitializer; + private final HiCodingMessageRouter messageRouter; + private final HiCodingConnectionManager connectionManager; + private final WebSocketPingScheduler pingScheduler; + + /** 异步初始化线程池(有界) */ + private final ExecutorService podInitExecutor = + new ThreadPoolExecutor( + 4, + 32, + 60L, + TimeUnit.SECONDS, + new LinkedBlockingQueue<>(256), + r -> { + Thread t = new Thread(r, "sandbox-init"); + t.setDaemon(true); + return t; + }, + new ThreadPoolExecutor.CallerRunsPolicy()); + + public HiCodingWebSocketHandler( + AcpProperties properties, + ObjectMapper objectMapper, + SessionInitializer sessionInitializer, + HiCodingMessageRouter messageRouter, + HiCodingConnectionManager connectionManager, + WebSocketPingScheduler pingScheduler) { + this.properties = properties; + this.objectMapper = objectMapper; + this.sessionInitializer = sessionInitializer; + this.messageRouter = messageRouter; + this.connectionManager = connectionManager; + this.pingScheduler = pingScheduler; + } + + @PreDestroy + void shutdown() { + podInitExecutor.shutdownNow(); + } + + @Override + public void afterConnectionEstablished(WebSocketSession session) throws Exception { + String userId = (String) session.getAttributes().get("userId"); + if (userId == null) { + logger.error("No userId in session attributes, closing connection"); + session.close(CloseStatus.POLICY_VIOLATION); + return; + } + + // Resolve sandbox type from session attributes (set by HiCodingHandshakeInterceptor) + String runtimeParam = (String) 
session.getAttributes().get("runtime"); + SandboxType sandboxType = resolveSandboxType(runtimeParam); + + // Build per-user working directory (always remote sandbox path) + String cwd = "/workspace/" + userId; + + logger.info( + "WebSocket connected: id={}, userId={}, cwd={}, sandboxType={}", + session.getId(), + userId, + cwd, + sandboxType); + + // Resolve CLI provider: prefer query param, fallback to default + String providerKey = (String) session.getAttributes().get("provider"); + if (providerKey == null || providerKey.isBlank()) { + providerKey = properties.getDefaultProvider(); + } + CliProviderConfig providerConfig = properties.getProvider(providerKey); + if (providerConfig == null) { + logger.error("Unknown CLI provider '{}', closing connection", providerKey); + session.close(CloseStatus.POLICY_VIOLATION); + return; + } + + logger.info( + "Using CLI provider '{}' (command={})", providerKey, providerConfig.getCommand()); + + // Build RuntimeConfig from provider configuration + RuntimeConfig config = + buildRuntimeConfig(userId, providerKey, providerConfig, cwd, sandboxType); + + // Resolve sandboxMode from session attributes (set by HiCodingHandshakeInterceptor) + String sandboxMode = (String) session.getAttributes().get("sandboxMode"); + + // 注册连接 + connectionManager.registerConnection(session.getId(), userId, cwd, sandboxMode); + + // 等待 session/config 消息到达后再启动 pipeline + connectionManager.setDeferredInit( + session.getId(), + new DeferredInitParams(userId, providerKey, config, providerConfig, sandboxType)); + logger.info( + "Deferring pipeline init until session/config message: session={}", + session.getId()); + // Non-blocking return for all sandbox types + + // 启动 WebSocket 协议级 ping 定时器,保持前端连接活跃 + pingScheduler.startPing(session); + } + + @Override + protected void handleTextMessage(WebSocketSession session, TextMessage message) + throws Exception { + String payload = message.getPayload(); + if (payload.isBlank()) { + logger.trace("Ignoring blank 
message from session {}", session.getId()); + return; + } + + // 拦截 session/config 消息:前端连接后发送的配置(替代 URL query string 传递) + // 必须在 pendingMessageMap 检查之前处理,因为此时 pipeline 尚未启动 + DeferredInitParams deferred = connectionManager.getDeferredInit(session.getId()); + if (deferred != null) { + CliSessionConfig sessionConfig = null; + try { + JsonNode root = objectMapper.readTree(payload); + JsonNode methodNode = root.get("method"); + if (methodNode != null && "session/config".equals(methodNode.asText())) { + JsonNode params = root.get("params"); + if (params != null) { + sessionConfig = objectMapper.treeToValue(params, CliSessionConfig.class); + logger.info( + "Received session/config via WebSocket message: session={}," + + " hasModel={}, mcpCount={}, skillCount={}, hasAuthToken={}", + session.getId(), + sessionConfig.getModelProductId() != null, + sessionConfig.getMcpServers() != null + ? sessionConfig.getMcpServers().size() + : 0, + sessionConfig.getSkills() != null + ? sessionConfig.getSkills().size() + : 0, + sessionConfig.getAuthToken() != null + && !sessionConfig.getAuthToken().isEmpty()); + } + } + } catch (Exception e) { + logger.warn( + "Failed to parse session/config message, proceeding with null config: {}", + e.getMessage()); + } + + // 无论是否成功解析 session/config,都启动 pipeline(config 可以为 null) + final CliSessionConfig fSessionConfig = sessionConfig; + final DeferredInitParams fDeferred = deferred; + podInitExecutor.submit( + () -> + doInitialize( + session, + fDeferred.userId(), + fDeferred.providerKey(), + fDeferred.config(), + fDeferred.providerConfig(), + fSessionConfig, + fDeferred.sandboxType())); + + // 如果这条消息是 session/config,已处理完毕,不需要转发给 CLI + if (sessionConfig != null) { + return; + } + // 不是 session/config 消息,fall through 到下面的 pendingQueue 缓存逻辑 + } + + // 如果 Pod 还在异步初始化中,缓存消息 + Queue pendingQueue = connectionManager.getPendingMessages(session.getId()); + if (pendingQueue != null) { + pendingQueue.add(payload); + logger.debug("Pod initializing, queued 
message for session {}", session.getId()); + return; + } + + RuntimeAdapter runtime = connectionManager.getRuntime(session.getId()); + if (runtime == null) { + logger.warn("No runtime for session {}", session.getId()); + return; + } + + logger.debug("Inbound [{}]: {}", session.getId(), payload); + + // Rewrite cwd in session/new and session/load requests to the absolute workspace path + payload = rewriteSessionCwd(session.getId(), payload); + + try { + messageRouter.forwardToCliAgent(runtime, payload); + } catch (IOException e) { + logger.error("Error writing to runtime stdin for session {}", session.getId(), e); + } + } + + @Override + public void afterConnectionClosed(WebSocketSession session, CloseStatus status) + throws Exception { + logger.info("WebSocket closed: id={}, status={}", session.getId(), status); + pingScheduler.stopPing(session.getId()); + connectionManager.cleanup(session.getId()); + } + + @Override + public void handleTransportError(WebSocketSession session, Throwable exception) + throws Exception { + logger.error("WebSocket transport error for session {}", session.getId(), exception); + pingScheduler.stopPing(session.getId()); + connectionManager.cleanup(session.getId()); + } + + /** + * 在 podInitExecutor 线程池中执行沙箱初始化。 + * 委托 SessionInitializer 完成初始化,根据结果处理成功/失败。 + */ + private void doInitialize( + WebSocketSession session, + String userId, + String providerKey, + RuntimeConfig config, + CliProviderConfig providerConfig, + CliSessionConfig sessionConfig, + SandboxType sandboxType) { + try { + // 优先尝试 reattach 到已有的 detached 会话 + DetachedSessionInfo detached = connectionManager.takeDetachedSession(userId); + if (detached != null + && detached.adapter() instanceof RemoteRuntimeAdapter remoteAdapter) { + try { + doReattach(session, userId, detached, remoteAdapter); + return; + } catch (Exception e) { + logger.warn( + "[Sandbox-Init] Reattach 失败,回退到完整初始化: userId={}, error={}", + userId, + e.getMessage()); + try { + remoteAdapter.close(); + } catch 
(Exception closeEx) { + logger.debug("Error closing failed reattach adapter", closeEx); + } + } + } else if (detached != null) { + detached.adapter().close(); + } + + logger.info( + "[Sandbox-Init] 开始异步沙箱初始化: userId={}, session={}, type={}", + userId, + session.getId(), + sandboxType); + sendSandboxStatus(session, "creating", "正在准备沙箱环境..."); + sendInitProgress(session, "sandbox-acquire", "executing", "正在获取沙箱实例...", 0, 5, 0); + + // 推送进度:沙箱获取中 + sendInitProgress(session, "sandbox-acquire", "executing", "正在获取沙箱实例...", 10, 5, 0); + + InitializationResult result = + sessionInitializer.initialize( + userId, + providerKey, + providerConfig, + config, + sessionConfig, + sandboxType, + session); + + if (!session.isOpen()) { + logger.warn("[Sandbox-Init] WebSocket 已关闭,放弃后续处理: userId={}", userId); + return; + } + + if (result.success()) { + RuntimeAdapter adapter = result.adapter(); + + // 订阅 stdout 并转发到前端 + Disposable subscription = messageRouter.subscribeAndForward(adapter, session); + + // 注册运行时资源 + connectionManager.registerRuntime(session.getId(), adapter, subscription); + + // 推送就绪通知 + SandboxInfo sInfo = result.sandboxInfo(); + String sandboxHost = + sInfo != null && sInfo.host() != null && !sInfo.host().isBlank() + ? 
sInfo.host() + : null; + + sendSandboxStatus(session, "ready", "沙箱环境已就绪", sandboxHost); + sendInitProgress(session, "cli-ready", "completed", "沙箱环境已就绪", 100, 5, 5); + logger.info( + "[Sandbox-Init] 已发送 sandbox/status: ready, sandboxHost={}", sandboxHost); + + // 通知前端实际使用的工作目录 + String cwd = connectionManager.getCwd(session.getId()); + if (cwd != null) { + sendWorkspaceInfo(session, cwd); + } + + // 回放缓存的消息(先对每条消息做 rewriteSessionCwd 变换) + Queue pendingQueue = connectionManager.getPendingMessages(session.getId()); + if (pendingQueue != null) { + Queue rewrittenQueue = new ConcurrentLinkedQueue<>(); + String queued; + while ((queued = pendingQueue.poll()) != null) { + rewrittenQueue.add(rewriteSessionCwd(session.getId(), queued)); + } + messageRouter.replayPendingMessages(session, adapter, rewrittenQueue); + } + // 移除 pendingMessageMap 标记,后续消息直接转发 + connectionManager.removePendingMessages(session.getId()); + + } else { + // 发送详细错误信息,不主动关闭 WebSocket,由前端决定后续行为 + sendSandboxError(session, result, sandboxType); + } + } catch (Exception e) { + logger.error("[Sandbox-Init] 沙箱初始化异常: userId={}, error={}", userId, e.getMessage(), e); + connectionManager.removePendingMessages(session.getId()); + sendSandboxStatus(session, "error", "沙箱创建失败: " + e.getMessage()); + try { + if (session.isOpen()) { + session.close(CloseStatus.SERVER_ERROR); + } + } catch (IOException closeEx) { + logger.debug("Error closing WebSocket after sandbox init failure", closeEx); + } + } + } + + /** + * 重新连接到已 detach 的 sidecar 会话。 + * 跳过完整的沙箱初始化流程(acquire、config injection 等), + * 直接调用 adapter.reconnect() 恢复 WebSocket 连接。 + */ + private void doReattach( + WebSocketSession session, + String userId, + DetachedSessionInfo detached, + RemoteRuntimeAdapter remoteAdapter) { + logger.info( + "[Sandbox-Init] 尝试 reattach: userId={}, sidecarSessionId={}", + userId, + detached.sidecarSessionId()); + + sendSandboxStatus(session, "reattaching", "正在恢复已有会话..."); + + // 重新连接到 sidecar(使用 sessionId attach 模式) + 
remoteAdapter.reconnect(); + + // 等待片刻让 reactor 线程处理可能的立即关闭信号 + // (sidecar 会话过期时会在握手后立即关闭连接) + try { + Thread.sleep(500); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + + // 验证重连后连接是否仍然存活 + if (!remoteAdapter.isAlive()) { + throw new RuntimeException( + "Sidecar connection closed immediately after reattach," + + " session may have expired on sidecar side"); + } + + if (!session.isOpen()) { + logger.warn("[Sandbox-Init] WebSocket 已关闭,放弃 reattach: userId={}", userId); + remoteAdapter.detach(); + // 放回 detachedSessionMap 以便下次重试 + connectionManager.takeDetachedSession(userId); // 确保没有残留 + return; + } + + // 先通知前端:已恢复连接(在订阅 stdout 之前,确保前端先收到状态通知) + sendSandboxStatus(session, "ready", "已恢复已有会话"); + sendInitProgress(session, "cli-ready", "completed", "已恢复已有会话", 100, 5, 5); + + // 通知前端实际使用的工作目录 + String cwd = connectionManager.getCwd(session.getId()); + if (cwd != null) { + sendWorkspaceInfo(session, cwd); + } + + // 通知前端这是一次 reattach + sendReattachNotification(session, detached.sidecarSessionId()); + + // 订阅 stdout 并转发到前端 + // 放在所有通知之后:避免 sidecar 残留消息在前端未就绪时到达 + Disposable subscription = messageRouter.subscribeAndForward(remoteAdapter, session); + + // 注册运行时资源 + connectionManager.registerRuntime(session.getId(), remoteAdapter, subscription); + + // 回放缓存的消息 + Queue pendingQueue = connectionManager.getPendingMessages(session.getId()); + if (pendingQueue != null) { + Queue rewrittenQueue = new ConcurrentLinkedQueue<>(); + String queued; + while ((queued = pendingQueue.poll()) != null) { + rewrittenQueue.add(rewriteSessionCwd(session.getId(), queued)); + } + messageRouter.replayPendingMessages(session, remoteAdapter, rewrittenQueue); + } + connectionManager.removePendingMessages(session.getId()); + + logger.info( + "[Sandbox-Init] Reattach 成功: userId={}, sidecarSessionId={}", + userId, + detached.sidecarSessionId()); + } + + /** + * 发送详细的沙箱初始化错误信息(适配 InitializationResult)。 + * 包含 failedPhase、errorMessage、retryable、diagnostics。 + */ + private 
void sendSandboxError( + WebSocketSession session, InitializationResult result, SandboxType sandboxType) { + try { + if (!session.isOpen()) return; + ObjectNode notification = objectMapper.createObjectNode(); + notification.put("jsonrpc", "2.0"); + notification.put("method", "sandbox/status"); + ObjectNode params = objectMapper.createObjectNode(); + params.put("status", "error"); + params.put( + "message", + "沙箱初始化失败: " + (result.errorMessage() != null ? result.errorMessage() : "未知错误")); + params.put("failedPhase", result.failedPhase()); + params.put("sandboxType", sandboxType.getValue()); + params.put("retryable", result.retryable()); + + // diagnostics + ObjectNode diagnostics = objectMapper.createObjectNode(); + List completedPhases = new ArrayList<>(); + // InitializationResult 不直接持有 phaseDurations,使用 failedPhase 推断 + diagnostics.set("completedPhases", objectMapper.valueToTree(completedPhases)); + if (result.totalDuration() != null) { + diagnostics.put( + "totalDuration", + String.format("%.1fs", result.totalDuration().toMillis() / 1000.0)); + } + diagnostics.put("suggestion", buildSuggestion(result)); + params.set("diagnostics", diagnostics); + + notification.set("params", params); + synchronized (session) { + session.sendMessage(new TextMessage(objectMapper.writeValueAsString(notification))); + } + logger.error( + "[Sandbox-Init] 发送错误通知: failedPhase={}, retryable={}, message={}", + result.failedPhase(), + result.retryable(), + result.errorMessage()); + } catch (Exception e) { + logger.warn("Failed to send sandbox error notification: {}", e.getMessage()); + } + } + + /** + * 根据失败阶段生成针对性的排查建议。 + */ + private String buildSuggestion(InitializationResult result) { + if (result.failedPhase() == null) { + return "请检查沙箱配置或联系管理员"; + } + return switch (result.failedPhase()) { + case "filesystem-ready" -> + "沙箱服务不可达,请检查: 1) sidecar 服务是否已启动 " + + "2) ACP_REMOTE_HOST 和 ACP_REMOTE_PORT 配置是否正确 " + + "3) 网络是否可达"; + case "sandbox-acquire" -> "沙箱实例获取失败,请检查沙箱配置或联系管理员"; + case 
"config-injection" -> "配置注入失败,请重试连接"; + case "sidecar-connect" -> "Sidecar WebSocket 连接失败,请检查 sidecar 服务状态后重试"; + case "cli-ready" -> "CLI 工具启动失败,请检查 CLI 命令配置是否正确"; + default -> result.retryable() ? "请重试连接" : "请检查沙箱配置或联系管理员"; + }; + } + + /** + * 向前端推送初始化进度消息。 + */ + private void sendInitProgress( + WebSocketSession session, + String phase, + String status, + String message, + int progress, + int totalPhases, + int completedPhases) { + try { + if (!session.isOpen()) return; + ObjectNode notification = objectMapper.createObjectNode(); + notification.put("jsonrpc", "2.0"); + notification.put("method", "sandbox/init-progress"); + ObjectNode params = objectMapper.createObjectNode(); + params.put("phase", phase); + params.put("status", status); + params.put("message", message); + params.put("progress", progress); + params.put("totalPhases", totalPhases); + params.put("completedPhases", completedPhases); + notification.set("params", params); + synchronized (session) { + session.sendMessage(new TextMessage(objectMapper.writeValueAsString(notification))); + } + } catch (Exception e) { + logger.warn("Failed to send init progress notification: {}", e.getMessage()); + } + } + + /** + * 向前端推送沙箱状态通知(JSON-RPC notification)。 + */ + private void sendSandboxStatus(WebSocketSession session, String status, String message) { + sendSandboxStatus(session, status, message, null); + } + + private void sendSandboxStatus( + WebSocketSession session, String status, String message, String sandboxHost) { + try { + if (!session.isOpen()) return; + ObjectNode notification = objectMapper.createObjectNode(); + notification.put("jsonrpc", "2.0"); + notification.put("method", "sandbox/status"); + ObjectNode params = objectMapper.createObjectNode(); + params.put("status", status); + params.put("message", message); + if (sandboxHost != null && !sandboxHost.isBlank()) { + params.put("sandboxHost", sandboxHost); + } + notification.set("params", params); + synchronized (session) { + 
session.sendMessage(new TextMessage(objectMapper.writeValueAsString(notification))); + } + } catch (Exception e) { + logger.warn("Failed to send sandbox status notification: {}", e.getMessage()); + } + } + + /** + * 向前端推送工作目录信息通知(JSON-RPC notification)。 + */ + private void sendWorkspaceInfo(WebSocketSession session, String cwd) { + try { + if (!session.isOpen()) return; + ObjectNode notification = objectMapper.createObjectNode(); + notification.put("jsonrpc", "2.0"); + notification.put("method", "workspace/info"); + ObjectNode params = objectMapper.createObjectNode(); + params.put("cwd", cwd); + notification.set("params", params); + synchronized (session) { + session.sendMessage(new TextMessage(objectMapper.writeValueAsString(notification))); + } + } catch (Exception e) { + logger.warn("Failed to send workspace info notification: {}", e.getMessage()); + } + } + + /** + * 向前端推送 reattach 通知,告知前端此次连接恢复了已有的 sidecar 会话。 + */ + private void sendReattachNotification(WebSocketSession session, String sidecarSessionId) { + try { + if (!session.isOpen()) return; + ObjectNode notification = objectMapper.createObjectNode(); + notification.put("jsonrpc", "2.0"); + notification.put("method", "sandbox/reattached"); + ObjectNode params = objectMapper.createObjectNode(); + params.put("sidecarSessionId", sidecarSessionId); + params.put("reattached", true); + notification.set("params", params); + synchronized (session) { + session.sendMessage(new TextMessage(objectMapper.writeValueAsString(notification))); + } + } catch (Exception e) { + logger.warn("Failed to send reattach notification: {}", e.getMessage()); + } + } + + /** + * Resolve the runtime type from the query parameter string. + * Defaults to REMOTE if the parameter is null, blank, or unrecognized. 
+ */ + SandboxType resolveSandboxType(String runtimeParam) { + if (runtimeParam == null || runtimeParam.isBlank()) { + return SandboxType.REMOTE; + } + try { + return SandboxType.fromValue(runtimeParam); + } catch (IllegalArgumentException e) { + logger.warn("Unknown sandbox type '{}', defaulting to REMOTE", runtimeParam); + return SandboxType.REMOTE; + } + } + + /** + * Build a RuntimeConfig from the CLI provider configuration and session context. + */ + private RuntimeConfig buildRuntimeConfig( + String userId, + String providerKey, + CliProviderConfig providerConfig, + String cwd, + SandboxType sandboxType) { + Map processEnv = + providerConfig.getEnv() != null + ? new HashMap<>(providerConfig.getEnv()) + : new HashMap<>(); + // 将 HOME 指向用户工作目录,确保 CLI 工具的会话文件(JSONL 等) + // 存储在持久化卷上,而非容器临时文件系统的 /root 下 + processEnv.put("HOME", cwd); + RuntimeConfig config = new RuntimeConfig(); + config.setUserId(userId); + config.setProviderKey(providerKey); + config.setCommand(providerConfig.getCommand()); + config.setArgs(List.of(providerConfig.getArgs())); + config.setCwd(cwd); + config.setEnv(processEnv); + + if (sandboxType == SandboxType.REMOTE) { + logger.info("Remote runtime: host={}", properties.getRemote().getHost()); + } + + return config; + } + + /** + * Intercept session/new and session/load requests and replace the cwd parameter + * with the absolute workspace path so that the ACP CLI knows the real directory. 
+ */ + private String rewriteSessionCwd(String sessionId, String payload) { + // Fast-path: skip JSON parsing for messages that clearly aren't session/new or session/load + if (!payload.contains(SESSION_NEW_METHOD) && !payload.contains(SESSION_LOAD_METHOD)) { + return payload; + } + String cwd = connectionManager.getCwd(sessionId); + if (cwd == null) { + return payload; + } + try { + JsonNode root = objectMapper.readTree(payload); + JsonNode methodNode = root.get("method"); + if (methodNode == null) { + return payload; + } + String method = methodNode.asText(); + if (!SESSION_NEW_METHOD.equals(method) && !SESSION_LOAD_METHOD.equals(method)) { + return payload; + } + JsonNode params = root.get("params"); + if (params == null || !params.isObject()) { + return payload; + } + ((ObjectNode) params).put("cwd", cwd); + return objectMapper.writeValueAsString(root); + } catch (Exception e) { + logger.debug("Failed to rewrite session cwd, forwarding original payload", e); + return payload; + } + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/WebSocketPingScheduler.java b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/WebSocketPingScheduler.java new file mode 100644 index 000000000..22c57cb8d --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/hicoding/websocket/WebSocketPingScheduler.java @@ -0,0 +1,84 @@ +package com.alibaba.himarket.service.hicoding.websocket; + +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.TimeUnit; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Component; +import org.springframework.web.socket.PingMessage; +import org.springframework.web.socket.WebSocketSession; + +/** + * WebSocket 协议级 ping 调度器。 + * + *

管理共享的 ScheduledExecutorService,为每个 WebSocketSession 调度周期性 ping。 + * 作为单例 Bean 供 HiCodingWebSocketHandler 和 TerminalWebSocketHandler 共享。 + */ +@Component +public class WebSocketPingScheduler { + + private static final Logger logger = LoggerFactory.getLogger(WebSocketPingScheduler.class); + + static final long PING_INTERVAL_SECONDS = 30; + + private final ScheduledExecutorService scheduler = + Executors.newScheduledThreadPool( + 2, + r -> { + Thread t = new Thread(r, "ws-ping-scheduler"); + t.setDaemon(true); + return t; + }); + + private final ConcurrentHashMap> pingFutures = + new ConcurrentHashMap<>(); + + /** + * 为指定 session 启动 ping 定时器。 + * + *

每 {@value PING_INTERVAL_SECONDS} 秒发送一个 WebSocket 协议级 PingMessage。 + * 对同一 sessionId 重复调用时,先停止旧的定时器再注册新的。 + */ + public void startPing(WebSocketSession session) { + String sessionId = session.getId(); + + // 重复调用时先停止旧的 + stopPing(sessionId); + + ScheduledFuture future = + scheduler.scheduleAtFixedRate( + () -> { + try { + if (!session.isOpen()) { + return; + } + session.sendMessage(new PingMessage()); + } catch (Exception e) { + logger.warn( + "[WS-Ping] Failed to send ping for session {}: {}", + sessionId, + e.getMessage()); + } + }, + PING_INTERVAL_SECONDS, + PING_INTERVAL_SECONDS, + TimeUnit.SECONDS); + + pingFutures.put(sessionId, future); + logger.info("[WS-Ping] Started ping scheduler for session {}", sessionId); + } + + /** + * 停止指定 session 的 ping 定时器。 + */ + public void stopPing(String sessionId) { + ScheduledFuture future = pingFutures.remove(sessionId); + if (future != null) { + future.cancel(false); + logger.info("[WS-Ping] Stopped ping scheduler for session {}", sessionId); + } + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/impl/CodingSessionServiceImpl.java b/himarket-server/src/main/java/com/alibaba/himarket/service/impl/CodingSessionServiceImpl.java new file mode 100644 index 000000000..6f8b7191d --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/impl/CodingSessionServiceImpl.java @@ -0,0 +1,95 @@ +package com.alibaba.himarket.service.impl; + +import com.alibaba.himarket.core.constant.Resources; +import com.alibaba.himarket.core.exception.BusinessException; +import com.alibaba.himarket.core.exception.ErrorCode; +import com.alibaba.himarket.core.security.ContextHolder; +import com.alibaba.himarket.core.utils.IdGenerator; +import com.alibaba.himarket.dto.params.coding.CreateCodingSessionParam; +import com.alibaba.himarket.dto.params.coding.UpdateCodingSessionParam; +import com.alibaba.himarket.dto.result.coding.CodingSessionResult; +import com.alibaba.himarket.dto.result.common.PageResult; 
+import com.alibaba.himarket.entity.CodingSession; +import com.alibaba.himarket.repository.CodingSessionRepository; +import com.alibaba.himarket.service.CodingSessionService; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +@Service +@RequiredArgsConstructor +@Slf4j +@Transactional +public class CodingSessionServiceImpl implements CodingSessionService { + + private final CodingSessionRepository sessionRepository; + private final ContextHolder contextHolder; + + private static final int MAX_SESSIONS_PER_USER = 50; + + @Override + public CodingSessionResult createSession(CreateCodingSessionParam param) { + String sessionId = IdGenerator.genSessionId(); + CodingSession session = param.convertTo(); + session.setUserId(contextHolder.getUser()); + session.setSessionId(sessionId); + + sessionRepository.save(session); + cleanupExtraSessions(); + + return new CodingSessionResult().convertFrom(session); + } + + @Override + public PageResult listSessions(Pageable pageable) { + Page sessions = + sessionRepository.findByUserIdOrderByUpdatedAtDesc( + contextHolder.getUser(), pageable); + + return new PageResult() + .convertFrom(sessions, session -> new CodingSessionResult().convertFrom(session)); + } + + @Override + public CodingSessionResult updateSession(String sessionId, UpdateCodingSessionParam param) { + CodingSession session = findUserSession(sessionId); + param.update(session); + + sessionRepository.saveAndFlush(session); + + return new CodingSessionResult().convertFrom(session); + } + + @Override + public void deleteSession(String sessionId) { + CodingSession session = findUserSession(sessionId); + sessionRepository.delete(session); + } + + private CodingSession findUserSession(String 
sessionId) { + return sessionRepository + .findBySessionIdAndUserId(sessionId, contextHolder.getUser()) + .orElseThrow( + () -> + new BusinessException( + ErrorCode.NOT_FOUND, Resources.CHAT_SESSION, sessionId)); + } + + private void cleanupExtraSessions() { + String userId = contextHolder.getUser(); + int count = sessionRepository.countByUserId(userId); + if (count > MAX_SESSIONS_PER_USER) { + // Keep the newest MAX_SESSIONS_PER_USER sessions, delete the rest. + // Results are ordered by updatedAt DESC (newest first). + // Page 0 = newest 50 (keep), page 1+ = older (delete). + sessionRepository + .findByUserIdOrderByUpdatedAtDesc( + userId, PageRequest.of(1, MAX_SESSIONS_PER_USER)) + .forEach(sessionRepository::delete); + } + } +} diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/impl/ConsumerServiceImpl.java b/himarket-server/src/main/java/com/alibaba/himarket/service/impl/ConsumerServiceImpl.java index 19dfcac9a..786fa72dd 100644 --- a/himarket-server/src/main/java/com/alibaba/himarket/service/impl/ConsumerServiceImpl.java +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/impl/ConsumerServiceImpl.java @@ -758,7 +758,11 @@ public void setPrimaryConsumer(String consumerId) { @Override public ConsumerResult getPrimaryConsumer() { - String developerId = contextHolder.getUser(); + return getPrimaryConsumer(contextHolder.getUser()); + } + + @Override + public ConsumerResult getPrimaryConsumer(String developerId) { return consumerRepository .findByDeveloperIdAndIsPrimary(developerId, true) .map( diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/impl/NacosServiceImpl.java b/himarket-server/src/main/java/com/alibaba/himarket/service/impl/NacosServiceImpl.java index 7f720694d..1c7343748 100644 --- a/himarket-server/src/main/java/com/alibaba/himarket/service/impl/NacosServiceImpl.java +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/impl/NacosServiceImpl.java @@ -56,11 +56,13 @@ import 
com.alibaba.nacos.maintainer.client.ai.McpMaintainerService; import com.alibaba.nacos.maintainer.client.naming.NamingMaintainerFactory; import com.alibaba.nacos.maintainer.client.naming.NamingMaintainerService; +import com.alibaba.nacos.maintainer.client.utils.ParamUtil; import com.aliyun.mse20190531.Client; import com.aliyun.mse20190531.models.ListClustersRequest; import com.aliyun.mse20190531.models.ListClustersResponse; import com.aliyun.mse20190531.models.ListClustersResponseBody; import com.aliyun.teautil.models.RuntimeOptions; +import jakarta.annotation.PostConstruct; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -74,6 +76,7 @@ import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; @Service @Slf4j @@ -91,6 +94,12 @@ public class NacosServiceImpl implements NacosService { // 添加缓存,用于存储AiMaintainerService实例 private final Map aiServiceCache = new ConcurrentHashMap<>(); + @PostConstruct + void initNacosClientTimeout() { + ParamUtil.setReadTimeout(90_000); + log.info("Nacos maintainer-client read timeout set to 90s"); + } + @Override public PageResult listNacosInstances(Pageable pageable) { Page nacosInstances = nacosInstanceRepository.findAll(pageable); @@ -139,6 +148,11 @@ public void createNacosInstance(CreateNacosParam param) { nacosInstance.setAdminId(contextHolder.getUser()); + // 第一个实例自动标记为默认 + if (nacosInstanceRepository.findByIsDefaultTrue().isEmpty()) { + nacosInstance.setIsDefault(true); + } + nacosInstanceRepository.save(nacosInstance); } @@ -166,6 +180,9 @@ public void updateNacosInstance(String nacosId, UpdateNacosParam param) { @Override public void deleteNacosInstance(String nacosId) { NacosInstance nacosInstance = findNacosInstance(nacosId); + if (Boolean.TRUE.equals(nacosInstance.getIsDefault())) { + throw new BusinessException(ErrorCode.INVALID_PARAMETER, "默认 Nacos 
实例不允许删除,请先切换默认实例"); + } // 从缓存中移除相关条目 String cacheKey = buildCacheKey(nacosInstance); aiServiceCache.remove(cacheKey); @@ -769,4 +786,87 @@ public String fetchAgentConfig(String nacosId, NacosRefConfig nacosRefConfig) { ErrorCode.INTERNAL_ERROR, "Failed to fetch agent config: " + e.getMessage()); } } + + @Override + public AiMaintainerService getAiMaintainerService(String nacosId) { + NacosInstance nacosInstance = findNacosInstance(nacosId); + return buildDynamicAiService(nacosInstance); + } + + @Override + public NacosInstance findNacosInstanceById(String nacosId) { + return findNacosInstance(nacosId); + } + + @Override + public NacosResult getDefaultNacosInstance() { + return nacosInstanceRepository + .findByIsDefaultTrue() + .map(instance -> new NacosResult().convertFrom(instance)) + .orElse(null); + } + + @Override + @Transactional + public void setDefaultNacosInstance(String nacosId) { + NacosInstance newDefault = findNacosInstance(nacosId); + // 取消旧默认 + nacosInstanceRepository + .findByIsDefaultTrue() + .ifPresent( + old -> { + old.setIsDefault(false); + nacosInstanceRepository.save(old); + }); + // 设置新默认 + newDefault.setIsDefault(true); + nacosInstanceRepository.save(newDefault); + } + + @Override + public void setDefaultNamespace(String nacosId, String namespaceId) { + NacosInstance instance = findNacosInstance(nacosId); + + // 用已保存的认证信息连接 Nacos,验证命名空间是否存在 + NamingMaintainerService namingService = buildDynamicNamingService(instance, ""); + try { + List namespaces = namingService.getNamespaceList(); + boolean exists = + namespaces != null + && namespaces.stream() + .anyMatch( + ns -> { + try { + java.lang.reflect.Method getId = + ns.getClass().getMethod("getNamespace"); + Object id = getId.invoke(ns); + // Nacos 中 public namespace 的 ID 为空字符串 + if ("public".equals(namespaceId)) { + return id == null + || "".equals(id) + || "public".equals(id); + } + return namespaceId.equals(id); + } catch (Exception e) { + return false; + } + }); + if (!exists) { + throw new 
BusinessException(ErrorCode.NOT_FOUND, "命名空间不存在: " + namespaceId); + } + } catch (BusinessException e) { + throw e; + } catch (NacosException e) { + log.error( + "Error verifying namespace from Nacos: nacosId={}, namespaceId={}", + nacosId, + namespaceId, + e); + throw new BusinessException( + ErrorCode.INTERNAL_ERROR, "连接 Nacos 验证命名空间失败: " + e.getErrMsg()); + } + + instance.setDefaultNamespace(namespaceId); + nacosInstanceRepository.save(instance); + } } diff --git a/himarket-server/src/main/java/com/alibaba/himarket/service/impl/ProductServiceImpl.java b/himarket-server/src/main/java/com/alibaba/himarket/service/impl/ProductServiceImpl.java index 3e1110748..1720bdc79 100644 --- a/himarket-server/src/main/java/com/alibaba/himarket/service/impl/ProductServiceImpl.java +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/impl/ProductServiceImpl.java @@ -42,6 +42,7 @@ import com.alibaba.himarket.dto.result.mcp.MCPConfigResult; import com.alibaba.himarket.dto.result.mcp.McpToolListResult; import com.alibaba.himarket.dto.result.model.ModelConfigResult; +import com.alibaba.himarket.dto.result.nacos.NacosResult; import com.alibaba.himarket.dto.result.portal.PortalResult; import com.alibaba.himarket.dto.result.product.ProductPublicationResult; import com.alibaba.himarket.dto.result.product.ProductRefResult; @@ -56,6 +57,8 @@ import com.alibaba.himarket.support.enums.ProductType; import com.alibaba.himarket.support.enums.SourceType; import com.alibaba.himarket.support.product.NacosRefConfig; +import com.alibaba.himarket.support.product.ProductFeature; +import com.alibaba.himarket.support.product.SkillConfig; import com.github.benmanes.caffeine.cache.Cache; import io.agentscope.core.tool.mcp.McpClientWrapper; import jakarta.persistence.criteria.Predicate; @@ -126,6 +129,29 @@ public ProductResult createProduct(CreateProductParam param) { product.setProductId(productId); product.setAdminId(contextHolder.getUser()); + // AGENT_SKILL products are immediately ready 
(no gateway/nacos binding needed) + if (param.getType() == ProductType.AGENT_SKILL) { + product.setStatus(ProductStatus.READY); + // 自动绑定默认 Nacos 实例 + NacosResult defaultNacos = nacosService.getDefaultNacosInstance(); + if (defaultNacos != null) { + ProductFeature feature = product.getFeature(); + if (feature == null) { + feature = new ProductFeature(); + product.setFeature(feature); + } + SkillConfig skillConfig = feature.getSkillConfig(); + if (skillConfig == null) { + skillConfig = new SkillConfig(); + feature.setSkillConfig(skillConfig); + } + skillConfig.setNacosId(defaultNacos.getNacosId()); + skillConfig.setNamespace(defaultNacos.getDefaultNamespace()); + } + } + + validateModelFeature(product.getType(), product.getFeature()); + productRepository.save(product); // Set product categories @@ -166,6 +192,11 @@ public PageResult listProducts(QueryProductParam param, Pageable param.setPortalId(contextHolder.getPortal()); } + // Non-admin users can only see published products + if (!contextHolder.isAdministrator()) { + param.setStatus(ProductStatus.PUBLISHED); + } + if (param.getType() != null && param.hasFilter()) { return listProductsWithFilter(param, pageable); } @@ -202,6 +233,8 @@ public ProductResult updateProduct(String productId, UpdateProductParam param) { } param.update(product); + validateModelFeature(product.getType(), product.getFeature()); + productRepository.saveAndFlush(product); // Set product categories @@ -220,8 +253,8 @@ public void publishProduct(String productId, String portalId) { Product product = findProduct(productId); product.setStatus(ProductStatus.PUBLISHED); - // Cannot publish if not linked - if (getProductRef(productId) == null) { + // AGENT_SKILL products don't require a ProductRef (no gateway/nacos binding needed) + if (product.getType() != ProductType.AGENT_SKILL && getProductRef(productId) == null) { throw new BusinessException(ErrorCode.INVALID_REQUEST, "API product not linked to API"); } @@ -318,6 +351,17 @@ public void 
deleteProduct(String productId) { SpringUtil.getApplicationContext().publishEvent(new ProductDeletingEvent(productId)); } + private void validateModelFeature(ProductType type, ProductFeature feature) { + if (type == ProductType.MODEL_API) { + if (feature == null + || feature.getModelFeature() == null + || StrUtil.isBlank(feature.getModelFeature().getModel())) { + throw new BusinessException( + ErrorCode.INVALID_REQUEST, "MODEL_API product must specify a model name"); + } + } + } + private Product findProduct(String productId) { return productRepository .findByProductId(productId) @@ -543,6 +587,31 @@ public McpToolListResult listMcpTools(String productId) { return result; } + @Override + public void updateSkillNacos(String productId, String nacosId, String namespace) { + Product product = findProduct(productId); + if (product.getType() != ProductType.AGENT_SKILL) { + throw new BusinessException( + ErrorCode.INVALID_REQUEST, "Only AGENT_SKILL products can update skill nacos"); + } + // Verify nacos instance exists + nacosService.getNacosInstance(nacosId); + + ProductFeature feature = product.getFeature(); + if (feature == null) { + feature = new ProductFeature(); + product.setFeature(feature); + } + SkillConfig skillConfig = feature.getSkillConfig(); + if (skillConfig == null) { + skillConfig = new SkillConfig(); + feature.setSkillConfig(skillConfig); + } + skillConfig.setNacosId(nacosId); + skillConfig.setNamespace(namespace); + productRepository.save(product); + } + private void syncConfig(Product product, ProductRef productRef) { SourceType sourceType = productRef.getSourceType(); @@ -667,6 +736,11 @@ private void fillProducts(List products) { if (productRef != null) { fillProductConfig(product, productRef); } + + // Fill skill config from feature + if (product.getFeature() != null && product.getFeature().getSkillConfig() != null) { + product.setSkillConfig(product.getFeature().getSkillConfig()); + } } } diff --git 
a/himarket-server/src/main/java/com/alibaba/himarket/service/impl/SkillServiceImpl.java b/himarket-server/src/main/java/com/alibaba/himarket/service/impl/SkillServiceImpl.java new file mode 100644 index 000000000..9eb9e9bee --- /dev/null +++ b/himarket-server/src/main/java/com/alibaba/himarket/service/impl/SkillServiceImpl.java @@ -0,0 +1,230 @@ +package com.alibaba.himarket.service.impl; + +import com.alibaba.himarket.core.exception.BusinessException; +import com.alibaba.himarket.core.exception.ErrorCode; +import com.alibaba.himarket.core.skill.FileTreeBuilder; +import com.alibaba.himarket.core.skill.SkillMdBuilder; +import com.alibaba.himarket.dto.result.skill.SkillFileContentResult; +import com.alibaba.himarket.dto.result.skill.SkillFileTreeNode; +import com.alibaba.himarket.service.NacosService; +import com.alibaba.himarket.service.SkillService; +import com.alibaba.nacos.api.ai.model.skills.Skill; +import com.alibaba.nacos.api.ai.model.skills.SkillBasicInfo; +import com.alibaba.nacos.api.ai.model.skills.SkillResource; +import com.alibaba.nacos.api.exception.NacosException; +import com.alibaba.nacos.api.model.Page; +import com.alibaba.nacos.maintainer.client.ai.AiMaintainerService; +import jakarta.servlet.http.HttpServletResponse; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.zip.ZipEntry; +import java.util.zip.ZipOutputStream; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Service; + +/** + * Nacos Skill SDK 透传服务实现。 + */ +@Service +@Slf4j +@RequiredArgsConstructor +public class SkillServiceImpl implements SkillService { + + private final NacosService nacosService; + + @Override + public String createSkill(String nacosId, String namespace, Skill skill) { + try { + AiMaintainerService service = nacosService.getAiMaintainerService(nacosId); + return 
service.registerSkill(namespace, skill); + } catch (NacosException e) { + throw wrapNacosException("创建 Skill 失败", e); + } + } + + @Override + public Skill getSkillDetail(String nacosId, String namespace, String skillName) { + try { + AiMaintainerService service = nacosService.getAiMaintainerService(nacosId); + Skill skill = service.getSkillDetail(namespace, skillName); + if (skill == null) { + throw new BusinessException(ErrorCode.NOT_FOUND, "Skill", skillName); + } + return skill; + } catch (NacosException e) { + throw wrapNacosException("查询 Skill 详情失败", e); + } + } + + @Override + public void updateSkill(String nacosId, String namespace, Skill skill) { + try { + AiMaintainerService service = nacosService.getAiMaintainerService(nacosId); + service.updateSkill(namespace, skill); + } catch (NacosException e) { + throw wrapNacosException("更新 Skill 失败", e); + } + } + + @Override + public void deleteSkill(String nacosId, String namespace, String skillName) { + try { + AiMaintainerService service = nacosService.getAiMaintainerService(nacosId); + service.deleteSkill(namespace, skillName); + } catch (NacosException e) { + throw wrapNacosException("删除 Skill 失败", e); + } + } + + @Override + public Page listSkills( + String nacosId, String namespace, String search, int pageNo, int pageSize) { + try { + AiMaintainerService service = nacosService.getAiMaintainerService(nacosId); + return service.listSkills(namespace, null, search, pageNo, pageSize); + } catch (NacosException e) { + throw wrapNacosException("查询 Skill 列表失败", e); + } + } + + @Override + public String getSkillDocument(String nacosId, String namespace, String skillName) { + Skill skill = getSkillDetail(nacosId, namespace, skillName); + return SkillMdBuilder.build(skill); + } + + @Override + public List getFileTree(String nacosId, String namespace, String skillName) { + Skill skill = getSkillDetail(nacosId, namespace, skillName); + return FileTreeBuilder.build(skill); + } + + @Override + public List getAllFiles( + 
String nacosId, String namespace, String skillName) { + Skill skill = getSkillDetail(nacosId, namespace, skillName); + List results = new ArrayList<>(); + + // SKILL.md 虚拟文件 + String skillMd = SkillMdBuilder.build(skill); + SkillFileContentResult mdResult = new SkillFileContentResult(); + mdResult.setPath("SKILL.md"); + mdResult.setContent(skillMd); + mdResult.setEncoding("text"); + mdResult.setSize(skillMd.getBytes(StandardCharsets.UTF_8).length); + results.add(mdResult); + + // resource 文件 + if (skill.getResource() != null) { + for (Map.Entry entry : skill.getResource().entrySet()) { + SkillResource resource = entry.getValue(); + SkillFileContentResult fileResult = new SkillFileContentResult(); + String path = buildResourcePath(resource); + fileResult.setPath(path); + fileResult.setContent(resource.getContent()); + boolean isBinary = + resource.getMetadata() != null + && "base64".equals(resource.getMetadata().get("encoding")); + fileResult.setEncoding(isBinary ? "base64" : "text"); + fileResult.setSize( + resource.getContent() != null + ? 
resource.getContent().getBytes(StandardCharsets.UTF_8).length + : 0); + results.add(fileResult); + } + } + return results; + } + + @Override + public SkillFileContentResult getFileContent( + String nacosId, String namespace, String skillName, String path) { + Skill skill = getSkillDetail(nacosId, namespace, skillName); + + if ("SKILL.md".equals(path)) { + String skillMd = SkillMdBuilder.build(skill); + SkillFileContentResult result = new SkillFileContentResult(); + result.setPath("SKILL.md"); + result.setContent(skillMd); + result.setEncoding("text"); + result.setSize(skillMd.getBytes(StandardCharsets.UTF_8).length); + return result; + } + + if (skill.getResource() != null) { + for (Map.Entry entry : skill.getResource().entrySet()) { + SkillResource resource = entry.getValue(); + String resourcePath = buildResourcePath(resource); + if (path.equals(resourcePath)) { + SkillFileContentResult result = new SkillFileContentResult(); + result.setPath(resourcePath); + result.setContent(resource.getContent()); + boolean isBinary = + resource.getMetadata() != null + && "base64".equals(resource.getMetadata().get("encoding")); + result.setEncoding(isBinary ? "base64" : "text"); + result.setSize( + resource.getContent() != null + ? 
resource.getContent().getBytes(StandardCharsets.UTF_8).length + : 0); + return result; + } + } + } + throw new BusinessException(ErrorCode.NOT_FOUND, "Skill 资源文件", path); + } + + @Override + public void downloadZip( + String nacosId, String namespace, String skillName, HttpServletResponse response) + throws IOException { + Skill skill = getSkillDetail(nacosId, namespace, skillName); + + response.setContentType("application/zip"); + response.setHeader("Content-Disposition", "attachment; filename=\"" + skillName + ".zip\""); + + try (ZipOutputStream zos = new ZipOutputStream(response.getOutputStream())) { + // SKILL.md + String skillMd = SkillMdBuilder.build(skill); + zos.putNextEntry(new ZipEntry(skillName + "/SKILL.md")); + zos.write(skillMd.getBytes(StandardCharsets.UTF_8)); + zos.closeEntry(); + + // resource 文件 + if (skill.getResource() != null) { + for (Map.Entry entry : skill.getResource().entrySet()) { + SkillResource resource = entry.getValue(); + String path = buildResourcePath(resource); + zos.putNextEntry(new ZipEntry(skillName + "/" + path)); + boolean isBinary = + resource.getMetadata() != null + && "base64".equals(resource.getMetadata().get("encoding")); + if (isBinary && resource.getContent() != null) { + zos.write(java.util.Base64.getDecoder().decode(resource.getContent())); + } else if (resource.getContent() != null) { + zos.write(resource.getContent().getBytes(StandardCharsets.UTF_8)); + } + zos.closeEntry(); + } + } + } + } + + private String buildResourcePath(SkillResource resource) { + String type = resource.getType(); + String name = resource.getName(); + if (type != null && !type.isEmpty()) { + return type + "/" + name; + } + return name; + } + + private BusinessException wrapNacosException(String message, NacosException e) { + log.error("{}: {}", message, e.getErrMsg(), e); + return new BusinessException(ErrorCode.INTERNAL_ERROR, message + ": " + e.getErrMsg()); + } +} diff --git 
a/himarket-server/src/test/java/com/alibaba/himarket/controller/CliProviderControllerTest.java b/himarket-server/src/test/java/com/alibaba/himarket/controller/CliProviderControllerTest.java new file mode 100644 index 000000000..5fdf9d52e --- /dev/null +++ b/himarket-server/src/test/java/com/alibaba/himarket/controller/CliProviderControllerTest.java @@ -0,0 +1,176 @@ +package com.alibaba.himarket.controller; + +import static org.junit.jupiter.api.Assertions.*; + +import com.alibaba.himarket.config.AcpProperties; +import com.alibaba.himarket.config.AcpProperties.CliProviderConfig; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxType; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +/** + * 单元测试:验证 CliProviderController 的 provider 列表和命令可用性检测逻辑。 + */ +class CliProviderControllerTest { + + private AcpProperties properties; + private CliProviderController controller; + + @BeforeEach + void setUp() { + properties = new AcpProperties(); + properties.setDefaultProvider("qodercli"); + + Map providers = new LinkedHashMap<>(); + + CliProviderConfig qoder = new CliProviderConfig(); + qoder.setDisplayName("Qoder CLI"); + qoder.setCommand("qodercli"); + qoder.setArgs("--acp"); + qoder.setCompatibleRuntimes(List.of(SandboxType.REMOTE, SandboxType.OPEN_SANDBOX)); + providers.put("qodercli", qoder); + + CliProviderConfig kiro = new CliProviderConfig(); + kiro.setDisplayName("Kiro CLI"); + kiro.setCommand("kiro-cli"); + kiro.setArgs("acp"); + kiro.setCompatibleRuntimes(List.of(SandboxType.REMOTE)); + providers.put("kiro-cli", kiro); + + // npx 通常在安装了 Node.js 的机器上可用 + CliProviderConfig claude = new CliProviderConfig(); + claude.setDisplayName("Claude Code"); + claude.setCommand("npx"); + claude.setArgs("@zed-industries/claude-agent-acp"); + claude.setCompatibleRuntimes(List.of(SandboxType.REMOTE)); + providers.put("claude-code", claude); + + // 一个肯定不存在的命令 + 
CliProviderConfig fake = new CliProviderConfig(); + fake.setDisplayName("Fake CLI"); + fake.setCommand("this-command-definitely-does-not-exist-xyz"); + fake.setArgs("--acp"); + // 不设置 compatibleRuntimes,确保不会因为 canRunInSandbox 短路为 available + providers.put("fake-cli", fake); + + properties.setProviders(providers); + controller = new CliProviderController(properties, null, null, null); + } + + @Test + void testListProvidersReturnsAllProviders() { + List result = controller.listProviders(); + assertEquals(4, result.size()); + } + + @Test + void testDefaultProviderIsMarked() { + List result = controller.listProviders(); + long defaultCount = + result.stream().filter(CliProviderController.CliProviderInfo::isDefault).count(); + assertEquals(1, defaultCount); + + CliProviderController.CliProviderInfo defaultProvider = + result.stream() + .filter(CliProviderController.CliProviderInfo::isDefault) + .findFirst() + .orElseThrow(); + assertEquals("qodercli", defaultProvider.key()); + } + + @Test + void testFakeCommandIsNotAvailable() { + List result = controller.listProviders(); + CliProviderController.CliProviderInfo fake = + result.stream().filter(p -> p.key().equals("fake-cli")).findFirst().orElseThrow(); + assertFalse(fake.available(), "不存在的命令应该标记为不可用"); + } + + @Test + void testIsCommandAvailableWithExistingCommand() { + // 'ls' 在所有 Unix 系统上都存在 + assertTrue(CliProviderController.isCommandAvailable("ls")); + } + + @Test + void testIsCommandAvailableWithNonExistingCommand() { + assertFalse( + CliProviderController.isCommandAvailable( + "this-command-definitely-does-not-exist-xyz")); + } + + @Test + void testIsCommandAvailableWithNull() { + assertFalse(CliProviderController.isCommandAvailable(null)); + } + + @Test + void testIsCommandAvailableWithBlank() { + assertFalse(CliProviderController.isCommandAvailable(" ")); + } + + @Test + void testDisplayNameFallsBackToKey() { + CliProviderConfig noName = new CliProviderConfig(); + noName.setCommand("ls"); + 
noName.setArgs("--help"); + // displayName 不设置,应该 fallback 到 key + properties.getProviders().put("no-name-cli", noName); + + List result = controller.listProviders(); + CliProviderController.CliProviderInfo noNameInfo = + result.stream() + .filter(p -> p.key().equals("no-name-cli")) + .findFirst() + .orElseThrow(); + assertEquals("no-name-cli", noNameInfo.displayName()); + } + + @Test + void testAvailableFieldIncludedInResponse() { + List result = controller.listProviders(); + // 每个 provider 都应该有 available 字段(不管 true 还是 false) + for (CliProviderController.CliProviderInfo info : result) { + assertNotNull(info.key()); + assertNotNull(info.displayName()); + // available 是 boolean 基本类型,不会为 null + } + } + + @Test + void testCompatibleRuntimesIncludedInResponse() { + List result = controller.listProviders(); + + CliProviderController.CliProviderInfo qoder = + result.stream().filter(p -> p.key().equals("qodercli")).findFirst().orElseThrow(); + assertEquals( + List.of(SandboxType.REMOTE, SandboxType.OPEN_SANDBOX), qoder.compatibleRuntimes()); + + CliProviderController.CliProviderInfo claude = + result.stream() + .filter(p -> p.key().equals("claude-code")) + .findFirst() + .orElseThrow(); + assertEquals(List.of(SandboxType.REMOTE), claude.compatibleRuntimes()); + } + + @Test + void testProviderWithNullCompatibleRuntimes() { + CliProviderConfig noRuntime = new CliProviderConfig(); + noRuntime.setDisplayName("No Runtime CLI"); + noRuntime.setCommand("ls"); + // 不设置 compatibleRuntimes + properties.getProviders().put("no-runtime-cli", noRuntime); + + List result = controller.listProviders(); + CliProviderController.CliProviderInfo noRuntimeInfo = + result.stream() + .filter(p -> p.key().equals("no-runtime-cli")) + .findFirst() + .orElseThrow(); + assertNull(noRuntimeInfo.compatibleRuntimes()); + } +} diff --git a/himarket-server/src/test/java/com/alibaba/himarket/core/skill/SkillMdParserTest.java 
b/himarket-server/src/test/java/com/alibaba/himarket/core/skill/SkillMdParserTest.java new file mode 100644 index 000000000..296b8e6e1 --- /dev/null +++ b/himarket-server/src/test/java/com/alibaba/himarket/core/skill/SkillMdParserTest.java @@ -0,0 +1,185 @@ +package com.alibaba.himarket.core.skill; + +import static org.junit.jupiter.api.Assertions.*; + +import com.alibaba.himarket.core.exception.BusinessException; +import java.util.LinkedHashMap; +import java.util.Map; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class SkillMdParserTest { + + private SkillMdParser parser; + + @BeforeEach + void setUp() { + parser = new SkillMdParser(); + } + + // ========== parse 测试 ========== + + @Test + void testParseStandardSkillMd() { + String content = + """ + --- + name: my-skill + description: "技能描述" + --- + # 技能标题 + + ## 使用说明 + + 正文内容 + """; + + SkillMdDocument doc = parser.parse(content); + + assertEquals("my-skill", doc.getFrontmatter().get("name")); + assertEquals("技能描述", doc.getFrontmatter().get("description")); + assertTrue(doc.getBody().contains("# 技能标题")); + assertTrue(doc.getBody().contains("正文内容")); + } + + @Test + void testParseEmptyBody() { + String content = + """ + --- + name: my-skill + --- + """; + + SkillMdDocument doc = parser.parse(content); + + assertEquals("my-skill", doc.getFrontmatter().get("name")); + assertEquals("", doc.getBody()); + } + + @Test + void testParseEmptyFrontmatter() { + String content = + """ + --- + --- + # Body only + """; + + SkillMdDocument doc = parser.parse(content); + + assertTrue(doc.getFrontmatter().isEmpty()); + assertTrue(doc.getBody().contains("# Body only")); + } + + @Test + void testParseNullContentThrows() { + BusinessException ex = assertThrows(BusinessException.class, () -> parser.parse(null)); + assertTrue(ex.getMessage().contains("内容不能为空")); + } + + @Test + void testParseEmptyContentThrows() { + BusinessException ex = assertThrows(BusinessException.class, () -> parser.parse("")); + 
assertTrue(ex.getMessage().contains("内容不能为空")); + } + + @Test + void testParseMissingOpeningDelimiterThrows() { + String content = "name: my-skill\n---\n# Body"; + BusinessException ex = assertThrows(BusinessException.class, () -> parser.parse(content)); + assertTrue(ex.getMessage().contains("缺少 YAML frontmatter 分隔符")); + } + + @Test + void testParseMissingClosingDelimiterThrows() { + String content = "---\nname: my-skill\n# Body without closing delimiter"; + BusinessException ex = assertThrows(BusinessException.class, () -> parser.parse(content)); + assertTrue(ex.getMessage().contains("缺少 YAML frontmatter 结束分隔符")); + } + + @Test + void testParseInvalidYamlThrows() { + String content = "---\n: invalid: yaml: [broken\n---\n# Body"; + BusinessException ex = assertThrows(BusinessException.class, () -> parser.parse(content)); + assertTrue(ex.getMessage().contains("YAML frontmatter 解析失败")); + } + + @Test + void testParseYamlNotMapThrows() { + String content = "---\n- item1\n- item2\n---\n# Body"; + BusinessException ex = assertThrows(BusinessException.class, () -> parser.parse(content)); + assertTrue(ex.getMessage().contains("期望键值对映射")); + } + + // ========== serialize 测试 ========== + + @Test + void testSerializeStandardDocument() { + Map frontmatter = new LinkedHashMap<>(); + frontmatter.put("name", "my-skill"); + frontmatter.put("description", "技能描述"); + SkillMdDocument doc = new SkillMdDocument(frontmatter, "# 技能标题\n\n正文内容\n"); + + String result = parser.serialize(doc); + + assertTrue(result.startsWith("---\n")); + assertTrue(result.contains("name: my-skill")); + assertTrue(result.contains("description: 技能描述")); + assertTrue(result.contains("---\n# 技能标题")); + assertTrue(result.contains("正文内容")); + } + + @Test + void testSerializeEmptyFrontmatter() { + SkillMdDocument doc = new SkillMdDocument(new LinkedHashMap<>(), "# Body\n"); + + String result = parser.serialize(doc); + + assertTrue(result.startsWith("---\n---\n")); + assertTrue(result.contains("# Body")); + } + + 
@Test + void testSerializeEmptyBody() { + Map frontmatter = new LinkedHashMap<>(); + frontmatter.put("name", "test"); + SkillMdDocument doc = new SkillMdDocument(frontmatter, ""); + + String result = parser.serialize(doc); + + assertTrue(result.contains("name: test")); + assertTrue(result.endsWith("---\n")); + } + + @Test + void testSerializeNullDocumentThrows() { + BusinessException ex = assertThrows(BusinessException.class, () -> parser.serialize(null)); + assertTrue(ex.getMessage().contains("不能为空")); + } + + // ========== 往返一致性测试 ========== + + @Test + void testRoundTripConsistency() { + String original = + """ + --- + name: my-skill + description: "技能描述" + --- + # 技能标题 + + ## 使用说明 + + 正文内容 + """; + + SkillMdDocument doc1 = parser.parse(original); + String serialized = parser.serialize(doc1); + SkillMdDocument doc2 = parser.parse(serialized); + + assertEquals(doc1.getFrontmatter(), doc2.getFrontmatter()); + assertEquals(doc1.getBody(), doc2.getBody()); + } +} diff --git a/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/HiCodingFullE2ETest.java b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/HiCodingFullE2ETest.java new file mode 100644 index 000000000..e645aa99c --- /dev/null +++ b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/HiCodingFullE2ETest.java @@ -0,0 +1,1628 @@ +package com.alibaba.himarket.service.hicoding; + +import static org.junit.jupiter.api.Assertions.*; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.io.IOException; +import java.net.URI; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.time.Duration; +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; +import java.util.concurrent.CopyOnWriteArrayList; +import 
java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import org.junit.jupiter.api.*; +import org.junit.jupiter.api.MethodOrderer.OrderAnnotation; +import org.junit.jupiter.api.Nested; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.web.socket.CloseStatus; +import org.springframework.web.socket.TextMessage; +import org.springframework.web.socket.WebSocketHttpHeaders; +import org.springframework.web.socket.WebSocketSession; +import org.springframework.web.socket.client.standard.StandardWebSocketClient; +import org.springframework.web.socket.handler.TextWebSocketHandler; + +/** + * HiCoding 全流程 E2E 测试。 + * + *

覆盖场景: + *

    + *
  • CodingSession REST API CRUD(创建、查询、更新、删除会话) + *
  • WebSocket 连接认证(有效/无效/缺失 token) + *
  • WebSocket ACP 协议握手(initialize) + *
  • 沙箱初始化流程(session/config → sandbox/status 通知) + *
  • 发起新会话(session/new)并验证 sessionId、models、modes + *
  • 加载历史会话(session/load) + *
  • 对话执行(session/prompt) + *
  • 初始化期间消息缓冲和回放 + *
  • 连接异常和优雅关闭 + *
  • 跨用户会话隔离 + *
+ * + *

依赖运行中的后端服务(localhost:8080),使用 {@code mvn test -Dgroups=integration} 显式启用。 + */ +@Tag("integration") +@DisplayName("HiCoding 全流程 E2E 测试") +class HiCodingFullE2ETest { + + private static final Logger log = LoggerFactory.getLogger(HiCodingFullE2ETest.class); + private static final ObjectMapper mapper = new ObjectMapper(); + + private static final String BASE_URL = "http://localhost:8080"; + private static final String WS_BASE = "ws://localhost:8080/ws/acp"; + private static final int WS_CONNECT_TIMEOUT_SEC = 10; + private static final int WS_RESPONSE_TIMEOUT_SEC = 30; + private static final int SANDBOX_INIT_TIMEOUT_SEC = 60; + + private static final HttpClient httpClient = + HttpClient.newBuilder().connectTimeout(Duration.ofSeconds(10)).build(); + + // ───────────────────────────────────────────────────────────────── + // 前置条件检查 + // ───────────────────────────────────────────────────────────────── + + @BeforeEach + void ensureServerRunning() { + Assumptions.assumeTrue(isServerRunning(), "后端服务未在 localhost:8080 运行,跳过 E2E 测试"); + } + + // ═════════════════════════════════════════════════════════════════ + // 1. CodingSession REST API CRUD 测试 + // ═════════════════════════════════════════════════════════════════ + + @Nested + @DisplayName("1. 
CodingSession REST API") + @TestMethodOrder(OrderAnnotation.class) + class CodingSessionCrudTest { + + /** + * 1.1 创建会话 → 查询列表 → 更新标题 → 删除会话,完整 CRUD 闭环。 + */ + @Test + @Order(1) + @DisplayName("1.1 完整 CRUD 生命周期") + void testFullCrudLifecycle() throws Exception { + String token = obtainDeveloperToken(); + assertNotNull(token, "获取开发者 token 失败"); + log.info("=== 1.1 CodingSession CRUD 生命周期测试 ==="); + + // ── CREATE ── + String cliSessionId = "cli-" + UUID.randomUUID(); + ObjectNode createBody = mapper.createObjectNode(); + createBody.put("cliSessionId", cliSessionId); + createBody.put("title", "E2E Test Session"); + createBody.put("providerKey", "qwen-code"); + createBody.put("cwd", "/workspace/test"); + + HttpResponse createResp = + httpPost("/coding-sessions", createBody.toString(), token); + log.info("[CREATE] status={}, body={}", createResp.statusCode(), createResp.body()); + assertEquals(200, createResp.statusCode(), "创建会话应返回 200"); + + JsonNode createResult = parseSuccessData(createResp.body()); + assertNotNull(createResult, "创建结果不应为 null"); + String sessionId = createResult.get("sessionId").asText(); + assertFalse(sessionId.isBlank(), "sessionId 不应为空"); + assertEquals(cliSessionId, createResult.get("cliSessionId").asText()); + assertEquals("E2E Test Session", createResult.get("title").asText()); + log.info("[CREATE] sessionId={}", sessionId); + + // ── LIST ── + HttpResponse listResp = httpGet("/coding-sessions?page=0&size=10", token); + log.info("[LIST] status={}", listResp.statusCode()); + assertEquals(200, listResp.statusCode(), "查询列表应返回 200"); + + JsonNode listResult = parseSuccessData(listResp.body()); + assertNotNull(listResult, "列表结果不应为 null"); + assertTrue(listResult.has("content"), "列表应包含 content 字段"); + JsonNode content = listResult.get("content"); + assertTrue(content.isArray(), "content 应为数组"); + assertTrue(content.size() > 0, "列表不应为空"); + + // 验证刚创建的会话在列表中 + boolean found = false; + for (JsonNode item : content) { + if 
(sessionId.equals(item.get("sessionId").asText())) { + found = true; + break; + } + } + assertTrue(found, "刚创建的会话应在列表中"); + log.info("[LIST] 找到会话 sessionId={}, 列表总数={}", sessionId, content.size()); + + // ── UPDATE ── + ObjectNode updateBody = mapper.createObjectNode(); + updateBody.put("title", "Updated Title"); + + HttpResponse updateResp = + httpPatch("/coding-sessions/" + sessionId, updateBody.toString(), token); + log.info("[UPDATE] status={}", updateResp.statusCode()); + assertEquals(200, updateResp.statusCode(), "更新会话应返回 200"); + + JsonNode updateResult = parseSuccessData(updateResp.body()); + assertEquals("Updated Title", updateResult.get("title").asText()); + log.info("[UPDATE] 标题已更新为 '{}'", updateResult.get("title").asText()); + + // ── DELETE ── + HttpResponse deleteResp = httpDelete("/coding-sessions/" + sessionId, token); + log.info("[DELETE] status={}", deleteResp.statusCode()); + assertEquals(200, deleteResp.statusCode(), "删除会话应返回 200"); + + // 验证删除后查不到 + HttpResponse listResp2 = httpGet("/coding-sessions?page=0&size=100", token); + JsonNode listResult2 = parseSuccessData(listResp2.body()); + boolean foundAfterDelete = false; + for (JsonNode item : listResult2.get("content")) { + if (sessionId.equals(item.get("sessionId").asText())) { + foundAfterDelete = true; + break; + } + } + assertFalse(foundAfterDelete, "删除后的会话不应在列表中"); + log.info("[DELETE] 确认会话已删除"); + } + + /** + * 1.2 创建会话时缺少必填字段 cliSessionId,应返回参数校验错误。 + */ + @Test + @Order(2) + @DisplayName("1.2 创建会话 - 缺少必填字段") + void testCreateSessionMissingCliSessionId() throws Exception { + String token = obtainDeveloperToken(); + + ObjectNode body = mapper.createObjectNode(); + body.put("title", "No CLI Session ID"); + // 不设置 cliSessionId + + HttpResponse resp = httpPost("/coding-sessions", body.toString(), token); + log.info("[VALIDATION] status={}, body={}", resp.statusCode(), resp.body()); + assertEquals(400, resp.statusCode(), "缺少必填字段应返回 400"); + + JsonNode responseBody = 
mapper.readTree(resp.body()); + assertNotEquals("SUCCESS", responseBody.path("code").asText(), "应返回错误码"); + log.info("[VALIDATION] 参数校验错误验证通过"); + } + + /** + * 1.3 未携带 token 访问需认证的接口,应返回 401。 + */ + @Test + @Order(3) + @DisplayName("1.3 无 token 访问") + void testAccessWithoutToken() throws Exception { + HttpResponse resp = httpGet("/coding-sessions?page=0&size=10", null); + log.info("[AUTH] status={}", resp.statusCode()); + assertEquals(403, resp.statusCode(), "无 token 应返回 403"); + log.info("[AUTH] 无 token 认证拒绝验证通过"); + } + + /** + * 1.4 更新不存在的会话,应返回 404。 + */ + @Test + @Order(4) + @DisplayName("1.4 更新不存在的会话") + void testUpdateNonExistentSession() throws Exception { + String token = obtainDeveloperToken(); + + ObjectNode body = mapper.createObjectNode(); + body.put("title", "Ghost Session"); + + HttpResponse resp = + httpPatch("/coding-sessions/non-existent-id", body.toString(), token); + log.info("[NOT_FOUND] status={}", resp.statusCode()); + assertEquals(404, resp.statusCode(), "不存在的会话应返回 404"); + log.info("[NOT_FOUND] 更新不存在会话验证通过"); + } + + /** + * 1.5 删除不存在的会话,应返回 404。 + */ + @Test + @Order(5) + @DisplayName("1.5 删除不存在的会话") + void testDeleteNonExistentSession() throws Exception { + String token = obtainDeveloperToken(); + + HttpResponse resp = httpDelete("/coding-sessions/non-existent-id", token); + log.info("[NOT_FOUND] status={}", resp.statusCode()); + assertEquals(404, resp.statusCode(), "不存在的会话应返回 404"); + log.info("[NOT_FOUND] 删除不存在会话验证通过"); + } + + /** + * 1.6 分页查询验证:指定 page 和 size 参数,确认分页字段正确。 + */ + @Test + @Order(6) + @DisplayName("1.6 分页查询") + void testListSessionsPagination() throws Exception { + String token = obtainDeveloperToken(); + + // 创建多个会话 + List sessionIds = new ArrayList<>(); + for (int i = 0; i < 3; i++) { + ObjectNode body = mapper.createObjectNode(); + body.put("cliSessionId", "cli-page-" + i + "-" + UUID.randomUUID()); + body.put("title", "Pagination Test " + i); + + HttpResponse resp = httpPost("/coding-sessions", body.toString(), 
token); + assertEquals(200, resp.statusCode()); + sessionIds.add(parseSuccessData(resp.body()).get("sessionId").asText()); + } + + try { + // 查询第 1 页,每页 2 条 + HttpResponse resp = httpGet("/coding-sessions?page=0&size=2", token); + assertEquals(200, resp.statusCode()); + + JsonNode result = parseSuccessData(resp.body()); + JsonNode content = result.get("content"); + assertTrue(content.isArray(), "content 应为数组"); + assertTrue(content.size() <= 2, "每页最多 2 条"); + + // 验证分页元数据字段存在 + assertTrue(result.has("totalElements") || result.has("total"), "应包含总数字段"); + log.info("[PAGINATION] page=0, size=2, returned={}", content.size()); + + } finally { + // 清理 + for (String id : sessionIds) { + httpDelete("/coding-sessions/" + id, token); + } + } + } + } + + // ═════════════════════════════════════════════════════════════════ + // 2. WebSocket 连接认证测试 + // ═════════════════════════════════════════════════════════════════ + + @Nested + @DisplayName("2. WebSocket 连接认证") + class WebSocketAuthTest { + + /** + * 2.1 不携带 token 连接 WebSocket,应被拒绝(握手失败或立即关闭)。 + */ + @Test + @DisplayName("2.1 无 token 连接 - 应被拒绝") + void testConnectWithoutToken() throws Exception { + log.info("=== 2.1 无 token WebSocket 连接测试 ==="); + + String wsUrl = WS_BASE + "?provider=qwen-code"; + CountDownLatch closeLatch = new CountDownLatch(1); + AtomicReference closeStatusRef = new AtomicReference<>(); + + StandardWebSocketClient wsClient = new StandardWebSocketClient(); + try { + WebSocketSession session = + wsClient.execute( + new TextWebSocketHandler() { + @Override + public void afterConnectionClosed( + WebSocketSession s, CloseStatus status) { + closeStatusRef.set(status); + closeLatch.countDown(); + } + }, + new WebSocketHttpHeaders(), + URI.create(wsUrl)) + .get(WS_CONNECT_TIMEOUT_SEC, TimeUnit.SECONDS); + + // 如果连接成功了,应该很快被服务端关闭 + boolean closed = closeLatch.await(10, TimeUnit.SECONDS); + if (session.isOpen()) { + session.close(); + } + // 连接应在短时间内被关闭,或握手直接失败 + log.info( + "[NO_TOKEN] 连接状态: closed={}, 
closeStatus={}", closed, closeStatusRef.get()); + } catch (Exception e) { + // 握手失败也是预期行为 + log.info("[NO_TOKEN] 连接被拒绝: {}", e.getMessage()); + } + log.info("[NO_TOKEN] 无 token 连接测试通过"); + } + + /** + * 2.2 携带无效 token 连接 WebSocket,应被拒绝。 + */ + @Test + @DisplayName("2.2 无效 token 连接 - 应被拒绝") + void testConnectWithInvalidToken() throws Exception { + log.info("=== 2.2 无效 token WebSocket 连接测试 ==="); + + String wsUrl = WS_BASE + "?provider=qwen-code&token=invalid-jwt-token"; + CountDownLatch closeLatch = new CountDownLatch(1); + + StandardWebSocketClient wsClient = new StandardWebSocketClient(); + try { + WebSocketSession session = + wsClient.execute( + new TextWebSocketHandler() { + @Override + public void afterConnectionClosed( + WebSocketSession s, CloseStatus status) { + closeLatch.countDown(); + } + }, + new WebSocketHttpHeaders(), + URI.create(wsUrl)) + .get(WS_CONNECT_TIMEOUT_SEC, TimeUnit.SECONDS); + + boolean closed = closeLatch.await(10, TimeUnit.SECONDS); + if (session.isOpen()) { + session.close(); + } + log.info("[INVALID_TOKEN] 连接已关闭: {}", closed); + } catch (Exception e) { + log.info("[INVALID_TOKEN] 连接被拒绝: {}", e.getMessage()); + } + log.info("[INVALID_TOKEN] 无效 token 连接测试通过"); + } + + /** + * 2.3 携带有效 token 连接 WebSocket,应成功建立连接。 + */ + @Test + @DisplayName("2.3 有效 token 连接 - 应成功") + void testConnectWithValidToken() throws Exception { + log.info("=== 2.3 有效 token WebSocket 连接测试 ==="); + String token = obtainDeveloperToken(); + assertNotNull(token, "获取开发者 token 失败"); + + String wsUrl = WS_BASE + "?provider=qwen-code&token=" + token; + CountDownLatch connectedLatch = new CountDownLatch(1); + + StandardWebSocketClient wsClient = new StandardWebSocketClient(); + WebSocketSession session = + wsClient.execute( + new TextWebSocketHandler() { + @Override + public void afterConnectionEstablished(WebSocketSession s) { + connectedLatch.countDown(); + } + }, + new WebSocketHttpHeaders(), + URI.create(wsUrl)) + .get(WS_CONNECT_TIMEOUT_SEC, TimeUnit.SECONDS); + + try { 
+ boolean connected = connectedLatch.await(5, TimeUnit.SECONDS); + assertTrue(connected, "WebSocket 应成功连接"); + assertTrue(session.isOpen(), "WebSocket session 应处于开放状态"); + log.info("[VALID_TOKEN] WebSocket 连接成功, sessionId={}", session.getId()); + } finally { + if (session.isOpen()) { + session.close(); + } + } + log.info("[VALID_TOKEN] 有效 token 连接测试通过"); + } + + /** + * 2.4 使用不存在的 provider 连接,应被关闭。 + */ + @Test + @DisplayName("2.4 未知 provider 连接 - 应被关闭") + void testConnectWithUnknownProvider() throws Exception { + log.info("=== 2.4 未知 provider WebSocket 连接测试 ==="); + String token = obtainDeveloperToken(); + + String wsUrl = WS_BASE + "?provider=non-existent-provider&token=" + token; + CountDownLatch closeLatch = new CountDownLatch(1); + AtomicReference closeStatusRef = new AtomicReference<>(); + + StandardWebSocketClient wsClient = new StandardWebSocketClient(); + try { + WebSocketSession session = + wsClient.execute( + new TextWebSocketHandler() { + @Override + public void afterConnectionClosed( + WebSocketSession s, CloseStatus status) { + closeStatusRef.set(status); + closeLatch.countDown(); + } + }, + new WebSocketHttpHeaders(), + URI.create(wsUrl)) + .get(WS_CONNECT_TIMEOUT_SEC, TimeUnit.SECONDS); + + // 服务端应检测到未知 provider 后关闭连接 + boolean closed = closeLatch.await(15, TimeUnit.SECONDS); + if (session.isOpen()) { + session.close(); + } + assertTrue(closed, "未知 provider 的连接应被服务端关闭"); + log.info("[UNKNOWN_PROVIDER] closeStatus={}", closeStatusRef.get()); + } catch (Exception e) { + log.info("[UNKNOWN_PROVIDER] 连接失败: {}", e.getMessage()); + } + log.info("[UNKNOWN_PROVIDER] 未知 provider 连接测试通过"); + } + } + + // ═════════════════════════════════════════════════════════════════ + // 3. WebSocket ACP 握手与沙箱初始化测试 + // ═════════════════════════════════════════════════════════════════ + + @Nested + @DisplayName("3. 
ACP 协议握手与沙箱初始化") + class AcpHandshakeAndInitTest { + + /** + * 3.1 连接后发送 session/config,验证收到 sandbox/status 和 sandbox/init-progress 通知。 + */ + @Test + @DisplayName("3.1 session/config 触发沙箱初始化流程") + void testSessionConfigTriggersSandboxInit() throws Exception { + log.info("=== 3.1 session/config 触发沙箱初始化 ==="); + String token = obtainDeveloperToken(); + + String wsUrl = WS_BASE + "?provider=qwen-code&runtime=remote&token=" + token; + + CountDownLatch statusLatch = new CountDownLatch(1); + CopyOnWriteArrayList allMessages = new CopyOnWriteArrayList<>(); + AtomicReference sandboxStatusRef = new AtomicReference<>(); + + StandardWebSocketClient wsClient = new StandardWebSocketClient(); + WebSocketSession session = + wsClient.execute( + new TextWebSocketHandler() { + @Override + protected void handleTextMessage( + WebSocketSession s, TextMessage message) { + String payload = message.getPayload(); + allMessages.add(payload); + log.info("[INIT] 收到消息: {}", truncate(payload, 200)); + try { + JsonNode node = mapper.readTree(payload); + String method = node.path("method").asText(""); + if ("sandbox/status".equals(method)) { + sandboxStatusRef.set(payload); + statusLatch.countDown(); + } + } catch (Exception e) { + // ignore + } + } + }, + new WebSocketHttpHeaders(), + URI.create(wsUrl)) + .get(WS_CONNECT_TIMEOUT_SEC, TimeUnit.SECONDS); + + try { + assertTrue(session.isOpen(), "WebSocket 应已连接"); + + // 发送 session/config 消息 + String configMsg = buildSessionConfigMessage(null, null, null, null); + log.info("[INIT] 发送 session/config"); + session.sendMessage(new TextMessage(configMsg)); + + // 等待 sandbox/status 通知 + boolean received = statusLatch.await(SANDBOX_INIT_TIMEOUT_SEC, TimeUnit.SECONDS); + + // 验证收到了相关通知 + log.info("[INIT] 收到消息总数: {}", allMessages.size()); + + // 检查是否收到了 sandbox/init-progress 消息 + boolean hasInitProgress = + allMessages.stream().anyMatch(m -> m.contains("sandbox/init-progress")); + log.info("[INIT] 收到 init-progress 通知: {}", hasInitProgress); + + if 
(received) { + JsonNode statusNode = mapper.readTree(sandboxStatusRef.get()); + String status = statusNode.path("params").path("status").asText(); + log.info("[INIT] sandbox/status = {}", status); + // status 可能是 "ready"、"creating" 或 "error" + assertTrue( + List.of("ready", "creating", "error").contains(status), + "sandbox/status 应为 ready、creating 或 error,实际为: " + status); + } else { + log.warn("[INIT] 未在超时内收到 sandbox/status,可能沙箱环境未配置"); + } + } finally { + if (session.isOpen()) { + session.close(); + } + } + log.info("[INIT] 沙箱初始化流程测试通过"); + } + + /** + * 3.2 连接后发送非 session/config 的消息,应被缓存(pending queue)。 + */ + @Test + @DisplayName("3.2 初始化前消息缓冲") + void testMessageBufferingBeforeInit() throws Exception { + log.info("=== 3.2 初始化前消息缓冲测试 ==="); + String token = obtainDeveloperToken(); + + String wsUrl = WS_BASE + "?provider=qwen-code&runtime=remote&token=" + token; + CopyOnWriteArrayList receivedMessages = new CopyOnWriteArrayList<>(); + + StandardWebSocketClient wsClient = new StandardWebSocketClient(); + WebSocketSession session = + wsClient.execute( + new TextWebSocketHandler() { + @Override + protected void handleTextMessage( + WebSocketSession s, TextMessage message) { + receivedMessages.add(message.getPayload()); + } + }, + new WebSocketHttpHeaders(), + URI.create(wsUrl)) + .get(WS_CONNECT_TIMEOUT_SEC, TimeUnit.SECONDS); + + try { + assertTrue(session.isOpen(), "WebSocket 应已连接"); + + // 在发送 session/config 之前发送一条普通消息 + // 该消息应被缓存到 pendingMessageMap 中 + String earlyMsg = buildInitializeRequest(0); + session.sendMessage(new TextMessage(earlyMsg)); + log.info("[BUFFER] 发送了 initialize 消息(在 session/config 之前)"); + + // 等待一小段时间 + Thread.sleep(2000); + + // 由于 pipeline 尚未启动(等待 session/config), + // 消息应被缓存,不会有 CLI 响应 + // 此时发送 session/config 消息以触发初始化 + String configMsg = buildSessionConfigMessage(null, null, null, null); + session.sendMessage(new TextMessage(configMsg)); + log.info("[BUFFER] 发送 session/config 触发初始化"); + + // 等待一段时间看是否有响应 + Thread.sleep(5000); + 
log.info("[BUFFER] 收到消息数: {}", receivedMessages.size()); + + // 至少应收到 sandbox/status 或 sandbox/init-progress 消息 + // 缓存的 initialize 消息应在 pipeline ready 后被回放 + for (String msg : receivedMessages) { + log.info("[BUFFER] 消息: {}", truncate(msg, 150)); + } + } finally { + if (session.isOpen()) { + session.close(); + } + } + log.info("[BUFFER] 消息缓冲测试通过"); + } + } + + // ═════════════════════════════════════════════════════════════════ + // 4. 新会话全流程(session/new) + // ═════════════════════════════════════════════════════════════════ + + @Nested + @DisplayName("4. 新会话全流程") + class NewSessionFlowTest { + + /** + * 4.1 完整流程:连接 → session/config → sandbox ready → initialize → session/new。 + * + *

验证 ACP 协议的完整握手过程和新会话创建。 + */ + @Test + @DisplayName("4.1 完整新会话流程") + void testFullNewSessionFlow() throws Exception { + log.info("=== 4.1 完整新会话流程 ==="); + String token = obtainDeveloperToken(); + + String wsUrl = WS_BASE + "?provider=qwen-code&runtime=remote&token=" + token; + + CountDownLatch sandboxReadyLatch = new CountDownLatch(1); + CountDownLatch initResponseLatch = new CountDownLatch(1); + CountDownLatch sessionNewLatch = new CountDownLatch(1); + + AtomicReference initResponseRef = new AtomicReference<>(); + AtomicReference sessionNewResponseRef = new AtomicReference<>(); + CopyOnWriteArrayList notifications = new CopyOnWriteArrayList<>(); + + StandardWebSocketClient wsClient = new StandardWebSocketClient(); + WebSocketSession session = + wsClient.execute( + new TextWebSocketHandler() { + @Override + protected void handleTextMessage( + WebSocketSession s, TextMessage message) { + String payload = message.getPayload(); + log.info( + "[NEW_SESSION] 收到: {}", truncate(payload, 200)); + try { + JsonNode node = mapper.readTree(payload); + + // 通知消息(无 id) + if (node.has("method") && !node.has("id")) { + String method = node.get("method").asText(); + notifications.add(payload); + if ("sandbox/status".equals(method)) { + String st = + node.path("params") + .path("status") + .asText(); + if ("ready".equals(st)) { + sandboxReadyLatch.countDown(); + } + } + return; + } + + // 响应消息(有 id) + if (node.has("id")) { + int id = node.get("id").asInt(); + if (id == 0) { + initResponseRef.set(payload); + initResponseLatch.countDown(); + } else if (id == 1) { + sessionNewResponseRef.set(payload); + sessionNewLatch.countDown(); + } + } + } catch (Exception e) { + log.debug("[NEW_SESSION] 非 JSON: {}", payload); + } + } + }, + new WebSocketHttpHeaders(), + URI.create(wsUrl)) + .get(WS_CONNECT_TIMEOUT_SEC, TimeUnit.SECONDS); + + try { + assertTrue(session.isOpen(), "WebSocket 应已连接"); + + // Step 1: 发送 session/config + log.info("[NEW_SESSION] Step 1: 发送 session/config"); + 
session.sendMessage( + new TextMessage(buildSessionConfigMessage(null, null, null, null))); + + // 等待沙箱就绪 + boolean sandboxReady = + sandboxReadyLatch.await(SANDBOX_INIT_TIMEOUT_SEC, TimeUnit.SECONDS); + if (!sandboxReady) { + log.warn("[NEW_SESSION] 沙箱未在超时内就绪,跳过后续步骤"); + Assumptions.assumeTrue(false, "沙箱环境未就绪"); + } + log.info("[NEW_SESSION] 沙箱已就绪"); + + // Step 2: 发送 initialize + log.info("[NEW_SESSION] Step 2: 发送 initialize"); + session.sendMessage(new TextMessage(buildInitializeRequest(0))); + + boolean initReceived = + initResponseLatch.await(WS_RESPONSE_TIMEOUT_SEC, TimeUnit.SECONDS); + assertTrue(initReceived, "应在超时内返回 initialize 响应"); + + JsonNode initResult = mapper.readTree(initResponseRef.get()).get("result"); + assertNotNull(initResult, "initialize 应包含 result"); + assertTrue(initResult.has("protocolVersion"), "应包含 protocolVersion"); + assertEquals(1, initResult.get("protocolVersion").asInt()); + log.info("[NEW_SESSION] initialize 成功"); + + // Step 3: 发送 session/new + log.info("[NEW_SESSION] Step 3: 发送 session/new"); + session.sendMessage(new TextMessage(buildSessionNewRequest(1, "/workspace/test"))); + + boolean sessionReceived = + sessionNewLatch.await(WS_RESPONSE_TIMEOUT_SEC, TimeUnit.SECONDS); + assertTrue(sessionReceived, "应在超时内返回 session/new 响应"); + + JsonNode sessionNewRoot = mapper.readTree(sessionNewResponseRef.get()); + + if (sessionNewRoot.has("error")) { + // 未认证也是可接受的结果 + int errorCode = sessionNewRoot.path("error").path("code").asInt(); + if (errorCode == -32000) { + log.info("[NEW_SESSION] session/new 需要认证 (code=-32000),预期行为"); + } else { + log.warn("[NEW_SESSION] session/new 错误: {}", sessionNewRoot.get("error")); + } + } else { + JsonNode sessionResult = sessionNewRoot.get("result"); + assertNotNull(sessionResult, "session/new 应包含 result"); + assertTrue(sessionResult.has("sessionId"), "session/new 结果应包含 sessionId"); + String sessionId = sessionResult.get("sessionId").asText(); + assertFalse(sessionId.isBlank(), "sessionId 不应为空"); + 
log.info("[NEW_SESSION] sessionId={}", sessionId); + + // 验证 models + if (sessionResult.has("models")) { + JsonNode models = sessionResult.get("models"); + if (models.has("availableModels")) { + assertTrue( + models.get("availableModels").isArray(), + "availableModels 应为数组"); + log.info( + "[NEW_SESSION] 可用模型数: {}", + models.get("availableModels").size()); + } + } + + // 验证 modes + if (sessionResult.has("modes")) { + JsonNode modes = sessionResult.get("modes"); + if (modes.has("availableModes")) { + assertTrue( + modes.get("availableModes").isArray(), "availableModes 应为数组"); + log.info("[NEW_SESSION] 可用模式数: {}", modes.get("availableModes").size()); + } + } + } + + // 验证通知消息 + log.info("[NEW_SESSION] 收到通知总数: {}", notifications.size()); + boolean hasWorkspaceInfo = + notifications.stream().anyMatch(m -> m.contains("workspace/info")); + log.info("[NEW_SESSION] workspace/info 通知: {}", hasWorkspaceInfo); + + } finally { + if (session.isOpen()) { + session.close(); + } + } + log.info("[NEW_SESSION] 完整新会话流程测试通过"); + } + } + + // ═════════════════════════════════════════════════════════════════ + // 5. 加载历史会话(session/load) + // ═════════════════════════════════════════════════════════════════ + + @Nested + @DisplayName("5. 加载历史会话") + class LoadSessionFlowTest { + + /** + * 5.1 通过 REST API 创建会话 → 通过 WebSocket 加载历史会话。 + * + *

完整流程: + * 1. POST /coding-sessions 创建会话记录 + * 2. WebSocket 连接 → session/config → sandbox ready + * 3. initialize → session/load (使用已有 sessionId) + */ + @Test + @DisplayName("5.1 REST 创建 + WebSocket 加载历史会话") + void testLoadExistingSession() throws Exception { + log.info("=== 5.1 加载历史会话测试 ==="); + String token = obtainDeveloperToken(); + + // Step 1: 通过 REST API 创建会话 + String cliSessionId = "cli-load-" + UUID.randomUUID(); + ObjectNode createBody = mapper.createObjectNode(); + createBody.put("cliSessionId", cliSessionId); + createBody.put("title", "Session to Load"); + createBody.put("providerKey", "qwen-code"); + + HttpResponse createResp = + httpPost("/coding-sessions", createBody.toString(), token); + assertEquals(200, createResp.statusCode(), "创建会话应成功"); + JsonNode createResult = parseSuccessData(createResp.body()); + String sessionId = createResult.get("sessionId").asText(); + log.info("[LOAD] 已创建会话 sessionId={}, cliSessionId={}", sessionId, cliSessionId); + + // Step 2: WebSocket 连接 + String wsUrl = WS_BASE + "?provider=qwen-code&runtime=remote&token=" + token; + + CountDownLatch sandboxReadyLatch = new CountDownLatch(1); + CountDownLatch initLatch = new CountDownLatch(1); + CountDownLatch loadLatch = new CountDownLatch(1); + + AtomicReference initResponseRef = new AtomicReference<>(); + AtomicReference loadResponseRef = new AtomicReference<>(); + + StandardWebSocketClient wsClient = new StandardWebSocketClient(); + WebSocketSession wsSession = + wsClient.execute( + new TextWebSocketHandler() { + @Override + protected void handleTextMessage( + WebSocketSession s, TextMessage message) { + String payload = message.getPayload(); + log.info("[LOAD] 收到: {}", truncate(payload, 200)); + try { + JsonNode node = mapper.readTree(payload); + if (node.has("method") && !node.has("id")) { + if ("sandbox/status" + .equals(node.get("method").asText())) { + if ("ready" + .equals( + node.path("params") + .path("status") + .asText())) { + sandboxReadyLatch.countDown(); + } + } + 
return; + } + if (node.has("id")) { + int id = node.get("id").asInt(); + if (id == 0) { + initResponseRef.set(payload); + initLatch.countDown(); + } else if (id == 1) { + loadResponseRef.set(payload); + loadLatch.countDown(); + } + } + } catch (Exception e) { + // ignore + } + } + }, + new WebSocketHttpHeaders(), + URI.create(wsUrl)) + .get(WS_CONNECT_TIMEOUT_SEC, TimeUnit.SECONDS); + + try { + // Step 3: session/config → 等待沙箱就绪 + wsSession.sendMessage( + new TextMessage(buildSessionConfigMessage(null, null, null, null))); + + boolean sandboxReady = + sandboxReadyLatch.await(SANDBOX_INIT_TIMEOUT_SEC, TimeUnit.SECONDS); + if (!sandboxReady) { + log.warn("[LOAD] 沙箱未就绪,跳过后续步骤"); + Assumptions.assumeTrue(false, "沙箱环境未就绪"); + } + + // Step 4: initialize + wsSession.sendMessage(new TextMessage(buildInitializeRequest(0))); + boolean initReceived = initLatch.await(WS_RESPONSE_TIMEOUT_SEC, TimeUnit.SECONDS); + assertTrue(initReceived, "应收到 initialize 响应"); + + // Step 5: session/load(使用已创建的 cliSessionId) + log.info("[LOAD] 发送 session/load, cliSessionId={}", cliSessionId); + wsSession.sendMessage( + new TextMessage( + buildSessionLoadRequest(1, cliSessionId, "/workspace/test"))); + + boolean loadReceived = loadLatch.await(WS_RESPONSE_TIMEOUT_SEC, TimeUnit.SECONDS); + assertTrue(loadReceived, "应收到 session/load 响应"); + + JsonNode loadRoot = mapper.readTree(loadResponseRef.get()); + log.info("[LOAD] session/load 响应: {}", truncate(loadResponseRef.get(), 300)); + + if (loadRoot.has("error")) { + int errorCode = loadRoot.path("error").path("code").asInt(); + String errorMsg = loadRoot.path("error").path("message").asText(); + log.info("[LOAD] session/load 返回错误: code={}, message={}", errorCode, errorMsg); + // 需要认证 (-32000) 或会话不存在都是可接受的结果 + } else { + JsonNode loadResult = loadRoot.get("result"); + assertNotNull(loadResult, "session/load 应包含 result"); + if (loadResult.has("sessionId")) { + log.info("[LOAD] 加载成功, sessionId={}", loadResult.get("sessionId").asText()); + } + } + } 
finally { + if (wsSession.isOpen()) { + wsSession.close(); + } + // 清理 + httpDelete("/coding-sessions/" + sessionId, token); + } + log.info("[LOAD] 加载历史会话测试通过"); + } + + /** + * 5.2 加载不存在的会话,验证 session/load 的错误处理。 + */ + @Test + @DisplayName("5.2 加载不存在的会话") + void testLoadNonExistentSession() throws Exception { + log.info("=== 5.2 加载不存在的会话测试 ==="); + String token = obtainDeveloperToken(); + + String wsUrl = WS_BASE + "?provider=qwen-code&runtime=remote&token=" + token; + + CountDownLatch sandboxReadyLatch = new CountDownLatch(1); + CountDownLatch initLatch = new CountDownLatch(1); + CountDownLatch loadLatch = new CountDownLatch(1); + + AtomicReference loadResponseRef = new AtomicReference<>(); + + StandardWebSocketClient wsClient = new StandardWebSocketClient(); + WebSocketSession session = + wsClient.execute( + new TextWebSocketHandler() { + @Override + protected void handleTextMessage( + WebSocketSession s, TextMessage message) { + String payload = message.getPayload(); + try { + JsonNode node = mapper.readTree(payload); + if (node.has("method") && !node.has("id")) { + if ("sandbox/status" + .equals(node.get("method").asText())) { + if ("ready" + .equals( + node.path("params") + .path("status") + .asText())) { + sandboxReadyLatch.countDown(); + } + } + return; + } + if (node.has("id")) { + int id = node.get("id").asInt(); + if (id == 0) { + initLatch.countDown(); + } else if (id == 1) { + loadResponseRef.set(payload); + loadLatch.countDown(); + } + } + } catch (Exception e) { + // ignore + } + } + }, + new WebSocketHttpHeaders(), + URI.create(wsUrl)) + .get(WS_CONNECT_TIMEOUT_SEC, TimeUnit.SECONDS); + + try { + session.sendMessage( + new TextMessage(buildSessionConfigMessage(null, null, null, null))); + + boolean sandboxReady = + sandboxReadyLatch.await(SANDBOX_INIT_TIMEOUT_SEC, TimeUnit.SECONDS); + if (!sandboxReady) { + Assumptions.assumeTrue(false, "沙箱环境未就绪"); + } + + session.sendMessage(new TextMessage(buildInitializeRequest(0))); + assertTrue( + 
initLatch.await(WS_RESPONSE_TIMEOUT_SEC, TimeUnit.SECONDS), + "应收到 initialize 响应"); + + // 使用一个不存在的 sessionId + String fakeSessionId = "non-existent-" + UUID.randomUUID(); + session.sendMessage( + new TextMessage( + buildSessionLoadRequest(1, fakeSessionId, "/workspace/test"))); + + boolean loadReceived = loadLatch.await(WS_RESPONSE_TIMEOUT_SEC, TimeUnit.SECONDS); + assertTrue(loadReceived, "应收到 session/load 响应"); + + JsonNode loadRoot = mapper.readTree(loadResponseRef.get()); + // 不存在的会话应返回错误 + assertTrue(loadRoot.has("error"), "加载不存在的会话应返回错误响应"); + log.info( + "[LOAD_404] 错误码: {}, 消息: {}", + loadRoot.path("error").path("code").asInt(), + loadRoot.path("error").path("message").asText()); + } finally { + if (session.isOpen()) { + session.close(); + } + } + log.info("[LOAD_404] 加载不存在会话测试通过"); + } + } + + // ═════════════════════════════════════════════════════════════════ + // 6. 对话流程(session/prompt) + // ═════════════════════════════════════════════════════════════════ + + @Nested + @DisplayName("6. 对话流程") + class ConversationFlowTest { + + /** + * 6.1 完整对话流程:session/new → session/prompt。 + * + *

在认证状态下执行完整的对话流程: + * 1. initialize + * 2. session/new → 获取 sessionId + * 3. session/prompt → 验证响应 + */ + @Test + @DisplayName("6.1 新会话 + 对话") + void testNewSessionAndPrompt() throws Exception { + log.info("=== 6.1 新会话 + 对话测试 ==="); + String token = obtainDeveloperToken(); + + String wsUrl = WS_BASE + "?provider=qwen-code&runtime=remote&token=" + token; + + CountDownLatch sandboxReadyLatch = new CountDownLatch(1); + CountDownLatch initLatch = new CountDownLatch(1); + CountDownLatch sessionNewLatch = new CountDownLatch(1); + CountDownLatch promptLatch = new CountDownLatch(1); + + AtomicReference initRef = new AtomicReference<>(); + AtomicReference sessionNewRef = new AtomicReference<>(); + AtomicReference promptRef = new AtomicReference<>(); + CopyOnWriteArrayList sessionUpdates = new CopyOnWriteArrayList<>(); + + StandardWebSocketClient wsClient = new StandardWebSocketClient(); + WebSocketSession session = + wsClient.execute( + new TextWebSocketHandler() { + @Override + protected void handleTextMessage( + WebSocketSession s, TextMessage message) { + String payload = message.getPayload(); + log.info("[PROMPT] 收到: {}", truncate(payload, 200)); + try { + JsonNode node = mapper.readTree(payload); + + // 通知消息 + if (node.has("method") && !node.has("id")) { + String method = node.get("method").asText(); + if ("sandbox/status".equals(method) + && "ready" + .equals( + node.path("params") + .path("status") + .asText())) { + sandboxReadyLatch.countDown(); + } + if ("session/update".equals(method)) { + sessionUpdates.add(payload); + } + return; + } + + // 响应消息 + if (node.has("id")) { + int id = node.get("id").asInt(); + if (id == 0) { + initRef.set(payload); + initLatch.countDown(); + } else if (id == 1) { + sessionNewRef.set(payload); + sessionNewLatch.countDown(); + } else if (id == 2) { + if (node.has("result") + || node.has("error")) { + promptRef.set(payload); + promptLatch.countDown(); + } + } + } + } catch (Exception e) { + // ignore + } + } + }, + new 
WebSocketHttpHeaders(), + URI.create(wsUrl)) + .get(WS_CONNECT_TIMEOUT_SEC, TimeUnit.SECONDS); + + try { + // session/config + session.sendMessage( + new TextMessage(buildSessionConfigMessage(null, null, null, null))); + + boolean sandboxReady = + sandboxReadyLatch.await(SANDBOX_INIT_TIMEOUT_SEC, TimeUnit.SECONDS); + if (!sandboxReady) { + Assumptions.assumeTrue(false, "沙箱环境未就绪"); + } + + // initialize + session.sendMessage(new TextMessage(buildInitializeRequest(0))); + assertTrue( + initLatch.await(WS_RESPONSE_TIMEOUT_SEC, TimeUnit.SECONDS), + "应收到 initialize 响应"); + + // session/new + session.sendMessage(new TextMessage(buildSessionNewRequest(1, "/workspace/test"))); + assertTrue( + sessionNewLatch.await(WS_RESPONSE_TIMEOUT_SEC, TimeUnit.SECONDS), + "应收到 session/new 响应"); + + JsonNode sessionNewRoot = mapper.readTree(sessionNewRef.get()); + if (sessionNewRoot.has("error")) { + int code = sessionNewRoot.path("error").path("code").asInt(); + if (code == -32000) { + log.info("[PROMPT] 需要认证,跳过 prompt 测试"); + return; + } + fail("session/new 错误: " + sessionNewRoot.get("error")); + } + + String sessionId = sessionNewRoot.path("result").path("sessionId").asText(); + assertFalse(sessionId.isBlank(), "sessionId 不应为空"); + log.info("[PROMPT] sessionId={}", sessionId); + + // session/prompt + String prompt = "Say 'hello' and nothing else"; + session.sendMessage( + new TextMessage(buildSessionPromptRequest(2, sessionId, prompt))); + + boolean promptReceived = + promptLatch.await(SANDBOX_INIT_TIMEOUT_SEC, TimeUnit.SECONDS); + assertTrue(promptReceived, "应收到 session/prompt 响应"); + + JsonNode promptRoot = mapper.readTree(promptRef.get()); + if (promptRoot.has("error")) { + int code = promptRoot.path("error").path("code").asInt(); + String msg = promptRoot.path("error").path("message").asText(); + log.info("[PROMPT] prompt 返回错误: code={}, msg={}", code, msg); + // -32000 认证或 -32603 内部错误都可接受 + } else { + JsonNode promptResult = promptRoot.get("result"); + assertNotNull(promptResult, 
"prompt 应有 result"); + if (promptResult.has("stopReason")) { + log.info("[PROMPT] stopReason={}", promptResult.get("stopReason").asText()); + } + } + + log.info("[PROMPT] session/update 通知数: {}", sessionUpdates.size()); + } finally { + if (session.isOpen()) { + session.close(); + } + } + log.info("[PROMPT] 对话流程测试通过"); + } + } + + // ═════════════════════════════════════════════════════════════════ + // 7. 连接生命周期与资源清理 + // ═════════════════════════════════════════════════════════════════ + + @Nested + @DisplayName("7. 连接生命周期") + class ConnectionLifecycleTest { + + /** + * 7.1 客户端主动关闭连接,验证服务端资源清理。 + */ + @Test + @DisplayName("7.1 客户端主动断开连接") + void testGracefulDisconnect() throws Exception { + log.info("=== 7.1 客户端主动断开测试 ==="); + String token = obtainDeveloperToken(); + + String wsUrl = WS_BASE + "?provider=qwen-code&token=" + token; + CountDownLatch closeLatch = new CountDownLatch(1); + AtomicReference closeStatusRef = new AtomicReference<>(); + + StandardWebSocketClient wsClient = new StandardWebSocketClient(); + WebSocketSession session = + wsClient.execute( + new TextWebSocketHandler() { + @Override + public void afterConnectionClosed( + WebSocketSession s, CloseStatus status) { + closeStatusRef.set(status); + closeLatch.countDown(); + } + }, + new WebSocketHttpHeaders(), + URI.create(wsUrl)) + .get(WS_CONNECT_TIMEOUT_SEC, TimeUnit.SECONDS); + + assertTrue(session.isOpen(), "连接应已建立"); + log.info("[DISCONNECT] 连接已建立,准备主动关闭"); + + // 主动关闭 + session.close(CloseStatus.NORMAL); + + boolean closed = closeLatch.await(10, TimeUnit.SECONDS); + assertTrue(closed, "连接应被正常关闭"); + assertEquals( + CloseStatus.NORMAL.getCode(), closeStatusRef.get().getCode(), "关闭状态应为 NORMAL"); + log.info("[DISCONNECT] 连接已正常关闭, status={}", closeStatusRef.get()); + } + + /** + * 7.2 连接后不发任何消息等待一段时间,验证 ping 保活机制。 + */ + @Test + @DisplayName("7.2 Ping 保活机制") + void testPingKeepAlive() throws Exception { + log.info("=== 7.2 Ping 保活机制测试 ==="); + String token = obtainDeveloperToken(); + + String wsUrl 
= WS_BASE + "?provider=qwen-code&token=" + token; + CopyOnWriteArrayList receivedMessages = new CopyOnWriteArrayList<>(); + + StandardWebSocketClient wsClient = new StandardWebSocketClient(); + WebSocketSession session = + wsClient.execute( + new TextWebSocketHandler() { + @Override + protected void handleTextMessage( + WebSocketSession s, TextMessage message) { + receivedMessages.add(message.getPayload()); + } + }, + new WebSocketHttpHeaders(), + URI.create(wsUrl)) + .get(WS_CONNECT_TIMEOUT_SEC, TimeUnit.SECONDS); + + try { + assertTrue(session.isOpen(), "连接应已建立"); + + // 等待 35 秒(ping 间隔为 30 秒) + log.info("[PING] 等待 35 秒检查连接是否存活..."); + Thread.sleep(35_000); + + // 连接应仍然存活(ping 机制保活) + assertTrue(session.isOpen(), "35 秒后连接应仍然存活(ping 保活)"); + log.info("[PING] 连接仍然存活"); + } finally { + if (session.isOpen()) { + session.close(); + } + } + log.info("[PING] Ping 保活测试通过"); + } + } + + // ═════════════════════════════════════════════════════════════════ + // 8. 跨用户隔离测试 + // ═════════════════════════════════════════════════════════════════ + + @Nested + @DisplayName("8. 跨用户隔离") + class CrossUserIsolationTest { + + /** + * 8.1 用户 A 创建的会话,用户 B 不应能访问(更新/删除)。 + * + *

注意:此测试依赖系统中存在至少两个不同的开发者账号。 + * 若只有一个账号则跳过。 + */ + @Test + @DisplayName("8.1 会话数据用户隔离") + void testSessionIsolation() throws Exception { + log.info("=== 8.1 会话用户隔离测试 ==="); + + // 使用默认开发者账号 + String tokenA = obtainDeveloperToken(); + assertNotNull(tokenA, "获取开发者 token A 失败"); + + // 用户 A 创建会话 + ObjectNode createBody = mapper.createObjectNode(); + createBody.put("cliSessionId", "cli-isolation-" + UUID.randomUUID()); + createBody.put("title", "User A's Session"); + + HttpResponse createResp = + httpPost("/coding-sessions", createBody.toString(), tokenA); + assertEquals(200, createResp.statusCode()); + String sessionId = parseSuccessData(createResp.body()).get("sessionId").asText(); + log.info("[ISOLATION] 用户 A 创建会话: sessionId={}", sessionId); + + try { + // 尝试获取用户 B 的 token + String tokenB = obtainDeveloperTokenForUser("user2", "123456"); + if (tokenB == null) { + log.warn("[ISOLATION] 无法获取第二个开发者账号,跳过跨用户测试"); + Assumptions.assumeTrue(false, "需要第二个开发者账号"); + } + + // 用户 B 尝试更新用户 A 的会话 + ObjectNode updateBody = mapper.createObjectNode(); + updateBody.put("title", "Hijacked!"); + + HttpResponse updateResp = + httpPatch("/coding-sessions/" + sessionId, updateBody.toString(), tokenB); + log.info("[ISOLATION] 用户 B 更新: status={}", updateResp.statusCode()); + assertEquals(404, updateResp.statusCode(), "用户 B 不应能更新用户 A 的会话"); + + // 用户 B 尝试删除用户 A 的会话 + HttpResponse deleteResp = + httpDelete("/coding-sessions/" + sessionId, tokenB); + log.info("[ISOLATION] 用户 B 删除: status={}", deleteResp.statusCode()); + assertEquals(404, deleteResp.statusCode(), "用户 B 不应能删除用户 A 的会话"); + + // 用户 B 查询不应看到用户 A 的会话 + HttpResponse listResp = httpGet("/coding-sessions?page=0&size=100", tokenB); + JsonNode listResult = parseSuccessData(listResp.body()); + boolean found = false; + for (JsonNode item : listResult.get("content")) { + if (sessionId.equals(item.path("sessionId").asText())) { + found = true; + break; + } + } + assertFalse(found, "用户 B 不应看到用户 A 的会话"); + log.info("[ISOLATION] 用户 B 列表中未包含用户 A 
的会话"); + + } finally { + // 用户 A 清理 + httpDelete("/coding-sessions/" + sessionId, tokenA); + } + log.info("[ISOLATION] 会话用户隔离测试通过"); + } + } + + // ═════════════════════════════════════════════════════════════════ + // 9. 会话数量限制测试 + // ═════════════════════════════════════════════════════════════════ + + @Nested + @DisplayName("9. 会话数量限制") + class SessionLimitTest { + + /** + * 9.1 验证单用户会话数超过 50 时自动清理最旧会话。 + * + *

注意:此测试会创建大量会话,运行时间较长。 + */ + @Test + @DisplayName("9.1 单用户最大 50 个会话限制") + void testMaxSessionsPerUser() throws Exception { + log.info("=== 9.1 会话数量限制测试 ==="); + String token = obtainDeveloperToken(); + + // 先清理已有会话 + HttpResponse listResp = httpGet("/coding-sessions?page=0&size=100", token); + JsonNode listResult = parseSuccessData(listResp.body()); + for (JsonNode item : listResult.get("content")) { + httpDelete("/coding-sessions/" + item.get("sessionId").asText(), token); + } + log.info("[LIMIT] 已清理现有会话"); + + // 创建 51 个会话 + List allSessionIds = new ArrayList<>(); + for (int i = 0; i < 51; i++) { + ObjectNode body = mapper.createObjectNode(); + body.put("cliSessionId", "cli-limit-" + i + "-" + UUID.randomUUID()); + body.put("title", "Limit Test " + i); + + HttpResponse resp = httpPost("/coding-sessions", body.toString(), token); + assertEquals(200, resp.statusCode()); + allSessionIds.add(parseSuccessData(resp.body()).get("sessionId").asText()); + + if (i % 10 == 0) { + log.info("[LIMIT] 已创建 {} 个会话", i + 1); + } + } + + // 查询当前会话数 + HttpResponse finalList = httpGet("/coding-sessions?page=0&size=100", token); + JsonNode finalResult = parseSuccessData(finalList.body()); + int totalSessions = finalResult.get("content").size(); + log.info("[LIMIT] 创建 51 个后,实际会话数: {}", totalSessions); + + // 由于 MAX_SESSIONS_PER_USER = 50,应自动清理最旧的 + assertTrue(totalSessions <= 50, "会话数不应超过 50,实际为: " + totalSessions); + + // 清理 + HttpResponse cleanupList = httpGet("/coding-sessions?page=0&size=100", token); + for (JsonNode item : parseSuccessData(cleanupList.body()).get("content")) { + httpDelete("/coding-sessions/" + item.get("sessionId").asText(), token); + } + log.info("[LIMIT] 会话数量限制测试通过"); + } + } + + // ═════════════════════════════════════════════════════════════════ + // 辅助方法 + // ═════════════════════════════════════════════════════════════════ + + // ── HTTP 工具方法 ── + + private HttpResponse httpGet(String path, String token) + throws IOException, InterruptedException { + 
HttpRequest.Builder builder = + HttpRequest.newBuilder() + .uri(URI.create(BASE_URL + path)) + .timeout(Duration.ofSeconds(10)) + .GET(); + if (token != null) { + builder.header("Authorization", "Bearer " + token); + } + return httpClient.send(builder.build(), HttpResponse.BodyHandlers.ofString()); + } + + private HttpResponse httpPost(String path, String body, String token) + throws IOException, InterruptedException { + HttpRequest.Builder builder = + HttpRequest.newBuilder() + .uri(URI.create(BASE_URL + path)) + .timeout(Duration.ofSeconds(10)) + .header("Content-Type", "application/json") + .POST(HttpRequest.BodyPublishers.ofString(body)); + if (token != null) { + builder.header("Authorization", "Bearer " + token); + } + return httpClient.send(builder.build(), HttpResponse.BodyHandlers.ofString()); + } + + private HttpResponse httpPatch(String path, String body, String token) + throws IOException, InterruptedException { + HttpRequest.Builder builder = + HttpRequest.newBuilder() + .uri(URI.create(BASE_URL + path)) + .timeout(Duration.ofSeconds(10)) + .header("Content-Type", "application/json") + .method("PATCH", HttpRequest.BodyPublishers.ofString(body)); + if (token != null) { + builder.header("Authorization", "Bearer " + token); + } + return httpClient.send(builder.build(), HttpResponse.BodyHandlers.ofString()); + } + + private HttpResponse httpDelete(String path, String token) + throws IOException, InterruptedException { + HttpRequest.Builder builder = + HttpRequest.newBuilder() + .uri(URI.create(BASE_URL + path)) + .timeout(Duration.ofSeconds(10)) + .DELETE(); + if (token != null) { + builder.header("Authorization", "Bearer " + token); + } + return httpClient.send(builder.build(), HttpResponse.BodyHandlers.ofString()); + } + + // ── Token 获取 ── + + private String obtainDeveloperToken() { + return obtainDeveloperTokenForUser("user", "123456"); + } + + private String obtainDeveloperTokenForUser(String username, String password) { + try { + ObjectNode loginBody 
= mapper.createObjectNode(); + loginBody.put("username", username); + loginBody.put("password", password); + + HttpResponse resp = httpPost("/developers/login", loginBody.toString(), null); + if (resp.statusCode() != 200) { + log.warn("获取 token 失败: status={}", resp.statusCode()); + return null; + } + + JsonNode body = mapper.readTree(resp.body()); + return body.path("data").path("access_token").asText(null); + } catch (Exception e) { + log.error("获取 token 异常: {}", e.getMessage()); + return null; + } + } + + // ── JSON 解析 ── + + private JsonNode parseSuccessData(String responseBody) throws Exception { + JsonNode root = mapper.readTree(responseBody); + assertEquals("SUCCESS", root.path("code").asText(), "接口应返回 SUCCESS"); + return root.get("data"); + } + + // ── WebSocket JSON-RPC 消息构建 ── + + private String buildInitializeRequest(int id) throws Exception { + ObjectNode fsNode = + mapper.createObjectNode().put("readTextFile", true).put("writeTextFile", true); + ObjectNode capNode = mapper.createObjectNode().put("terminal", true); + capNode.set("fs", fsNode); + ObjectNode infoNode = + mapper.createObjectNode() + .put("name", "himarket-e2e-test") + .put("title", "HiMarket E2E Test") + .put("version", "1.0.0"); + ObjectNode paramsNode = mapper.createObjectNode().put("protocolVersion", 1); + paramsNode.set("clientCapabilities", capNode); + paramsNode.set("clientInfo", infoNode); + ObjectNode rootNode = + mapper.createObjectNode() + .put("jsonrpc", "2.0") + .put("id", id) + .put("method", "initialize"); + rootNode.set("params", paramsNode); + return mapper.writeValueAsString(rootNode); + } + + private String buildSessionConfigMessage( + String modelProductId, + List mcpServerIds, + List skillIds, + String authToken) + throws Exception { + ObjectNode paramsNode = mapper.createObjectNode(); + if (modelProductId != null) { + paramsNode.put("modelProductId", modelProductId); + } + if (mcpServerIds != null) { + ArrayNode mcpArray = mapper.createArrayNode(); + for (String id : 
mcpServerIds) { + mcpArray.add(mapper.createObjectNode().put("productId", id)); + } + paramsNode.set("mcpServers", mcpArray); + } + if (skillIds != null) { + ArrayNode skillArray = mapper.createArrayNode(); + for (String id : skillIds) { + skillArray.add(mapper.createObjectNode().put("productId", id)); + } + paramsNode.set("skills", skillArray); + } + if (authToken != null) { + paramsNode.put("authToken", authToken); + } + + ObjectNode rootNode = + mapper.createObjectNode().put("jsonrpc", "2.0").put("method", "session/config"); + rootNode.set("params", paramsNode); + return mapper.writeValueAsString(rootNode); + } + + private String buildSessionNewRequest(int id, String cwd) throws Exception { + ObjectNode paramsNode = mapper.createObjectNode().put("cwd", cwd); + paramsNode.set("mcpServers", mapper.createArrayNode()); + ObjectNode rootNode = + mapper.createObjectNode() + .put("jsonrpc", "2.0") + .put("id", id) + .put("method", "session/new"); + rootNode.set("params", paramsNode); + return mapper.writeValueAsString(rootNode); + } + + private String buildSessionLoadRequest(int id, String sessionId, String cwd) throws Exception { + ObjectNode paramsNode = mapper.createObjectNode(); + paramsNode.put("sessionId", sessionId); + paramsNode.put("cwd", cwd); + paramsNode.set("mcpServers", mapper.createArrayNode()); + ObjectNode rootNode = + mapper.createObjectNode() + .put("jsonrpc", "2.0") + .put("id", id) + .put("method", "session/load"); + rootNode.set("params", paramsNode); + return mapper.writeValueAsString(rootNode); + } + + private String buildSessionPromptRequest(int id, String sessionId, String text) + throws Exception { + ObjectNode textBlock = mapper.createObjectNode().put("type", "text").put("text", text); + ArrayNode promptArray = mapper.createArrayNode().add(textBlock); + + ObjectNode paramsNode = mapper.createObjectNode(); + paramsNode.put("sessionId", sessionId); + paramsNode.set("prompt", promptArray); + + ObjectNode rootNode = + mapper.createObjectNode() + 
.put("jsonrpc", "2.0") + .put("id", id) + .put("method", "session/prompt"); + rootNode.set("params", paramsNode); + return mapper.writeValueAsString(rootNode); + } + + // ── 工具方法 ── + + @SuppressWarnings("deprecation") + private boolean isServerRunning() { + try { + var conn = new java.net.URL(BASE_URL + "/cli-providers").openConnection(); + conn.setConnectTimeout(3000); + conn.setReadTimeout(3000); + conn.connect(); + return true; + } catch (Exception e) { + return false; + } + } + + private String truncate(String str, int maxLen) { + if (str == null || str.length() <= maxLen) { + return str; + } + return str.substring(0, maxLen) + "..."; + } +} diff --git a/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/cli/QwenCodeConfigGeneratorMcpTest.java b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/cli/QwenCodeConfigGeneratorMcpTest.java new file mode 100644 index 000000000..0368a46c1 --- /dev/null +++ b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/cli/QwenCodeConfigGeneratorMcpTest.java @@ -0,0 +1,216 @@ +package com.alibaba.himarket.service.hicoding.cli; + +import static org.junit.jupiter.api.Assertions.*; + +import com.alibaba.himarket.service.hicoding.session.ResolvedSessionConfig; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +/** + * QwenCodeConfigGenerator.generateMcpConfig 单元测试。 + * 验证 MCP Server 配置注入到 .qwen/settings.json 的正确性。 + */ +class QwenCodeConfigGeneratorMcpTest { + + @TempDir Path tempDir; + + private QwenCodeConfigGenerator generator; + private ObjectMapper objectMapper; + + @BeforeEach + void setUp() { + objectMapper = new ObjectMapper(); + 
generator = new QwenCodeConfigGenerator(objectMapper); + } + + @Test + void generateMcpConfig_nullList_noFileCreated() throws IOException { + generator.generateMcpConfig(tempDir.toString(), null); + + Path configPath = tempDir.resolve(".qwen/settings.json"); + assertFalse(Files.exists(configPath)); + } + + @Test + void generateMcpConfig_emptyList_noFileCreated() throws IOException { + generator.generateMcpConfig(tempDir.toString(), List.of()); + + Path configPath = tempDir.resolve(".qwen/settings.json"); + assertFalse(Files.exists(configPath)); + } + + @Test + void generateMcpConfig_singleServer_correctFormat() throws IOException { + ResolvedSessionConfig.ResolvedMcpEntry entry = new ResolvedSessionConfig.ResolvedMcpEntry(); + entry.setName("my-mcp"); + entry.setUrl("http://example.com/mcp/sse"); + entry.setTransportType("sse"); + + generator.generateMcpConfig(tempDir.toString(), List.of(entry)); + + Map root = readConfig(); + assertNotNull(root.get("mcpServers")); + + @SuppressWarnings("unchecked") + Map mcpServers = (Map) root.get("mcpServers"); + assertEquals(1, mcpServers.size()); + assertTrue(mcpServers.containsKey("my-mcp")); + + @SuppressWarnings("unchecked") + Map serverConfig = (Map) mcpServers.get("my-mcp"); + assertEquals("http://example.com/mcp/sse", serverConfig.get("url")); + assertEquals("sse", serverConfig.get("type")); + assertNull(serverConfig.get("headers")); + } + + @Test + void generateMcpConfig_withHeaders_headersIncluded() throws IOException { + ResolvedSessionConfig.ResolvedMcpEntry entry = new ResolvedSessionConfig.ResolvedMcpEntry(); + entry.setName("auth-mcp"); + entry.setUrl("http://example.com/mcp"); + entry.setTransportType("streamable-http"); + entry.setHeaders(Map.of("Authorization", "Bearer token123")); + + generator.generateMcpConfig(tempDir.toString(), List.of(entry)); + + Map root = readConfig(); + @SuppressWarnings("unchecked") + Map mcpServers = (Map) root.get("mcpServers"); + @SuppressWarnings("unchecked") + Map serverConfig = 
(Map) mcpServers.get("auth-mcp"); + + assertEquals("http://example.com/mcp", serverConfig.get("url")); + assertEquals("streamable-http", serverConfig.get("type")); + assertNotNull(serverConfig.get("headers")); + + @SuppressWarnings("unchecked") + Map headers = (Map) serverConfig.get("headers"); + assertEquals("Bearer token123", headers.get("Authorization")); + } + + @Test + void generateMcpConfig_multipleServers_allPresent() throws IOException { + ResolvedSessionConfig.ResolvedMcpEntry entry1 = + new ResolvedSessionConfig.ResolvedMcpEntry(); + entry1.setName("server-a"); + entry1.setUrl("http://a.com/mcp/sse"); + entry1.setTransportType("sse"); + + ResolvedSessionConfig.ResolvedMcpEntry entry2 = + new ResolvedSessionConfig.ResolvedMcpEntry(); + entry2.setName("server-b"); + entry2.setUrl("http://b.com/mcp"); + entry2.setTransportType("streamable-http"); + entry2.setHeaders(Map.of("X-Key", "abc")); + + generator.generateMcpConfig(tempDir.toString(), List.of(entry1, entry2)); + + Map root = readConfig(); + @SuppressWarnings("unchecked") + Map mcpServers = (Map) root.get("mcpServers"); + assertEquals(2, mcpServers.size()); + assertTrue(mcpServers.containsKey("server-a")); + assertTrue(mcpServers.containsKey("server-b")); + } + + @Test + void generateMcpConfig_mergeWithExisting_preservesOldEntries() throws IOException { + // 先写入一个已有的 settings.json,包含一个 MCP Server + Path qwenDir = tempDir.resolve(".qwen"); + Files.createDirectories(qwenDir); + Map existing = new LinkedHashMap<>(); + Map existingMcpServers = new LinkedHashMap<>(); + existingMcpServers.put( + "old-server", Map.of("url", "http://old.com/mcp/sse", "type", "sse")); + existing.put("mcpServers", existingMcpServers); + existing.put("someOtherKey", "preserved"); + Files.writeString( + qwenDir.resolve("settings.json"), objectMapper.writeValueAsString(existing)); + + // 注入新的 MCP Server + ResolvedSessionConfig.ResolvedMcpEntry newEntry = + new ResolvedSessionConfig.ResolvedMcpEntry(); + 
newEntry.setName("new-server"); + newEntry.setUrl("http://new.com/mcp"); + newEntry.setTransportType("streamable-http"); + + generator.generateMcpConfig(tempDir.toString(), List.of(newEntry)); + + Map root = readConfig(); + // 验证其他配置项被保留 + assertEquals("preserved", root.get("someOtherKey")); + + @SuppressWarnings("unchecked") + Map mcpServers = (Map) root.get("mcpServers"); + assertEquals(2, mcpServers.size()); + assertTrue(mcpServers.containsKey("old-server")); + assertTrue(mcpServers.containsKey("new-server")); + } + + @Test + void generateMcpConfig_duplicateName_newOverridesOld() throws IOException { + // 先写入已有配置 + Path qwenDir = tempDir.resolve(".qwen"); + Files.createDirectories(qwenDir); + Map existing = new LinkedHashMap<>(); + Map existingMcpServers = new LinkedHashMap<>(); + existingMcpServers.put("my-mcp", Map.of("url", "http://old.com/mcp/sse", "type", "sse")); + existing.put("mcpServers", existingMcpServers); + Files.writeString( + qwenDir.resolve("settings.json"), objectMapper.writeValueAsString(existing)); + + // 注入同名的新 MCP Server + ResolvedSessionConfig.ResolvedMcpEntry newEntry = + new ResolvedSessionConfig.ResolvedMcpEntry(); + newEntry.setName("my-mcp"); + newEntry.setUrl("http://new.com/mcp"); + newEntry.setTransportType("streamable-http"); + + generator.generateMcpConfig(tempDir.toString(), List.of(newEntry)); + + Map root = readConfig(); + @SuppressWarnings("unchecked") + Map mcpServers = (Map) root.get("mcpServers"); + assertEquals(1, mcpServers.size()); + + @SuppressWarnings("unchecked") + Map serverConfig = (Map) mcpServers.get("my-mcp"); + assertEquals("http://new.com/mcp", serverConfig.get("url")); + assertEquals("streamable-http", serverConfig.get("type")); + } + + @Test + void generateMcpConfig_emptyHeaders_headersNotIncluded() throws IOException { + ResolvedSessionConfig.ResolvedMcpEntry entry = new ResolvedSessionConfig.ResolvedMcpEntry(); + entry.setName("no-headers"); + entry.setUrl("http://example.com/mcp/sse"); + 
entry.setTransportType("sse"); + entry.setHeaders(Map.of()); + + generator.generateMcpConfig(tempDir.toString(), List.of(entry)); + + Map root = readConfig(); + @SuppressWarnings("unchecked") + Map mcpServers = (Map) root.get("mcpServers"); + @SuppressWarnings("unchecked") + Map serverConfig = (Map) mcpServers.get("no-headers"); + assertNull(serverConfig.get("headers")); + } + + private Map readConfig() throws IOException { + Path configPath = tempDir.resolve(".qwen/settings.json"); + assertTrue(Files.exists(configPath), "settings.json should exist"); + String content = Files.readString(configPath); + return objectMapper.readValue( + content, new TypeReference>() {}); + } +} diff --git a/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/cli/QwenCodeConfigGeneratorSkillTest.java b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/cli/QwenCodeConfigGeneratorSkillTest.java new file mode 100644 index 000000000..b0eef500b --- /dev/null +++ b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/cli/QwenCodeConfigGeneratorSkillTest.java @@ -0,0 +1,118 @@ +package com.alibaba.himarket.service.hicoding.cli; + +import static org.junit.jupiter.api.Assertions.*; + +import com.alibaba.himarket.service.hicoding.session.ResolvedSessionConfig; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +/** + * QwenCodeConfigGenerator.generateSkillConfig 单元测试。 + * 验证 Skill 配置改为生成 nacos-env.yaml 后的正确性。 + */ +class QwenCodeConfigGeneratorSkillTest { + + @TempDir Path tempDir; + + private QwenCodeConfigGenerator generator; + + @BeforeEach + void setUp() { + generator = new QwenCodeConfigGenerator(new com.fasterxml.jackson.databind.ObjectMapper()); + } + + @Test + void generateSkillConfig_nullList_noFileCreated() throws IOException { + 
generator.generateSkillConfig(tempDir.toString(), null); + Path nacosDir = tempDir.resolve(".nacos"); + assertFalse(Files.exists(nacosDir)); + } + + @Test + void generateSkillConfig_emptyList_noFileCreated() throws IOException { + generator.generateSkillConfig(tempDir.toString(), List.of()); + Path nacosDir = tempDir.resolve(".nacos"); + assertFalse(Files.exists(nacosDir)); + } + + @Test + void generateSkillConfig_singleSkill_generatesNacosEnvYaml() throws IOException { + ResolvedSessionConfig.ResolvedSkillEntry skill = + new ResolvedSessionConfig.ResolvedSkillEntry(); + skill.setName("java-standards"); + skill.setNacosId("nacos-001"); + skill.setNamespace("public"); + skill.setSkillName("java-coding-standards"); + skill.setServerAddr("http://nacos:8848"); + skill.setUsername("nacos"); + skill.setPassword("nacos"); + + generator.generateSkillConfig(tempDir.toString(), List.of(skill)); + + Path nacosEnvPath = tempDir.resolve(".nacos/nacos-env-nacos-001.yaml"); + assertTrue(Files.exists(nacosEnvPath)); + + String content = Files.readString(nacosEnvPath); + assertTrue(content.contains("host: nacos")); + assertTrue(content.contains("port: 8848")); + assertTrue(content.contains("authType: nacos")); + assertTrue(content.contains("username: nacos")); + assertTrue(content.contains("password: nacos")); + assertTrue(content.contains("namespace: public")); + assertFalse(content.contains("accessKey:")); + } + + @Test + void generateSkillConfig_withAccessKey_generatesAliyunAuthType() throws IOException { + ResolvedSessionConfig.ResolvedSkillEntry skill = + new ResolvedSessionConfig.ResolvedSkillEntry(); + skill.setName("my-skill"); + skill.setNacosId("nacos-002"); + skill.setNamespace("dev"); + skill.setSkillName("my-skill-name"); + skill.setServerAddr("http://nacos:8848"); + skill.setUsername("user"); + skill.setPassword("pass"); + skill.setAccessKey("ak123"); + skill.setSecretKey("sk456"); + + generator.generateSkillConfig(tempDir.toString(), List.of(skill)); + + Path 
nacosEnvPath = tempDir.resolve(".nacos/nacos-env-nacos-002.yaml"); + assertTrue(Files.exists(nacosEnvPath)); + + String content = Files.readString(nacosEnvPath); + assertTrue(content.contains("authType: aliyun")); + assertTrue(content.contains("accessKey: ak123")); + assertTrue(content.contains("secretKey: sk456")); + } + + @Test + void generateSkillConfig_doesNotWriteSkillsToSettingsJson() throws IOException { + ResolvedSessionConfig.ResolvedSkillEntry skill = + new ResolvedSessionConfig.ResolvedSkillEntry(); + skill.setName("test-skill"); + skill.setNacosId("nacos-001"); + skill.setNamespace("public"); + skill.setSkillName("test"); + skill.setServerAddr("http://nacos:8848"); + skill.setUsername("nacos"); + skill.setPassword("nacos"); + + generator.generateSkillConfig(tempDir.toString(), List.of(skill)); + + // settings.json 不应该被创建(generateSkillConfig 不再写入 JSON) + Path settingsPath = tempDir.resolve(".qwen/settings.json"); + assertFalse(Files.exists(settingsPath)); + } + + @Test + void skillsDirectory_returnsQwenSkillsPath() { + assertEquals(".qwen/skills/", generator.skillsDirectory()); + } +} diff --git a/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/filesystem/PathTraversalPropertyTest.java b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/filesystem/PathTraversalPropertyTest.java new file mode 100644 index 000000000..73d18d93b --- /dev/null +++ b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/filesystem/PathTraversalPropertyTest.java @@ -0,0 +1,186 @@ +package com.alibaba.himarket.service.hicoding.filesystem; + +import static org.junit.jupiter.api.Assertions.*; + +import net.jqwik.api.*; + +/** + * 路径遍历防护属性测试。 + * + *

Feature: sandbox-runtime-strategy, Property 3: 路径遍历防护 + * + *

Validates: Requirements 6.5 + * + *

对于任意包含路径遍历模式(如 {@code ../}、绝对路径 {@code /etc/passwd}、{@code ..\\} + * 等)的文件路径,FileSystemAdapter 的所有操作(读取、写入、列举、删除)应该拒绝该路径并返回安全错误,不执行任何文件系统操作。 + */ +class PathTraversalPropertyTest { + + private static final String BASE_PATH = "/tmp/test-workspace"; + + // ===== 生成器:包含路径遍历模式的随机路径 ===== + + @Provide + Arbitrary pathsWithDotDotSlash() { + Arbitrary prefix = Arbitraries.of("", "sub/", "a/b/c/", "dir/"); + Arbitrary traversal = Arbitraries.of("../", "../../", "../../../"); + Arbitrary suffix = + Arbitraries.of( + "etc/passwd", + "etc/shadow", + "windows/system32", + "secret.txt", + "root/.ssh/id_rsa", + "var/log/syslog"); + return Combinators.combine(prefix, traversal, suffix).as((p, t, s) -> p + t + s); + } + + @Provide + Arbitrary pathsWithDotDotBackslash() { + Arbitrary prefix = Arbitraries.of("", "sub\\", "a\\b\\", "dir\\"); + Arbitrary traversal = Arbitraries.of("..\\", "..\\..\\", "..\\..\\..\\"); + Arbitrary suffix = + Arbitraries.of( + "windows\\system32", "etc\\passwd", "secret.txt", "Users\\admin\\Desktop"); + return Combinators.combine(prefix, traversal, suffix).as((p, t, s) -> p + t + s); + } + + @Provide + Arbitrary absoluteUnixPaths() { + Arbitrary paths = + Arbitraries.of( + "/etc/passwd", + "/etc/shadow", + "/var/log/syslog", + "/root/.ssh/id_rsa", + "/tmp/malicious", + "/home/user/secret", + "/usr/bin/env", + "/proc/self/environ"); + Arbitrary randomSuffix = + Arbitraries.strings().alpha().ofMinLength(1).ofMaxLength(10); + return Arbitraries.oneOf( + paths, randomSuffix.map(s -> "/" + s), randomSuffix.map(s -> "/" + s + "/" + s)); + } + + @Provide + Arbitrary absoluteWindowsPaths() { + Arbitrary driveLetter = Arbitraries.chars().range('A', 'Z'); + Arbitrary suffix = + Arbitraries.of( + "\\Windows\\System32", + "\\Users\\admin", + "\\Program Files", + "\\temp\\malicious.exe"); + return Combinators.combine(driveLetter, suffix).as((drive, s) -> drive + ":" + s); + } + + @Provide + Arbitrary backslashAbsolutePaths() { + return Arbitraries.of( + 
"\\etc\\passwd", "\\windows\\system32", "\\tmp\\malicious", "\\var\\log\\syslog"); + } + + @Provide + Arbitrary nullBytePaths() { + Arbitrary prefix = Arbitraries.of("file", "sub/file", "a/b/file"); + Arbitrary suffix = Arbitraries.of(".txt", ".java", ".conf", ""); + return Combinators.combine(prefix, suffix).as((p, s) -> p + "\0" + s); + } + + @Provide + Arbitrary allTraversalPaths() { + return Arbitraries.oneOf( + pathsWithDotDotSlash(), + pathsWithDotDotBackslash(), + absoluteUnixPaths(), + absoluteWindowsPaths(), + backslashAbsolutePaths(), + nullBytePaths(), + Arbitraries.of("..", "sub/..", "a/b/..")); + } + + // ===== Property 3: PathValidator 拒绝所有路径遍历模式 ===== + + /** + * Validates: Requirements 6.5 + * + *

对于任意包含 ../ 模式的路径,PathValidator 应抛出 SecurityException。 + */ + @Property(tries = 200) + void pathValidator_rejectsDotDotSlashPaths( + @ForAll("pathsWithDotDotSlash") String maliciousPath) { + assertThrows( + SecurityException.class, + () -> PathValidator.validatePath(BASE_PATH, maliciousPath), + "应拒绝包含 ../ 的路径: " + maliciousPath); + } + + /** + * Validates: Requirements 6.5 + * + *

对于任意包含 ..\\ 模式的路径,PathValidator 应抛出 SecurityException。 + */ + @Property(tries = 200) + void pathValidator_rejectsDotDotBackslashPaths( + @ForAll("pathsWithDotDotBackslash") String maliciousPath) { + assertThrows( + SecurityException.class, + () -> PathValidator.validatePath(BASE_PATH, maliciousPath), + "应拒绝包含 ..\\ 的路径: " + maliciousPath); + } + + /** + * Validates: Requirements 6.5 + * + *

对于任意 Unix 绝对路径,PathValidator 应抛出 SecurityException。 + */ + @Property(tries = 200) + void pathValidator_rejectsAbsoluteUnixPaths(@ForAll("absoluteUnixPaths") String maliciousPath) { + assertThrows( + SecurityException.class, + () -> PathValidator.validatePath(BASE_PATH, maliciousPath), + "应拒绝 Unix 绝对路径: " + maliciousPath); + } + + /** + * Validates: Requirements 6.5 + * + *

对于任意 Windows 驱动器号绝对路径,PathValidator 应抛出 SecurityException。 + */ + @Property(tries = 100) + void pathValidator_rejectsWindowsDriveLetterPaths( + @ForAll("absoluteWindowsPaths") String maliciousPath) { + assertThrows( + SecurityException.class, + () -> PathValidator.validatePath(BASE_PATH, maliciousPath), + "应拒绝 Windows 绝对路径: " + maliciousPath); + } + + /** + * Validates: Requirements 6.5 + * + *

对于任意以反斜杠开头的绝对路径,PathValidator 应抛出 SecurityException。 + */ + @Property(tries = 100) + void pathValidator_rejectsBackslashAbsolutePaths( + @ForAll("backslashAbsolutePaths") String maliciousPath) { + assertThrows( + SecurityException.class, + () -> PathValidator.validatePath(BASE_PATH, maliciousPath), + "应拒绝反斜杠绝对路径: " + maliciousPath); + } + + /** + * Validates: Requirements 6.5 + * + *

对于任意包含 null 字节的路径,PathValidator 应抛出 SecurityException。 + */ + @Property(tries = 100) + void pathValidator_rejectsNullBytePaths(@ForAll("nullBytePaths") String maliciousPath) { + assertThrows( + SecurityException.class, + () -> PathValidator.validatePath(BASE_PATH, maliciousPath), + "应拒绝包含 null 字节的路径: " + maliciousPath.replace("\0", "\\0")); + } +} diff --git a/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/filesystem/PathValidatorTest.java b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/filesystem/PathValidatorTest.java new file mode 100644 index 000000000..95d1304fd --- /dev/null +++ b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/filesystem/PathValidatorTest.java @@ -0,0 +1,123 @@ +package com.alibaba.himarket.service.hicoding.filesystem; + +import static org.junit.jupiter.api.Assertions.*; + +import java.nio.file.Path; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +/** + * PathValidator 单元测试。 + */ +class PathValidatorTest { + + @TempDir Path tempDir; + + // ===== 合法路径测试 ===== + + @Test + void validatePath_simpleFileName_returnsResolvedPath() { + Path result = PathValidator.validatePath(tempDir.toString(), "file.txt"); + assertEquals(tempDir.resolve("file.txt"), result); + } + + @Test + void validatePath_nestedPath_returnsResolvedPath() { + Path result = PathValidator.validatePath(tempDir.toString(), "sub/dir/file.txt"); + assertEquals(tempDir.resolve("sub/dir/file.txt"), result); + } + + // ===== 路径遍历攻击测试 ===== + + @Test + void validatePath_dotDotSlash_throwsSecurityException() { + assertThrows( + SecurityException.class, + () -> PathValidator.validatePath(tempDir.toString(), "../etc/passwd")); + } + + @Test + void validatePath_dotDotBackslash_throwsSecurityException() { + assertThrows( + SecurityException.class, + () -> PathValidator.validatePath(tempDir.toString(), "..\\windows\\system32")); + } + + @Test + void 
validatePath_embeddedTraversal_throwsSecurityException() { + assertThrows( + SecurityException.class, + () -> PathValidator.validatePath(tempDir.toString(), "sub/../../../etc/passwd")); + } + + @Test + void validatePath_doubleDotOnly_throwsSecurityException() { + assertThrows( + SecurityException.class, + () -> PathValidator.validatePath(tempDir.toString(), "..")); + } + + @Test + void validatePath_trailingDoubleDot_throwsSecurityException() { + assertThrows( + SecurityException.class, + () -> PathValidator.validatePath(tempDir.toString(), "sub/..")); + } + + // ===== 绝对路径测试 ===== + + @Test + void validatePath_unixAbsolutePath_throwsSecurityException() { + assertThrows( + SecurityException.class, + () -> PathValidator.validatePath(tempDir.toString(), "/etc/passwd")); + } + + @Test + void validatePath_windowsDriveLetter_throwsSecurityException() { + assertThrows( + SecurityException.class, + () -> PathValidator.validatePath(tempDir.toString(), "C:\\Windows\\System32")); + } + + @Test + void validatePath_backslashAbsolutePath_throwsSecurityException() { + assertThrows( + SecurityException.class, + () -> PathValidator.validatePath(tempDir.toString(), "\\etc\\passwd")); + } + + // ===== null 字节测试 ===== + + @Test + void validatePath_nullByte_throwsSecurityException() { + assertThrows( + SecurityException.class, + () -> PathValidator.validatePath(tempDir.toString(), "file\0.txt")); + } + + // ===== 空值/null 测试 ===== + + @Test + void validatePath_nullBasePath_throwsSecurityException() { + assertThrows(SecurityException.class, () -> PathValidator.validatePath(null, "file.txt")); + } + + @Test + void validatePath_emptyBasePath_throwsSecurityException() { + assertThrows(SecurityException.class, () -> PathValidator.validatePath("", "file.txt")); + } + + @Test + void validatePath_nullRelativePath_throwsSecurityException() { + assertThrows( + SecurityException.class, + () -> PathValidator.validatePath(tempDir.toString(), null)); + } + + @Test + void 
validatePath_emptyRelativePath_throwsSecurityException() { + assertThrows( + SecurityException.class, () -> PathValidator.validatePath(tempDir.toString(), "")); + } +} diff --git a/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeFaultNotificationPropertyTest.java b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeFaultNotificationPropertyTest.java new file mode 100644 index 000000000..55bc3def1 --- /dev/null +++ b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeFaultNotificationPropertyTest.java @@ -0,0 +1,204 @@ +package com.alibaba.himarket.service.hicoding.runtime; + +import static org.junit.jupiter.api.Assertions.*; + +import com.alibaba.himarket.service.hicoding.sandbox.SandboxType; +import net.jqwik.api.*; + +/** + * 运行时异常通知一致性属性测试。 + * + *

Feature: sandbox-runtime-strategy, Property 9: 运行时异常通知一致性 + * + *

Validates: Requirements 7.6, 8.5 + * + *

对于任意运行时类型,当运行时实例状态变为异常(通信中断或健康检查失败)时, + * 发送给客户端的通知应该包含故障类型(faultType)、运行时类型(runtimeType) + * 和建议操作(suggestedAction)字段。 + */ +class RuntimeFaultNotificationPropertyTest { + + // ===== 生成器 ===== + + /** 生成随机 SandboxType 值(REMOTE、OPEN_SANDBOX) */ + @Provide + Arbitrary runtimeTypes() { + return Arbitraries.of(SandboxType.REMOTE, SandboxType.OPEN_SANDBOX); + } + + /** 生成随机故障类型,包含预定义常量和随机自定义类型 */ + @Provide + Arbitrary faultTypes() { + Arbitrary predefined = + Arbitraries.of( + RuntimeFaultNotification.FAULT_PROCESS_CRASHED, + RuntimeFaultNotification.FAULT_HEALTH_CHECK_FAILURE, + RuntimeFaultNotification.FAULT_IDLE_TIMEOUT, + RuntimeFaultNotification.FAULT_CONNECTION_LOST); + Arbitrary custom = + Arbitraries.strings() + .alpha() + .ofMinLength(3) + .ofMaxLength(30) + .map(String::toUpperCase); + return Arbitraries.frequencyOf(Tuple.of(3, predefined), Tuple.of(1, custom)); + } + + /** 生成随机建议操作,包含预定义常量和随机自定义操作 */ + @Provide + Arbitrary suggestedActions() { + Arbitrary predefined = + Arbitraries.of( + RuntimeFaultNotification.ACTION_RECONNECT, + RuntimeFaultNotification.ACTION_RESTART, + RuntimeFaultNotification.ACTION_RECREATE); + Arbitrary custom = + Arbitraries.strings() + .alpha() + .ofMinLength(3) + .ofMaxLength(20) + .map(String::toUpperCase); + return Arbitraries.frequencyOf(Tuple.of(3, predefined), Tuple.of(1, custom)); + } + + // ===== 属性测试 ===== + + /** + * Property 9(a): 所有 RuntimeFaultNotification 实例包含非空的必需字段。 + * + *

Validates: Requirements 7.6, 8.5 + * + *

对于任意 faultType、runtimeType 和 suggestedAction 组合, + * 创建的 RuntimeFaultNotification 实例的三个字段均不为 null。 + */ + @Property(tries = 100) + void allNotifications_containNonNullFields( + @ForAll("faultTypes") String faultType, + @ForAll("runtimeTypes") SandboxType runtimeType, + @ForAll("suggestedActions") String suggestedAction) { + + RuntimeFaultNotification notification = + new RuntimeFaultNotification(faultType, runtimeType, suggestedAction); + + assertNotNull(notification.faultType(), "faultType 不应为 null"); + assertNotNull(notification.sandboxType(), "runtimeType 不应为 null"); + assertNotNull(notification.suggestedAction(), "suggestedAction 不应为 null"); + } + + /** + * Property 9(b): 通知正确携带运行时类型。 + * + *

Validates: Requirements 7.6, 8.5 + * + *

对于任意运行时类型和异常场景,创建的通知中 runtimeType 字段 + * 应与传入的运行时类型完全一致。 + */ + @Property(tries = 100) + void notification_carriesCorrectRuntimeType( + @ForAll("faultTypes") String faultType, + @ForAll("runtimeTypes") SandboxType runtimeType, + @ForAll("suggestedActions") String suggestedAction) { + + RuntimeFaultNotification notification = + new RuntimeFaultNotification(faultType, runtimeType, suggestedAction); + + assertEquals(runtimeType, notification.sandboxType(), "通知中的 runtimeType 应与传入值一致"); + } + + /** + * Property 9(c): 通知正确携带故障类型和建议操作。 + * + *

Validates: Requirements 7.6, 8.5 + * + *

对于任意 faultType 和 suggestedAction,创建的通知应原样保留这些值。 + */ + @Property(tries = 100) + void notification_carriesCorrectFaultTypeAndAction( + @ForAll("faultTypes") String faultType, + @ForAll("runtimeTypes") SandboxType runtimeType, + @ForAll("suggestedActions") String suggestedAction) { + + RuntimeFaultNotification notification = + new RuntimeFaultNotification(faultType, runtimeType, suggestedAction); + + assertEquals(faultType, notification.faultType(), "faultType 应与传入值一致"); + assertEquals(suggestedAction, notification.suggestedAction(), "suggestedAction 应与传入值一致"); + } + + /** + * Property 9(d): 不同运行时类型的通知结构一致。 + * + *

Validates: Requirements 7.6, 8.5 + * + *

对于任意故障类型和建议操作,为每种运行时类型创建的通知 + * 都应具有相同的结构(三个非空字段),且仅 runtimeType 字段不同。 + */ + @Property(tries = 100) + void structuralConsistency_acrossRuntimeTypes( + @ForAll("faultTypes") String faultType, + @ForAll("suggestedActions") String suggestedAction) { + + for (SandboxType type : SandboxType.values()) { + RuntimeFaultNotification notification = + new RuntimeFaultNotification(faultType, type, suggestedAction); + + assertNotNull(notification.faultType()); + assertNotNull(notification.sandboxType()); + assertNotNull(notification.suggestedAction()); + + assertEquals(faultType, notification.faultType()); + assertEquals(type, notification.sandboxType()); + assertEquals(suggestedAction, notification.suggestedAction()); + } + } + + /** + * Property 9(e): 相同输入产生相等的通知(record 语义一致性)。 + * + *

Validates: Requirements 8.5 + * + *

对于任意相同的 faultType、runtimeType 和 suggestedAction, + * 两次创建的 RuntimeFaultNotification 应相等(equals/hashCode 一致)。 + */ + @Property(tries = 100) + void sameInputs_produceEqualNotifications( + @ForAll("faultTypes") String faultType, + @ForAll("runtimeTypes") SandboxType runtimeType, + @ForAll("suggestedActions") String suggestedAction) { + + RuntimeFaultNotification a = + new RuntimeFaultNotification(faultType, runtimeType, suggestedAction); + RuntimeFaultNotification b = + new RuntimeFaultNotification(faultType, runtimeType, suggestedAction); + + assertEquals(a, b, "相同输入应产生相等的通知"); + assertEquals(a.hashCode(), b.hashCode(), "相等的通知应有相同的 hashCode"); + } + + /** + * Property 9(f): 不同运行时类型产生不相等的通知。 + * + *

Validates: Requirements 7.6 + * + *

对于任意故障类型和建议操作,如果两个通知的 runtimeType 不同, + * 则这两个通知不应相等。这确保通信中断事件正确区分运行时来源。 + */ + @Property(tries = 100) + void differentRuntimeTypes_produceUnequalNotifications( + @ForAll("faultTypes") String faultType, + @ForAll("suggestedActions") String suggestedAction) { + + SandboxType[] types = SandboxType.values(); + for (int i = 0; i < types.length; i++) { + for (int j = i + 1; j < types.length; j++) { + RuntimeFaultNotification a = + new RuntimeFaultNotification(faultType, types[i], suggestedAction); + RuntimeFaultNotification b = + new RuntimeFaultNotification(faultType, types[j], suggestedAction); + + assertNotEquals( + a, b, String.format("不同运行时类型 (%s vs %s) 的通知不应相等", types[i], types[j])); + } + } + } +} diff --git a/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeFaultNotificationTest.java b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeFaultNotificationTest.java new file mode 100644 index 000000000..ebbde1304 --- /dev/null +++ b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/runtime/RuntimeFaultNotificationTest.java @@ -0,0 +1,107 @@ +package com.alibaba.himarket.service.hicoding.runtime; + +import static org.junit.jupiter.api.Assertions.*; + +import com.alibaba.himarket.service.hicoding.sandbox.SandboxType; +import org.junit.jupiter.api.Test; + +/** + * RuntimeFaultNotification 单元测试。 + *

+ * 验证统一异常通知格式包含 faultType、runtimeType、suggestedAction 三个字段。 + * Requirements: 8.5 + */ +class RuntimeFaultNotificationTest { + + @Test + void notification_containsAllRequiredFields() { + RuntimeFaultNotification notification = + new RuntimeFaultNotification( + RuntimeFaultNotification.FAULT_PROCESS_CRASHED, + SandboxType.REMOTE, + RuntimeFaultNotification.ACTION_RESTART); + + assertEquals(RuntimeFaultNotification.FAULT_PROCESS_CRASHED, notification.faultType()); + assertEquals(SandboxType.REMOTE, notification.sandboxType()); + assertEquals(RuntimeFaultNotification.ACTION_RESTART, notification.suggestedAction()); + } + + @Test + void notification_healthCheckFailure_forK8s() { + RuntimeFaultNotification notification = + new RuntimeFaultNotification( + RuntimeFaultNotification.FAULT_HEALTH_CHECK_FAILURE, + SandboxType.REMOTE, + RuntimeFaultNotification.ACTION_RECREATE); + + assertEquals(RuntimeFaultNotification.FAULT_HEALTH_CHECK_FAILURE, notification.faultType()); + assertEquals(SandboxType.REMOTE, notification.sandboxType()); + assertEquals(RuntimeFaultNotification.ACTION_RECREATE, notification.suggestedAction()); + } + + @Test + void notification_idleTimeout() { + RuntimeFaultNotification notification = + new RuntimeFaultNotification( + RuntimeFaultNotification.FAULT_IDLE_TIMEOUT, + SandboxType.REMOTE, + RuntimeFaultNotification.ACTION_RECREATE); + + assertEquals(RuntimeFaultNotification.FAULT_IDLE_TIMEOUT, notification.faultType()); + } + + @Test + void notification_connectionLost() { + RuntimeFaultNotification notification = + new RuntimeFaultNotification( + RuntimeFaultNotification.FAULT_CONNECTION_LOST, + SandboxType.REMOTE, + RuntimeFaultNotification.ACTION_RECONNECT); + + assertEquals(RuntimeFaultNotification.FAULT_CONNECTION_LOST, notification.faultType()); + assertEquals(RuntimeFaultNotification.ACTION_RECONNECT, notification.suggestedAction()); + } + + @Test + void notification_equality() { + RuntimeFaultNotification a = new RuntimeFaultNotification("A", 
SandboxType.REMOTE, "X"); + RuntimeFaultNotification b = new RuntimeFaultNotification("A", SandboxType.REMOTE, "X"); + assertEquals(a, b); + } + + @Test + void notification_inequality_differentFaultType() { + RuntimeFaultNotification a = new RuntimeFaultNotification("A", SandboxType.REMOTE, "X"); + RuntimeFaultNotification b = new RuntimeFaultNotification("B", SandboxType.REMOTE, "X"); + assertNotEquals(a, b); + } + + @Test + void notification_inequality_differentRuntimeType() { + RuntimeFaultNotification a = new RuntimeFaultNotification("A", SandboxType.REMOTE, "X"); + RuntimeFaultNotification b = + new RuntimeFaultNotification("A", SandboxType.OPEN_SANDBOX, "X"); + assertNotEquals(a, b); + } + + @Test + void faultTypeConstants_areDistinct() { + assertNotEquals( + RuntimeFaultNotification.FAULT_PROCESS_CRASHED, + RuntimeFaultNotification.FAULT_HEALTH_CHECK_FAILURE); + assertNotEquals( + RuntimeFaultNotification.FAULT_HEALTH_CHECK_FAILURE, + RuntimeFaultNotification.FAULT_IDLE_TIMEOUT); + assertNotEquals( + RuntimeFaultNotification.FAULT_IDLE_TIMEOUT, + RuntimeFaultNotification.FAULT_CONNECTION_LOST); + } + + @Test + void actionConstants_areDistinct() { + assertNotEquals( + RuntimeFaultNotification.ACTION_RECONNECT, RuntimeFaultNotification.ACTION_RESTART); + assertNotEquals( + RuntimeFaultNotification.ACTION_RESTART, RuntimeFaultNotification.ACTION_RECREATE); + } +} diff --git a/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxInfoPropertyTest.java b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxInfoPropertyTest.java new file mode 100644 index 000000000..1b512db80 --- /dev/null +++ b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxInfoPropertyTest.java @@ -0,0 +1,235 @@ +package com.alibaba.himarket.service.hicoding.sandbox; + +import static org.junit.jupiter.api.Assertions.*; + +import java.net.URI; +import java.net.URLEncoder; +import 
java.nio.charset.StandardCharsets; +import java.util.Map; +import net.jqwik.api.*; + +/** + * SandboxInfo 构建正确性属性测试。 + * + *

Feature: sandbox-runtime-strategy, Property 11: SandboxInfo 构建正确性 + * + *

Validates: Requirements 10.2, 10.4, 10.5 + * + *

对任意 command 和 args 组合,{@code sidecarWsUri()} 构建的 URI 包含正确的 host、port、command 和 URL + * 编码的 args;REMOTE 类型 host 可为任意可达地址;K8S 类型 metadata 包含 + * podName 和 namespace。 + */ +class SandboxInfoPropertyTest { + + // ===== 生成器 ===== + + @Provide + Arbitrary commands() { + return Arbitraries.of("node", "python", "bash", "cli", "npx", "java", "go", "ruby"); + } + + @Provide + Arbitrary nonBlankArgs() { + return Arbitraries.of( + "--help", + "--version", + "-f config.json", + "run test", + "--name=hello world", + "arg1 arg2 arg3", + "--path=/tmp/test file", + "中文参数", + "special&chars=value", + "a=1&b=2", + "hello+world", + "--flag --key=val ue"); + } + + @Provide + Arbitrary nullOrBlankArgs() { + return Arbitraries.of(null, "", " ", "\t", "\n"); + } + + @Provide + Arbitrary sidecarPorts() { + return Arbitraries.integers().between(1024, 65535); + } + + @Provide + Arbitrary k8sPodNames() { + return Arbitraries.strings() + .alpha() + .numeric() + .ofMinLength(3) + .ofMaxLength(20) + .map(s -> "pod-" + s); + } + + @Provide + Arbitrary k8sNamespaces() { + return Arbitraries.of("himarket", "default", "sandbox", "production", "staging"); + } + + @Provide + Arbitrary k8sHosts() { + return Arbitraries.integers() + .between(1, 254) + .list() + .ofSize(4) + .map( + octets -> + octets.get(0) + + "." + + octets.get(1) + + "." + + octets.get(2) + + "." + + octets.get(3)); + } + + // ===== Property 11: sidecarWsUri 构建正确性(非空 args) ===== + + /** + * Validates: Requirements 10.2 + * + *

对任意 command 和非空 args,sidecarWsUri() 构建的 URI 包含正确的 host、port、command 和 URL 编码的 + * args。 + */ + @Property(tries = 200) + void sidecarWsUri_containsCorrectComponentsWithArgs( + @ForAll("commands") String command, + @ForAll("nonBlankArgs") String args, + @ForAll("sidecarPorts") int port) { + SandboxInfo info = + new SandboxInfo( + SandboxType.REMOTE, + "sandbox-" + port, + "sandbox.example.com", + port, + "/workspace", + false, + Map.of()); + + URI uri = info.sidecarWsUri(command, args); + + assertEquals("ws", uri.getScheme(), "URI scheme 应为 ws"); + assertEquals("sandbox.example.com", uri.getHost(), "URI host 应与 SandboxInfo.host 一致"); + assertEquals(port, uri.getPort(), "URI port 应与 SandboxInfo.sidecarPort 一致"); + assertEquals("/", uri.getPath(), "URI path 应为 /"); + + String query = uri.getRawQuery(); + assertNotNull(query, "URI query 不应为 null"); + assertTrue(query.startsWith("command=" + command), "query 应以 command= 开头"); + + String expectedEncodedArgs = URLEncoder.encode(args, StandardCharsets.UTF_8); + assertTrue( + query.contains("args=" + expectedEncodedArgs), + "query 应包含 URL 编码的 args: expected args=" + expectedEncodedArgs + " in " + query); + } + + // ===== Property 11: sidecarWsUri 构建正确性(null 或空白 args) ===== + + /** + * Validates: Requirements 10.2 + * + *

当 args 为 null 或空白时,sidecarWsUri() 构建的 URI 不包含 args 参数。 + */ + @Property(tries = 100) + void sidecarWsUri_omitsArgsWhenNullOrBlank( + @ForAll("commands") String command, + @ForAll("nullOrBlankArgs") String args, + @ForAll("sidecarPorts") int port) { + SandboxInfo info = + new SandboxInfo( + SandboxType.REMOTE, + "sandbox-" + port, + "sandbox.example.com", + port, + "/workspace", + false, + Map.of()); + + URI uri = info.sidecarWsUri(command, args); + + String query = uri.getRawQuery(); + assertNotNull(query); + assertEquals("command=" + command, query, "当 args 为 null/空白时,query 应仅包含 command"); + assertFalse(query.contains("args="), "当 args 为 null/空白时,query 不应包含 args"); + } + + // ===== Property 11: sidecarWsUri 对不同 host 和 port 的正确性 ===== + + /** + * Validates: Requirements 10.2 + * + *

对任意 host 和 port,sidecarWsUri() 构建的 URI 正确反映 SandboxInfo 中的连接信息。 + */ + @Property(tries = 200) + void sidecarWsUri_reflectsHostAndPort( + @ForAll("commands") String command, + @ForAll("k8sHosts") String host, + @ForAll("sidecarPorts") int port) { + SandboxInfo info = + new SandboxInfo( + SandboxType.REMOTE, "pod-abc", host, port, "/workspace", false, Map.of()); + + URI uri = info.sidecarWsUri(command, null); + + assertEquals(host, uri.getHost()); + assertEquals(port, uri.getPort()); + assertEquals("ws://" + host + ":" + port + "/?command=" + command, uri.toString()); + } + + // ===== Property 11: REMOTE 类型 metadata 包含 podName 和 namespace ===== + + /** + * Validates: Requirements 10.5 + * + *

当 SandboxType 为 REMOTE 时,metadata 应包含 podName 和 namespace。 + */ + @Property(tries = 200) + void k8sSandboxInfo_metadataContainsPodNameAndNamespace( + @ForAll("k8sPodNames") String podName, + @ForAll("k8sNamespaces") String namespace, + @ForAll("k8sHosts") String host, + @ForAll("sidecarPorts") int port) { + Map metadata = Map.of("podName", podName, "namespace", namespace); + SandboxInfo info = + new SandboxInfo( + SandboxType.REMOTE, podName, host, port, "/workspace", false, metadata); + + assertEquals(SandboxType.REMOTE, info.type()); + assertNotNull(info.metadata(), "K8S 类型 metadata 不应为 null"); + assertTrue(info.metadata().containsKey("podName"), "K8S metadata 应包含 podName"); + assertTrue(info.metadata().containsKey("namespace"), "K8S metadata 应包含 namespace"); + assertEquals(podName, info.metadata().get("podName")); + assertEquals(namespace, info.metadata().get("namespace")); + } + + // ===== Property 11: sidecarWsUri 返回的 URI 始终可解析 ===== + + /** + * Validates: Requirements 10.2 + * + *

对任意合法输入,sidecarWsUri() 返回的 URI 始终是可解析的有效 URI。 + */ + @Property(tries = 200) + void sidecarWsUri_alwaysReturnsValidUri( + @ForAll("commands") String command, + @ForAll("nonBlankArgs") String args, + @ForAll("k8sHosts") String host, + @ForAll("sidecarPorts") int port) { + SandboxInfo info = + new SandboxInfo( + SandboxType.REMOTE, "pod-test", host, port, "/workspace", false, Map.of()); + + URI uri = info.sidecarWsUri(command, args); + + assertNotNull(uri); + assertDoesNotThrow(uri::getScheme); + assertDoesNotThrow(uri::getHost); + assertDoesNotThrow(uri::getPort); + assertDoesNotThrow(uri::getRawQuery); + assertEquals("ws", uri.getScheme()); + } +} diff --git a/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxProviderRegistryTest.java b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxProviderRegistryTest.java new file mode 100644 index 000000000..5bf7885e8 --- /dev/null +++ b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/sandbox/SandboxProviderRegistryTest.java @@ -0,0 +1,81 @@ +package com.alibaba.himarket.service.hicoding.sandbox; + +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.Mockito.*; + +import java.util.List; +import java.util.Set; +import org.junit.jupiter.api.Test; + +class SandboxProviderRegistryTest { + + private SandboxProvider mockProvider(SandboxType type) { + SandboxProvider provider = mock(SandboxProvider.class); + when(provider.getType()).thenReturn(type); + return provider; + } + + // ===== getProvider 正常查找 ===== + + @Test + void getProvider_registeredType_returnsCorrectProvider() { + SandboxProvider openSandboxProvider = mockProvider(SandboxType.OPEN_SANDBOX); + SandboxProvider remoteProvider = mockProvider(SandboxType.REMOTE); + SandboxProviderRegistry registry = + new SandboxProviderRegistry(List.of(openSandboxProvider, remoteProvider)); + + assertSame(openSandboxProvider, registry.getProvider(SandboxType.OPEN_SANDBOX)); + 
assertSame(remoteProvider, registry.getProvider(SandboxType.REMOTE)); + } + + @Test + void getProvider_singleProvider_returnsIt() { + SandboxProvider remoteProvider = mockProvider(SandboxType.REMOTE); + SandboxProviderRegistry registry = new SandboxProviderRegistry(List.of(remoteProvider)); + + assertSame(remoteProvider, registry.getProvider(SandboxType.REMOTE)); + } + + // ===== getProvider 未注册类型抛异常 ===== + + @Test + void getProvider_unregisteredType_throwsIllegalArgumentException() { + SandboxProvider openSandboxProvider = mockProvider(SandboxType.OPEN_SANDBOX); + SandboxProviderRegistry registry = + new SandboxProviderRegistry(List.of(openSandboxProvider)); + + IllegalArgumentException ex = + assertThrows( + IllegalArgumentException.class, + () -> registry.getProvider(SandboxType.REMOTE)); + assertTrue(ex.getMessage().contains("REMOTE") || ex.getMessage().contains("不支持")); + } + + @Test + void getProvider_emptyRegistry_throwsIllegalArgumentException() { + SandboxProviderRegistry registry = new SandboxProviderRegistry(List.of()); + + assertThrows( + IllegalArgumentException.class, () -> registry.getProvider(SandboxType.REMOTE)); + } + + // ===== supportedTypes ===== + + @Test + void supportedTypes_returnsAllRegisteredTypes() { + SandboxProvider openSandboxProvider = mockProvider(SandboxType.OPEN_SANDBOX); + SandboxProvider remoteProvider = mockProvider(SandboxType.REMOTE); + SandboxProviderRegistry registry = + new SandboxProviderRegistry(List.of(openSandboxProvider, remoteProvider)); + + Set types = registry.supportedTypes(); + assertEquals(Set.of(SandboxType.OPEN_SANDBOX, SandboxType.REMOTE), types); + } + + @Test + void supportedTypes_emptyRegistry_returnsEmptySet() { + SandboxProviderRegistry registry = new SandboxProviderRegistry(List.of()); + + assertTrue(registry.supportedTypes().isEmpty()); + } +} diff --git a/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitPhasesTest.java 
b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitPhasesTest.java new file mode 100644 index 000000000..df87cb9b5 --- /dev/null +++ b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/sandbox/init/InitPhasesTest.java @@ -0,0 +1,414 @@ +package com.alibaba.himarket.service.hicoding.sandbox.init; + +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import com.alibaba.himarket.config.AcpProperties.CliProviderConfig; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeAdapter; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeStatus; +import com.alibaba.himarket.service.hicoding.sandbox.ConfigFile; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxConfig; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxInfo; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxProvider; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxType; +import com.alibaba.himarket.service.hicoding.session.CliSessionConfig; +import java.io.IOException; +import java.util.List; +import java.util.Map; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; + +/** + * 五个初始化阶段的单元测试。 + * + *

使用 mock SandboxProvider 测试各阶段的 execute、verify、shouldExecute 逻辑。 + * + *

Validates: Requirements 5.1-5.9, 8.1-8.5, 9.1-9.3 + */ +class InitPhasesTest { + + private SandboxProvider mockProvider; + private SandboxInfo stubInfo; + + @BeforeEach + void setUp() { + mockProvider = mock(SandboxProvider.class); + stubInfo = + new SandboxInfo( + SandboxType.REMOTE, + "remote-8080", + "sandbox.example.com", + 8080, + "/workspace", + false, + Map.of()); + } + + private InitContext createBasicContext() { + CliProviderConfig providerConfig = new CliProviderConfig(); + return new InitContext(mockProvider, "test-user", null, null, providerConfig, null, null); + } + + private InitContext createContextWithSandboxInfo() { + InitContext context = createBasicContext(); + context.setSandboxInfo(stubInfo); + return context; + } + + private InitContext createContextForConfigInjection(List configs) { + CliProviderConfig providerConfig = new CliProviderConfig(); + providerConfig.setSupportsCustomModel(true); + CliSessionConfig sessionConfig = new CliSessionConfig(); + InitContext context = + new InitContext( + mockProvider, "test-user", null, null, providerConfig, sessionConfig, null); + context.setSandboxInfo(stubInfo); + context.setInjectedConfigs(configs); + return context; + } + + // ========================================================================= + // SandboxAcquirePhase 测试 + // ========================================================================= + + @Nested + @DisplayName("SandboxAcquirePhase (order=100)") + class SandboxAcquirePhaseTest { + + private final SandboxAcquirePhase phase = new SandboxAcquirePhase(); + + @Test + @DisplayName("基本属性:name=sandbox-acquire, order=100, retryPolicy=none") + void basicProperties() { + assertEquals("sandbox-acquire", phase.name()); + assertEquals(100, phase.order()); + assertEquals(0, phase.retryPolicy().maxRetries()); + assertTrue(phase.shouldExecute(createBasicContext())); + } + + @Test + @DisplayName("execute 成功时将 SandboxInfo 存入 InitContext") + void execute_success_storesSandboxInfo() { + SandboxConfig 
config = + new SandboxConfig( + "user1", SandboxType.REMOTE, "/workspace", Map.of(), null, null); + InitContext context = + new InitContext(mockProvider, "user1", config, null, null, null, null); + when(mockProvider.acquire(config)).thenReturn(stubInfo); + + assertDoesNotThrow(() -> phase.execute(context)); + assertNotNull(context.getSandboxInfo()); + assertEquals("sandbox.example.com", context.getSandboxInfo().host()); + assertEquals("remote-8080", context.getSandboxInfo().sandboxId()); + } + + @Test + @DisplayName("acquire 失败时抛出不可重试的 InitPhaseException") + void execute_acquireFails_throwsNonRetryableException() { + SandboxConfig config = + new SandboxConfig( + "user1", SandboxType.REMOTE, "/workspace", Map.of(), null, null); + InitContext context = + new InitContext(mockProvider, "user1", config, null, null, null, null); + when(mockProvider.acquire(config)).thenThrow(new RuntimeException("Pod 创建超时")); + + InitPhaseException ex = + assertThrows(InitPhaseException.class, () -> phase.execute(context)); + assertEquals("sandbox-acquire", ex.getPhaseName()); + assertFalse(ex.isRetryable(), "沙箱获取失败不应重试"); + assertTrue(ex.getMessage().contains("Pod 创建超时")); + } + + @Test + @DisplayName("verify: sandboxInfo 非空且 host 非空时返回 true") + void verify_withValidSandboxInfo_returnsTrue() { + InitContext context = createContextWithSandboxInfo(); + assertTrue(phase.verify(context)); + } + + @Test + @DisplayName("verify: sandboxInfo 为 null 时返回 false") + void verify_withNullSandboxInfo_returnsFalse() { + InitContext context = createBasicContext(); + assertFalse(phase.verify(context)); + } + + @Test + @DisplayName("verify: host 为空白时返回 false") + void verify_withBlankHost_returnsFalse() { + InitContext context = createBasicContext(); + context.setSandboxInfo( + new SandboxInfo( + SandboxType.REMOTE, + "remote-8080", + " ", + 8080, + "/workspace", + false, + Map.of())); + assertFalse(phase.verify(context)); + } + } + + // 
========================================================================= + // FileSystemReadyPhase 测试 + // ========================================================================= + + @Nested + @DisplayName("FileSystemReadyPhase (order=200)") + class FileSystemReadyPhaseTest { + + private final FileSystemReadyPhase phase = new FileSystemReadyPhase(); + + @Test + @DisplayName("基本属性:name=filesystem-ready, order=200, retryPolicy=none") + void basicProperties() { + assertEquals("filesystem-ready", phase.name()); + assertEquals(200, phase.order()); + assertEquals(0, phase.retryPolicy().maxRetries()); + assertTrue(phase.shouldExecute(createBasicContext())); + } + + @Test + @DisplayName("execute 成功:healthCheck 返回 true") + void execute_healthCheckTrue_succeeds() { + InitContext context = createContextWithSandboxInfo(); + when(mockProvider.healthCheck(stubInfo)).thenReturn(true); + + assertDoesNotThrow(() -> phase.execute(context)); + verify(mockProvider).healthCheck(stubInfo); + } + + @Test + @DisplayName("execute 失败:healthCheck 返回 false 时抛出可重试异常") + void execute_healthCheckFalse_throwsRetryableException() { + InitContext context = createContextWithSandboxInfo(); + when(mockProvider.healthCheck(stubInfo)).thenReturn(false); + + InitPhaseException ex = + assertThrows(InitPhaseException.class, () -> phase.execute(context)); + assertEquals("filesystem-ready", ex.getPhaseName()); + assertTrue(ex.isRetryable(), "健康检查失败应可重试"); + assertTrue(ex.getMessage().contains("不可达"), "错误消息应包含'不可达'关键字"); + } + + @Test + @DisplayName("execute 失败:healthCheck 抛出异常时包装为可重试异常") + void execute_healthCheckThrows_wrapsAsRetryableException() { + InitContext context = createContextWithSandboxInfo(); + when(mockProvider.healthCheck(stubInfo)).thenThrow(new RuntimeException("连接超时")); + + InitPhaseException ex = + assertThrows(InitPhaseException.class, () -> phase.execute(context)); + assertEquals("filesystem-ready", ex.getPhaseName()); + assertTrue(ex.isRetryable()); + } + + @Test + 
@DisplayName("verify: 始终返回 true(不再执行文件系统验证)") + void verify_alwaysReturnsTrue() { + InitContext context = createContextWithSandboxInfo(); + assertTrue(phase.verify(context)); + } + } + + // ========================================================================= + // ConfigInjectionPhase 测试 + // ========================================================================= + + @Nested + @DisplayName("ConfigInjectionPhase (order=300)") + class ConfigInjectionPhaseTest { + + private final ConfigInjectionPhase phase = new ConfigInjectionPhase(); + + @Test + @DisplayName("基本属性:name=config-injection, order=300, retryPolicy=none") + void basicProperties() { + assertEquals("config-injection", phase.name()); + assertEquals(300, phase.order()); + assertEquals(0, phase.retryPolicy().maxRetries()); + } + + @Test + @DisplayName("shouldExecute: sessionConfig 和 supportsCustomModel 都满足时返回 true") + void shouldExecute_conditionsMet_returnsTrue() { + InitContext context = createContextForConfigInjection(List.of()); + assertTrue(phase.shouldExecute(context)); + } + + @Test + @DisplayName("shouldExecute: sessionConfig 为 null 时返回 false") + void shouldExecute_nullSessionConfig_returnsFalse() { + InitContext context = createBasicContext(); // sessionConfig = null + assertFalse(phase.shouldExecute(context)); + } + + @Test + @DisplayName("shouldExecute: supportsCustomModel 为 false 时返回 false") + void shouldExecute_supportsCustomModelFalse_returnsFalse() { + CliProviderConfig providerConfig = new CliProviderConfig(); + providerConfig.setSupportsCustomModel(false); + CliSessionConfig sessionConfig = new CliSessionConfig(); + InitContext context = + new InitContext( + mockProvider, + "test-user", + null, + null, + providerConfig, + sessionConfig, + null); + assertFalse(phase.shouldExecute(context)); + } + + @Test + @DisplayName("execute: 无配置文件时正常返回不抛异常") + void execute_noConfigs_succeeds() { + InitContext context = createContextForConfigInjection(List.of()); + assertDoesNotThrow(() -> 
phase.execute(context)); + } + + @Test + @DisplayName("execute: 配置文件为 null 时正常返回") + void execute_nullConfigs_succeeds() { + InitContext context = createContextForConfigInjection(null); + context.setInjectedConfigs(null); + assertDoesNotThrow(() -> phase.execute(context)); + } + + @Test + @DisplayName("execute: writeFile 逐个写入成功") + void execute_writeAndReadBackHashMatch_succeeds() throws IOException { + String content = "{\"model\": \"gpt-4\"}"; + ConfigFile config = + new ConfigFile( + "settings.json", content, null, ConfigFile.ConfigType.MODEL_SETTINGS); + InitContext context = createContextForConfigInjection(List.of(config)); + + assertDoesNotThrow(() -> phase.execute(context)); + verify(mockProvider).writeFile(stubInfo, "settings.json", content); + } + + @Test + @DisplayName("execute: writeFile 抛出 IOException 时抛出可重试异常") + void execute_writeFileFails_throwsRetryableException() throws IOException { + String content = "test-content"; + ConfigFile config = + new ConfigFile( + "settings.json", content, null, ConfigFile.ConfigType.MODEL_SETTINGS); + InitContext context = createContextForConfigInjection(List.of(config)); + + doThrow(new IOException("Sidecar writeFile 失败")) + .when(mockProvider) + .writeFile(eq(stubInfo), anyString(), anyString()); + + InitPhaseException ex = + assertThrows(InitPhaseException.class, () -> phase.execute(context)); + assertTrue(ex.isRetryable()); + assertTrue(ex.getMessage().contains("配置注入失败")); + } + + @Test + @DisplayName("execute: 多个配置文件逐个 writeFile 成功") + void execute_multipleConfigs_allSucceed() throws IOException { + String content1 = "{\"model\": \"gpt-4\"}"; + String content2 = "{\"servers\": []}"; + ConfigFile config1 = + new ConfigFile( + "settings.json", content1, null, ConfigFile.ConfigType.MODEL_SETTINGS); + ConfigFile config2 = + new ConfigFile( + ".kiro/mcp.json", content2, null, ConfigFile.ConfigType.MCP_CONFIG); + InitContext context = createContextForConfigInjection(List.of(config1, config2)); + + assertDoesNotThrow(() 
-> phase.execute(context)); + verify(mockProvider).writeFile(stubInfo, "settings.json", content1); + verify(mockProvider).writeFile(stubInfo, ".kiro/mcp.json", content2); + } + + @Test + @DisplayName("verify: 始终返回 true(不再执行文件读回验证)") + void verify_alwaysReturnsTrue() { + InitContext context = createContextForConfigInjection(List.of()); + assertTrue(phase.verify(context)); + } + } + + // ========================================================================= + // SidecarConnectPhase 测试 + // ========================================================================= + + @Nested + @DisplayName("SidecarConnectPhase (order=400)") + class SidecarConnectPhaseTest { + + private final SidecarConnectPhase phase = new SidecarConnectPhase(); + + @Test + @DisplayName("基本属性:name=sidecar-connect, order=400, retryPolicy=none") + void basicProperties() { + assertEquals("sidecar-connect", phase.name()); + assertEquals(400, phase.order()); + assertEquals(0, phase.retryPolicy().maxRetries()); + assertTrue(phase.shouldExecute(createBasicContext())); + } + + @Test + @DisplayName("execute 成功:将 RuntimeAdapter 存入 InitContext") + void execute_success_storesRuntimeAdapter() { + InitContext context = createContextWithSandboxInfo(); + RuntimeAdapter mockAdapter = mock(RuntimeAdapter.class); + when(mockProvider.connectSidecar(stubInfo, null)).thenReturn(mockAdapter); + + assertDoesNotThrow(() -> phase.execute(context)); + assertSame(mockAdapter, context.getRuntimeAdapter()); + } + + @Test + @DisplayName("execute 失败:connectSidecar 抛出异常时抛出可重试异常") + void execute_connectFails_throwsRetryableException() { + InitContext context = createContextWithSandboxInfo(); + when(mockProvider.connectSidecar(stubInfo, null)) + .thenThrow(new RuntimeException("WebSocket 连接超时")); + + InitPhaseException ex = + assertThrows(InitPhaseException.class, () -> phase.execute(context)); + assertEquals("sidecar-connect", ex.getPhaseName()); + assertTrue(ex.isRetryable(), "Sidecar 连接失败应可重试"); + 
assertTrue(ex.getMessage().contains("连接失败")); + } + + @Test + @DisplayName("verify: adapter 非空且 status=RUNNING 时返回 true") + void verify_adapterRunning_returnsTrue() { + InitContext context = createBasicContext(); + RuntimeAdapter mockAdapter = mock(RuntimeAdapter.class); + when(mockAdapter.getStatus()).thenReturn(RuntimeStatus.RUNNING); + context.setRuntimeAdapter(mockAdapter); + + assertTrue(phase.verify(context)); + } + + @Test + @DisplayName("verify: adapter 为 null 时返回 false") + void verify_nullAdapter_returnsFalse() { + InitContext context = createBasicContext(); + assertFalse(phase.verify(context)); + } + + @Test + @DisplayName("verify: adapter status 不是 RUNNING 时返回 false") + void verify_adapterNotRunning_returnsFalse() { + InitContext context = createBasicContext(); + RuntimeAdapter mockAdapter = mock(RuntimeAdapter.class); + when(mockAdapter.getStatus()).thenReturn(RuntimeStatus.ERROR); + context.setRuntimeAdapter(mockAdapter); + + assertFalse(phase.verify(context)); + } + } +} diff --git a/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/sandbox/init/SandboxInitPipelinePropertyTest.java b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/sandbox/init/SandboxInitPipelinePropertyTest.java new file mode 100644 index 000000000..c396bd60a --- /dev/null +++ b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/sandbox/init/SandboxInitPipelinePropertyTest.java @@ -0,0 +1,847 @@ +package com.alibaba.himarket.service.hicoding.sandbox.init; + +import static org.junit.jupiter.api.Assertions.*; + +import com.alibaba.himarket.config.AcpProperties.CliProviderConfig; +import com.alibaba.himarket.service.hicoding.filesystem.FileSystemAdapter; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeAdapter; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeConfig; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeStatus; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxConfig; 
+import com.alibaba.himarket.service.hicoding.sandbox.SandboxInfo; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxProvider; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxType; +import java.io.IOException; +import java.net.URI; +import java.time.Duration; +import java.util.*; +import java.util.concurrent.CopyOnWriteArrayList; +import net.jqwik.api.*; +import reactor.core.publisher.Flux; + +/** + * SandboxInitPipeline 属性基测试。 + * + *

Feature: sandbox-runtime-strategy + * + *

验证 Pipeline 的编排行为在各种输入组合下满足正确性属性。 + */ +class SandboxInitPipelinePropertyTest { + + // ===== 辅助:Stub SandboxProvider ===== + + /** 简单的 stub provider,记录调用但不做实际操作。 */ + static class StubSandboxProvider implements SandboxProvider { + private final SandboxType type; + + StubSandboxProvider(SandboxType type) { + this.type = type; + } + + @Override + public SandboxType getType() { + return type; + } + + @Override + public SandboxInfo acquire(SandboxConfig config) { + return new SandboxInfo( + type, + "stub-" + type.getValue(), + "localhost", + 8080, + "/workspace", + false, + Map.of()); + } + + @Override + public void release(SandboxInfo info) {} + + @Override + public boolean healthCheck(SandboxInfo info) { + return true; + } + + @Override + public void writeFile(SandboxInfo info, String relativePath, String content) + throws IOException {} + + @Override + public String readFile(SandboxInfo info, String relativePath) throws IOException { + return ""; + } + + @Override + public RuntimeAdapter connectSidecar(SandboxInfo info, RuntimeConfig config) { + return new StubRuntimeAdapter(); + } + + @Override + public URI getSidecarUri(SandboxInfo info, String command, String args) { + return URI.create("ws://localhost:8080/?command=" + command); + } + } + + /** Stub RuntimeAdapter,模拟运行中的适配器。 */ + static class StubRuntimeAdapter implements RuntimeAdapter { + @Override + public SandboxType getType() { + return SandboxType.REMOTE; + } + + @Override + public String start(RuntimeConfig config) { + return "stub"; + } + + @Override + public void send(String jsonLine) {} + + @Override + public Flux stdout() { + return Flux.empty(); + } + + @Override + public RuntimeStatus getStatus() { + return RuntimeStatus.RUNNING; + } + + @Override + public boolean isAlive() { + return true; + } + + @Override + public void close() {} + + @Override + public FileSystemAdapter getFileSystem() { + return null; + } + } + + // ===== 辅助:可配置的 TestPhase ===== + + /** 可配置行为的测试阶段。 */ + static class TestPhase 
implements InitPhase { + private final String phaseName; + private final int phaseOrder; + private final boolean shouldExec; + private final boolean verifyResult; + private final RetryPolicy policy; + private final boolean failOnExecute; + private final String failMessage; + private final boolean retryable; + private final List executionLog; + + TestPhase( + String phaseName, + int phaseOrder, + boolean shouldExec, + boolean verifyResult, + RetryPolicy policy, + boolean failOnExecute, + String failMessage, + boolean retryable, + List executionLog) { + this.phaseName = phaseName; + this.phaseOrder = phaseOrder; + this.shouldExec = shouldExec; + this.verifyResult = verifyResult; + this.policy = policy; + this.failOnExecute = failOnExecute; + this.failMessage = failMessage; + this.retryable = retryable; + this.executionLog = executionLog; + } + + @Override + public String name() { + return phaseName; + } + + @Override + public int order() { + return phaseOrder; + } + + @Override + public boolean shouldExecute(InitContext context) { + return shouldExec; + } + + @Override + public void execute(InitContext context) throws InitPhaseException { + executionLog.add("execute:" + phaseName); + if (failOnExecute) { + throw new InitPhaseException(phaseName, failMessage, retryable); + } + } + + @Override + public boolean verify(InitContext context) { + executionLog.add("verify:" + phaseName); + return verifyResult; + } + + @Override + public RetryPolicy retryPolicy() { + return policy; + } + } + + // ===== 辅助方法 ===== + + private InitContext createContext(SandboxProvider provider) { + CliProviderConfig providerConfig = new CliProviderConfig(); + return new InitContext(provider, "test-user", null, null, providerConfig, null, null); + } + + // ===== 生成器 ===== + + @Provide + Arbitrary sandboxTypes() { + return Arbitraries.of(SandboxType.REMOTE, SandboxType.OPEN_SANDBOX, SandboxType.E2B); + } + + @Provide + Arbitrary> distinctPhaseOrders() { + return Arbitraries.integers() + 
.between(1, 1000) + .list() + .ofMinSize(2) + .ofMaxSize(8) + .filter(list -> list.stream().distinct().count() == list.size()); + } + + @Provide + Arbitrary phaseNames() { + return Arbitraries.of( + "phase-alpha", + "phase-beta", + "phase-gamma", + "phase-delta", + "phase-epsilon", + "phase-zeta", + "phase-eta", + "phase-theta"); + } + + @Provide + Arbitrary errorMessages() { + return Arbitraries.of( + "连接超时", "文件系统不可用", "配置注入失败", "Sidecar 未响应", "CLI 启动失败", "权限不足", "网络错误"); + } + + // ========================================================================= + // Property 1: 沙箱类型无关性 + // ========================================================================= + + /** + * Validates: Requirements 4.1, 4.2, 4.3 + * + *

Property 1: 沙箱类型无关性 — 对任意合法初始化配置,Pipeline 的阶段执行顺序和编排行为与 SandboxType + * 无关,给定相同 InitPhase 列表,不同 Provider 实现的编排行为完全一致。 + */ + @Property(tries = 100) + void pipeline_orchestrationBehavior_isIndependentOfSandboxType( + @ForAll("distinctPhaseOrders") List orders) { + // 为每种 SandboxType 创建独立的执行日志 + Map> logsByType = new LinkedHashMap<>(); + Map resultsByType = new LinkedHashMap<>(); + + for (SandboxType type : SandboxType.values()) { + List executionLog = new CopyOnWriteArrayList<>(); + List phases = new ArrayList<>(); + for (int i = 0; i < orders.size(); i++) { + phases.add( + new TestPhase( + "phase-" + i, + orders.get(i), + true, + true, + RetryPolicy.none(), + false, + null, + false, + executionLog)); + } + + StubSandboxProvider provider = new StubSandboxProvider(type); + InitConfig config = new InitConfig(Duration.ofSeconds(30), true, true, false); + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, config); + InitContext context = createContext(provider); + + InitResult result = pipeline.execute(context); + logsByType.put(type, executionLog); + resultsByType.put(type, result); + } + + // 验证所有类型的执行日志完全一致 + List referenceLog = logsByType.get(SandboxType.REMOTE); + for (SandboxType type : SandboxType.values()) { + assertEquals( + referenceLog, + logsByType.get(type), + "SandboxType " + type + " 的执行日志应与 REMOTE 一致"); + } + + // 验证所有类型的结果一致 + boolean referenceSuccess = resultsByType.get(SandboxType.REMOTE).success(); + for (SandboxType type : SandboxType.values()) { + assertEquals( + referenceSuccess, + resultsByType.get(type).success(), + "SandboxType " + type + " 的成功状态应与 REMOTE 一致"); + } + } + + /** + * Validates: Requirements 4.1, 4.2, 4.3 + * + *

Property 1 补充: 含跳过阶段时,不同 SandboxType 的编排行为仍然一致。 + */ + @Property(tries = 100) + void pipeline_withSkippedPhases_behaviorIsIndependentOfSandboxType( + @ForAll("distinctPhaseOrders") List orders, + @ForAll @net.jqwik.api.constraints.Size(min = 1, max = 8) + List shouldExecuteFlags) { + // 确保 flags 和 orders 长度一致 + int size = Math.min(orders.size(), shouldExecuteFlags.size()); + + Map> logsByType = new LinkedHashMap<>(); + + for (SandboxType type : SandboxType.values()) { + List executionLog = new CopyOnWriteArrayList<>(); + List phases = new ArrayList<>(); + for (int i = 0; i < size; i++) { + phases.add( + new TestPhase( + "phase-" + i, + orders.get(i), + shouldExecuteFlags.get(i), + true, + RetryPolicy.none(), + false, + null, + false, + executionLog)); + } + + StubSandboxProvider provider = new StubSandboxProvider(type); + InitConfig config = new InitConfig(Duration.ofSeconds(30), true, true, false); + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, config); + InitContext context = createContext(provider); + + pipeline.execute(context); + logsByType.put(type, executionLog); + } + + List referenceLog = logsByType.get(SandboxType.REMOTE); + for (SandboxType type : SandboxType.values()) { + assertEquals( + referenceLog, + logsByType.get(type), + "含跳过阶段时,SandboxType " + type + " 的执行日志应与 REMOTE 一致"); + } + } + + // ========================================================================= + // Property 3: 阶段执行顺序保证 + // ========================================================================= + + /** + * Validates: Requirements 4.1, 4.2, 4.3, 4.6 + * + *

Property 3: 阶段执行顺序保证 — 各阶段实际执行顺序严格按 order() 升序排列,后续阶段不会在前置阶段 verify() + * 返回 true 之前被调用。 + */ + @Property(tries = 200) + void pipeline_executesPhases_inAscendingOrderWithVerifyBeforeNext( + @ForAll("distinctPhaseOrders") List orders) { + List executionLog = new CopyOnWriteArrayList<>(); + List phases = new ArrayList<>(); + for (int i = 0; i < orders.size(); i++) { + phases.add( + new TestPhase( + "phase-" + i, + orders.get(i), + true, + true, + RetryPolicy.none(), + false, + null, + false, + executionLog)); + } + + InitConfig config = new InitConfig(Duration.ofSeconds(30), true, true, false); + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, config); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + assertTrue(result.success(), "所有阶段应成功执行"); + + // 按 order 排序后的阶段名称 + List sortedPhaseNames = + phases.stream() + .sorted(Comparator.comparingInt(InitPhase::order)) + .map(InitPhase::name) + .toList(); + + // 验证执行日志中 execute 的顺序严格按 order 升序 + List executeEntries = + executionLog.stream().filter(e -> e.startsWith("execute:")).toList(); + List verifyEntries = + executionLog.stream().filter(e -> e.startsWith("verify:")).toList(); + + assertEquals(sortedPhaseNames.size(), executeEntries.size(), "每个阶段应执行一次"); + assertEquals(sortedPhaseNames.size(), verifyEntries.size(), "每个阶段应验证一次"); + + for (int i = 0; i < sortedPhaseNames.size(); i++) { + assertEquals( + "execute:" + sortedPhaseNames.get(i), + executeEntries.get(i), + "第 " + i + " 个执行的阶段应为 " + sortedPhaseNames.get(i)); + } + + // 验证每个阶段的 verify 在下一个阶段的 execute 之前 + for (int i = 0; i < sortedPhaseNames.size() - 1; i++) { + String currentVerify = "verify:" + sortedPhaseNames.get(i); + String nextExecute = "execute:" + sortedPhaseNames.get(i + 1); + int verifyIndex = executionLog.indexOf(currentVerify); + int nextExecIndex = executionLog.indexOf(nextExecute); + assertTrue( + verifyIndex < nextExecIndex, + "阶段 " + + 
sortedPhaseNames.get(i) + + " 的 verify 应在阶段 " + + sortedPhaseNames.get(i + 1) + + " 的 execute 之前"); + } + } + + /** + * Validates: Requirements 4.2, 4.6 + * + *

Property 3 补充: shouldExecute() 返回 false 的阶段被跳过并记录 PHASE_SKIP 事件。 + */ + @Property(tries = 200) + void pipeline_skipsPhases_whenShouldExecuteReturnsFalse( + @ForAll("distinctPhaseOrders") List orders, + @ForAll @net.jqwik.api.constraints.Size(min = 2, max = 8) + List shouldExecuteFlags) { + int size = Math.min(orders.size(), shouldExecuteFlags.size()); + // 确保至少有一个 false + boolean hasSkip = shouldExecuteFlags.subList(0, size).contains(false); + Assume.that(hasSkip); + + List executionLog = new CopyOnWriteArrayList<>(); + List phases = new ArrayList<>(); + for (int i = 0; i < size; i++) { + phases.add( + new TestPhase( + "phase-" + i, + orders.get(i), + shouldExecuteFlags.get(i), + true, + RetryPolicy.none(), + false, + null, + false, + executionLog)); + } + + InitConfig config = new InitConfig(Duration.ofSeconds(30), true, true, false); + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, config); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + assertTrue(result.success()); + + // 按 order 排序 + List sortedPhases = + phases.stream().sorted(Comparator.comparingInt(InitPhase::order)).toList(); + + for (int i = 0; i < sortedPhases.size(); i++) { + InitPhase phase = sortedPhases.get(i); + boolean shouldExec = phase.shouldExecute(context); + + if (!shouldExec) { + // 跳过的阶段不应出现在执行日志中 + assertFalse( + executionLog.contains("execute:" + phase.name()), + "跳过的阶段 " + phase.name() + " 不应被执行"); + + // 应记录 PHASE_SKIP 事件 + boolean hasSkipEvent = + context.getEvents().stream() + .anyMatch( + e -> + e.phase().equals(phase.name()) + && e.type() + == InitEvent.EventType.PHASE_SKIP); + assertTrue(hasSkipEvent, "跳过的阶段 " + phase.name() + " 应记录 PHASE_SKIP 事件"); + + // 状态应为 SKIPPED + assertEquals( + PhaseStatus.SKIPPED, + context.getPhaseStatuses().get(phase.name()), + "跳过的阶段 " + phase.name() + " 状态应为 SKIPPED"); + } else { + // 执行的阶段应出现在日志中 + assertTrue( + executionLog.contains("execute:" + 
phase.name()), + "应执行的阶段 " + phase.name() + " 应在执行日志中"); + } + } + } + + // ========================================================================= + // Property 4: 失败结果完整性 + // ========================================================================= + + /** + * Validates: Requirements 4.5, 9.4, 9.5 + * + *

Property 4: 失败结果完整性 — 阶段失败时 InitResult 包含失败阶段名称、错误信息、总耗时、各阶段耗时和完整事件日志。 + */ + @Property(tries = 200) + void pipeline_failureResult_containsCompleteInformation( + @ForAll("distinctPhaseOrders") List orders, + @ForAll @net.jqwik.api.constraints.IntRange(min = 0, max = 7) int failAtIndex, + @ForAll("errorMessages") String errorMsg) { + Assume.that(failAtIndex < orders.size()); + + // 按 order 排序后确定哪个阶段失败 + List sortedOrders = new ArrayList<>(orders); + Collections.sort(sortedOrders); + + List executionLog = new CopyOnWriteArrayList<>(); + List phases = new ArrayList<>(); + for (int i = 0; i < orders.size(); i++) { + boolean shouldFail = (orders.get(i).equals(sortedOrders.get(failAtIndex))); + phases.add( + new TestPhase( + "phase-" + i, + orders.get(i), + true, + true, + RetryPolicy.none(), + shouldFail, + errorMsg, + false, + executionLog)); + } + + InitConfig config = new InitConfig(Duration.ofSeconds(30), true, true, false); + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, config); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + // 验证失败结果 + assertFalse(result.success(), "应返回失败结果"); + + // 失败阶段名称不为空 + assertNotNull(result.failedPhase(), "failedPhase 不应为 null"); + assertFalse(result.failedPhase().isEmpty(), "failedPhase 不应为空"); + + // 错误信息不为空 + assertNotNull(result.errorMessage(), "errorMessage 不应为 null"); + assertEquals(errorMsg, result.errorMessage(), "errorMessage 应与抛出的异常信息一致"); + + // 总耗时不为空且非负 + assertNotNull(result.totalDuration(), "totalDuration 不应为 null"); + assertFalse(result.totalDuration().isNegative(), "totalDuration 不应为负"); + + // 各阶段耗时不为空 + assertNotNull(result.phaseDurations(), "phaseDurations 不应为 null"); + // 失败阶段应在 phaseDurations 中 + assertTrue( + result.phaseDurations().containsKey(result.failedPhase()), + "phaseDurations 应包含失败阶段 " + result.failedPhase()); + + // 事件日志不为空 + assertNotNull(result.events(), "events 不应为 null"); + 
assertFalse(result.events().isEmpty(), "events 不应为空"); + + // 应包含 PHASE_FAIL 事件 + boolean hasFailEvent = + result.events().stream() + .anyMatch( + e -> + e.phase().equals(result.failedPhase()) + && e.type() == InitEvent.EventType.PHASE_FAIL); + assertTrue(hasFailEvent, "事件日志应包含失败阶段的 PHASE_FAIL 事件"); + + // 失败阶段之前的已完成阶段应有 PHASE_COMPLETE 事件 + for (Map.Entry entry : result.phaseDurations().entrySet()) { + if (!entry.getKey().equals(result.failedPhase())) { + boolean hasCompleteEvent = + result.events().stream() + .anyMatch( + e -> + e.phase().equals(entry.getKey()) + && e.type() + == InitEvent.EventType + .PHASE_COMPLETE); + assertTrue(hasCompleteEvent, "已完成阶段 " + entry.getKey() + " 应有 PHASE_COMPLETE 事件"); + } + } + } + + /** + * Validates: Requirements 4.5, 9.4 + * + *

Property 4 补充: 失败阶段之后的阶段不应被执行。 + */ + @Property(tries = 200) + void pipeline_afterFailure_subsequentPhasesAreNotExecuted( + @ForAll("distinctPhaseOrders") List orders, + @ForAll @net.jqwik.api.constraints.IntRange(min = 0, max = 7) int failAtIndex, + @ForAll("errorMessages") String errorMsg) { + Assume.that(failAtIndex < orders.size()); + + List sortedOrders = new ArrayList<>(orders); + Collections.sort(sortedOrders); + int failOrder = sortedOrders.get(failAtIndex); + + List executionLog = new CopyOnWriteArrayList<>(); + List phases = new ArrayList<>(); + for (int i = 0; i < orders.size(); i++) { + boolean shouldFail = (orders.get(i) == failOrder); + phases.add( + new TestPhase( + "phase-" + i, + orders.get(i), + true, + true, + RetryPolicy.none(), + shouldFail, + errorMsg, + false, + executionLog)); + } + + InitConfig config = new InitConfig(Duration.ofSeconds(30), true, true, false); + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, config); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + assertFalse(result.success()); + + // 失败阶段之后的阶段不应被执行 + for (InitPhase phase : phases) { + if (phase.order() > failOrder) { + assertFalse( + executionLog.contains("execute:" + phase.name()), + "失败阶段之后的阶段 " + phase.name() + " 不应被执行"); + } + } + } + + // ========================================================================= + // Property 6: 超时保证 + // ========================================================================= + + /** + * Validates: Requirements 4.7 + * + *

Property 6: 超时保证 — 总耗时不超过 InitConfig.totalTimeout + 5s 清理时间。 + */ + @Property(tries = 50) + void pipeline_totalDuration_doesNotExceedTimeoutPlusCleanup( + @ForAll @net.jqwik.api.constraints.IntRange(min = 1, max = 3) int timeoutSeconds) { + Duration totalTimeout = Duration.ofSeconds(timeoutSeconds); + Duration maxAllowed = totalTimeout.plusSeconds(5); + + // 创建一个会阻塞超过 timeout 的阶段 + List executionLog = new CopyOnWriteArrayList<>(); + InitPhase slowPhase = + new InitPhase() { + @Override + public String name() { + return "slow-phase"; + } + + @Override + public int order() { + return 100; + } + + @Override + public boolean shouldExecute(InitContext context) { + return true; + } + + @Override + public void execute(InitContext context) throws InitPhaseException { + executionLog.add("execute:slow-phase"); + // 模拟耗时操作,但不会无限阻塞 + try { + Thread.sleep(500); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + } + + @Override + public boolean verify(InitContext context) { + return true; + } + + @Override + public RetryPolicy retryPolicy() { + return RetryPolicy.none(); + } + }; + + // 创建多个阶段,总执行时间会超过 timeout + List phases = new ArrayList<>(); + for (int i = 0; i < 20; i++) { + final int idx = i; + phases.add( + new InitPhase() { + @Override + public String name() { + return "phase-" + idx; + } + + @Override + public int order() { + return 100 + idx; + } + + @Override + public boolean shouldExecute(InitContext context) { + return true; + } + + @Override + public void execute(InitContext context) throws InitPhaseException { + try { + Thread.sleep(300); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + } + + @Override + public boolean verify(InitContext context) { + return true; + } + + @Override + public RetryPolicy retryPolicy() { + return RetryPolicy.none(); + } + }); + } + + InitConfig config = new InitConfig(totalTimeout, true, true, false); + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, config); 
+ InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + long startMs = System.currentTimeMillis(); + InitResult result = pipeline.execute(context); + long actualMs = System.currentTimeMillis() - startMs; + Duration actualDuration = Duration.ofMillis(actualMs); + + // 总耗时不应超过 timeout + 5s 清理时间 + assertTrue( + actualDuration.compareTo(maxAllowed) <= 0, + "总耗时 " + + actualDuration.toMillis() + + "ms 不应超过 " + + maxAllowed.toMillis() + + "ms (timeout + 5s)"); + + // 如果超时了,结果应为失败 + if (!result.success()) { + assertNotNull(result.failedPhase(), "超时失败时 failedPhase 不应为 null"); + assertNotNull(result.totalDuration(), "超时失败时 totalDuration 不应为 null"); + } + } + + /** + * Validates: Requirements 4.7 + * + *

Property 6 补充: 超时时 Pipeline 返回失败结果并包含超时信息。 + */ + @Property(tries = 30) + void pipeline_onTimeout_returnsFailureWithTimeoutInfo( + @ForAll @net.jqwik.api.constraints.IntRange(min = 1, max = 2) int timeoutSeconds) { + Duration totalTimeout = Duration.ofSeconds(timeoutSeconds); + + // 第一个阶段正常,第二个阶段会导致超时检查触发 + List phases = new ArrayList<>(); + // 第一个阶段耗时接近 timeout + phases.add( + new InitPhase() { + @Override + public String name() { + return "long-phase"; + } + + @Override + public int order() { + return 100; + } + + @Override + public boolean shouldExecute(InitContext context) { + return true; + } + + @Override + public void execute(InitContext context) throws InitPhaseException { + try { + // 睡眠超过 timeout,使得下一个阶段开始前超时检查触发 + Thread.sleep(totalTimeout.toMillis() + 200); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + } + + @Override + public boolean verify(InitContext context) { + return true; + } + + @Override + public RetryPolicy retryPolicy() { + return RetryPolicy.none(); + } + }); + // 第二个阶段 — 超时检查在此阶段开始前触发 + phases.add( + new TestPhase( + "after-timeout-phase", + 200, + true, + true, + RetryPolicy.none(), + false, + null, + false, + new CopyOnWriteArrayList<>())); + + InitConfig config = new InitConfig(totalTimeout, true, true, false); + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, config); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + assertFalse(result.success(), "超时后应返回失败结果"); + assertNotNull(result.failedPhase(), "超时失败时 failedPhase 不应为 null"); + assertNotNull(result.errorMessage(), "超时失败时 errorMessage 不应为 null"); + assertTrue( + result.errorMessage().contains("超时"), "超时错误信息应包含'超时'关键字: " + result.errorMessage()); + assertNotNull(result.totalDuration(), "超时失败时 totalDuration 不应为 null"); + assertNotNull(result.events(), "超时失败时 events 不应为 null"); + assertFalse(result.events().isEmpty(), "超时失败时 events 不应为空"); + } +} diff 
--git a/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/sandbox/init/SandboxInitPipelineTest.java b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/sandbox/init/SandboxInitPipelineTest.java new file mode 100644 index 000000000..c885f6ec4 --- /dev/null +++ b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/sandbox/init/SandboxInitPipelineTest.java @@ -0,0 +1,1145 @@ +package com.alibaba.himarket.service.hicoding.sandbox.init; + +import static org.junit.jupiter.api.Assertions.*; + +import com.alibaba.himarket.config.AcpProperties.CliProviderConfig; +import com.alibaba.himarket.service.hicoding.filesystem.FileSystemAdapter; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeAdapter; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeConfig; +import com.alibaba.himarket.service.hicoding.runtime.RuntimeStatus; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxConfig; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxInfo; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxProvider; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxType; +import java.io.IOException; +import java.net.URI; +import java.time.Duration; +import java.util.*; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.atomic.AtomicInteger; +import org.junit.jupiter.api.Test; +import reactor.core.publisher.Flux; + +/** + * SandboxInitPipeline 单元测试。 + * + *

验证 Pipeline 的阶段顺序执行、跳过逻辑、失败快速退出、重试、超时终止和 resumeFrom 恢复执行。 + * + *

Validates: Requirements 4.1, 4.2, 4.3, 4.4, 4.5, 4.7, 4.8 + */ +class SandboxInitPipelineTest { + + // ===== 辅助:Stub SandboxProvider ===== + + static class StubSandboxProvider implements SandboxProvider { + private final SandboxType type; + + StubSandboxProvider(SandboxType type) { + this.type = type; + } + + @Override + public SandboxType getType() { + return type; + } + + @Override + public SandboxInfo acquire(SandboxConfig config) { + return new SandboxInfo( + type, + "stub-" + type.getValue(), + "localhost", + 8080, + "/workspace", + false, + Map.of()); + } + + @Override + public void release(SandboxInfo info) {} + + @Override + public boolean healthCheck(SandboxInfo info) { + return true; + } + + @Override + public void writeFile(SandboxInfo info, String relativePath, String content) + throws IOException {} + + @Override + public String readFile(SandboxInfo info, String relativePath) throws IOException { + return ""; + } + + @Override + public RuntimeAdapter connectSidecar(SandboxInfo info, RuntimeConfig config) { + return new StubRuntimeAdapter(); + } + + @Override + public URI getSidecarUri(SandboxInfo info, String command, String args) { + return URI.create("ws://localhost:8080/?command=" + command); + } + } + + // ===== 辅助:Stub RuntimeAdapter ===== + + static class StubRuntimeAdapter implements RuntimeAdapter { + @Override + public SandboxType getType() { + return SandboxType.REMOTE; + } + + @Override + public String start(RuntimeConfig config) { + return "stub"; + } + + @Override + public void send(String jsonLine) {} + + @Override + public Flux stdout() { + return Flux.empty(); + } + + @Override + public RuntimeStatus getStatus() { + return RuntimeStatus.RUNNING; + } + + @Override + public boolean isAlive() { + return true; + } + + @Override + public void close() {} + + @Override + public FileSystemAdapter getFileSystem() { + return null; + } + } + + // ===== 辅助:可配置的 TestPhase ===== + + static class TestPhase implements InitPhase { + private final String 
phaseName; + private final int phaseOrder; + private final boolean shouldExec; + private final boolean verifyResult; + private final RetryPolicy policy; + private final boolean failOnExecute; + private final String failMessage; + private final boolean retryable; + private final List executionLog; + + TestPhase( + String phaseName, + int phaseOrder, + boolean shouldExec, + boolean verifyResult, + RetryPolicy policy, + boolean failOnExecute, + String failMessage, + boolean retryable, + List executionLog) { + this.phaseName = phaseName; + this.phaseOrder = phaseOrder; + this.shouldExec = shouldExec; + this.verifyResult = verifyResult; + this.policy = policy; + this.failOnExecute = failOnExecute; + this.failMessage = failMessage; + this.retryable = retryable; + this.executionLog = executionLog; + } + + @Override + public String name() { + return phaseName; + } + + @Override + public int order() { + return phaseOrder; + } + + @Override + public boolean shouldExecute(InitContext context) { + return shouldExec; + } + + @Override + public void execute(InitContext context) throws InitPhaseException { + executionLog.add("execute:" + phaseName); + if (failOnExecute) { + throw new InitPhaseException(phaseName, failMessage, retryable); + } + } + + @Override + public boolean verify(InitContext context) { + executionLog.add("verify:" + phaseName); + return verifyResult; + } + + @Override + public RetryPolicy retryPolicy() { + return policy; + } + } + + // ===== 辅助:支持第 N 次成功的 TestPhase ===== + + static class RetryableTestPhase implements InitPhase { + private final String phaseName; + private final int phaseOrder; + private final RetryPolicy policy; + private final int succeedOnAttempt; + private final AtomicInteger attemptCount = new AtomicInteger(0); + private final List executionLog; + + RetryableTestPhase( + String phaseName, + int phaseOrder, + RetryPolicy policy, + int succeedOnAttempt, + List executionLog) { + this.phaseName = phaseName; + this.phaseOrder = phaseOrder; + 
this.policy = policy; + this.succeedOnAttempt = succeedOnAttempt; + this.executionLog = executionLog; + } + + @Override + public String name() { + return phaseName; + } + + @Override + public int order() { + return phaseOrder; + } + + @Override + public boolean shouldExecute(InitContext context) { + return true; + } + + @Override + public void execute(InitContext context) throws InitPhaseException { + int attempt = attemptCount.incrementAndGet(); + executionLog.add("execute:" + phaseName + ":attempt-" + attempt); + if (attempt < succeedOnAttempt) { + throw new InitPhaseException(phaseName, "第 " + attempt + " 次失败", true); + } + } + + @Override + public boolean verify(InitContext context) { + executionLog.add("verify:" + phaseName); + return true; + } + + @Override + public RetryPolicy retryPolicy() { + return policy; + } + + public int getAttemptCount() { + return attemptCount.get(); + } + } + + // ===== 辅助方法 ===== + + private InitContext createContext(SandboxProvider provider) { + CliProviderConfig providerConfig = new CliProviderConfig(); + return new InitContext(provider, "test-user", null, null, providerConfig, null, null); + } + + private InitConfig defaultConfig() { + return new InitConfig(Duration.ofSeconds(30), true, true, false); + } + + // ========================================================================= + // 测试:阶段顺序执行 (Requirements 4.1) + // ========================================================================= + + @Test + void execute_phasesRunInAscendingOrder() { + List log = new CopyOnWriteArrayList<>(); + List phases = + List.of( + new TestPhase( + "phase-c", + 300, + true, + true, + RetryPolicy.none(), + false, + null, + false, + log), + new TestPhase( + "phase-a", + 100, + true, + true, + RetryPolicy.none(), + false, + null, + false, + log), + new TestPhase( + "phase-b", + 200, + true, + true, + RetryPolicy.none(), + false, + null, + false, + log)); + + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, defaultConfig()); + 
InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + assertTrue(result.success()); + // 验证执行顺序为 a -> b -> c + List execEntries = log.stream().filter(e -> e.startsWith("execute:")).toList(); + assertEquals(List.of("execute:phase-a", "execute:phase-b", "execute:phase-c"), execEntries); + } + + @Test + void execute_verifyCalledAfterExecuteBeforeNextPhase() { + List log = new CopyOnWriteArrayList<>(); + List phases = + List.of( + new TestPhase( + "phase-1", + 100, + true, + true, + RetryPolicy.none(), + false, + null, + false, + log), + new TestPhase( + "phase-2", + 200, + true, + true, + RetryPolicy.none(), + false, + null, + false, + log)); + + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, defaultConfig()); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + assertTrue(result.success()); + // 期望顺序: execute:1 -> verify:1 -> execute:2 -> verify:2 + assertEquals( + List.of("execute:phase-1", "verify:phase-1", "execute:phase-2", "verify:phase-2"), + log); + } + + @Test + void execute_recordsPhaseStartAndCompleteEvents() { + List log = new CopyOnWriteArrayList<>(); + List phases = + List.of( + new TestPhase( + "phase-1", + 100, + true, + true, + RetryPolicy.none(), + false, + null, + false, + log)); + + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, defaultConfig()); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + assertTrue(result.success()); + // 验证事件记录 + boolean hasStart = + context.getEvents().stream() + .anyMatch( + e -> + e.phase().equals("phase-1") + && e.type() == InitEvent.EventType.PHASE_START); + boolean hasComplete = + context.getEvents().stream() + .anyMatch( + e -> + e.phase().equals("phase-1") + && e.type() == InitEvent.EventType.PHASE_COMPLETE); + 
assertTrue(hasStart, "应记录 PHASE_START 事件"); + assertTrue(hasComplete, "应记录 PHASE_COMPLETE 事件"); + } + + @Test + void execute_resultContainsPhaseDurations() { + List log = new CopyOnWriteArrayList<>(); + List phases = + List.of( + new TestPhase( + "phase-1", + 100, + true, + true, + RetryPolicy.none(), + false, + null, + false, + log), + new TestPhase( + "phase-2", + 200, + true, + true, + RetryPolicy.none(), + false, + null, + false, + log)); + + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, defaultConfig()); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + assertTrue(result.success()); + assertNotNull(result.phaseDurations()); + assertTrue(result.phaseDurations().containsKey("phase-1")); + assertTrue(result.phaseDurations().containsKey("phase-2")); + assertNotNull(result.totalDuration()); + assertFalse(result.totalDuration().isNegative()); + } + + // ========================================================================= + // 测试:shouldExecute 返回 false 时跳过阶段 (Requirements 4.2) + // ========================================================================= + + @Test + void execute_skipsPhaseWhenShouldExecuteReturnsFalse() { + List log = new CopyOnWriteArrayList<>(); + List phases = + List.of( + new TestPhase( + "phase-1", + 100, + true, + true, + RetryPolicy.none(), + false, + null, + false, + log), + new TestPhase( + "skipped", + 200, + false, + true, + RetryPolicy.none(), + false, + null, + false, + log), + new TestPhase( + "phase-3", + 300, + true, + true, + RetryPolicy.none(), + false, + null, + false, + log)); + + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, defaultConfig()); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + assertTrue(result.success()); + // 跳过的阶段不应出现在执行日志中 + assertFalse(log.contains("execute:skipped")); + // 其他阶段正常执行 + 
assertTrue(log.contains("execute:phase-1")); + assertTrue(log.contains("execute:phase-3")); + } + + @Test + void execute_recordsPhaseSkipEvent() { + List log = new CopyOnWriteArrayList<>(); + List phases = + List.of( + new TestPhase( + "skipped", + 100, + false, + true, + RetryPolicy.none(), + false, + null, + false, + log)); + + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, defaultConfig()); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + assertTrue(result.success()); + boolean hasSkipEvent = + context.getEvents().stream() + .anyMatch( + e -> + e.phase().equals("skipped") + && e.type() == InitEvent.EventType.PHASE_SKIP); + assertTrue(hasSkipEvent, "应记录 PHASE_SKIP 事件"); + assertEquals(PhaseStatus.SKIPPED, context.getPhaseStatuses().get("skipped")); + } + + @Test + void execute_allPhasesSkipped_returnsSuccess() { + List log = new CopyOnWriteArrayList<>(); + List phases = + List.of( + new TestPhase( + "skip-1", + 100, + false, + true, + RetryPolicy.none(), + false, + null, + false, + log), + new TestPhase( + "skip-2", + 200, + false, + true, + RetryPolicy.none(), + false, + null, + false, + log)); + + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, defaultConfig()); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + assertTrue(result.success()); + assertTrue(log.isEmpty(), "跳过的阶段不应有执行日志"); + } + + // ========================================================================= + // 测试:阶段失败后快速退出 (Requirements 4.5) + // ========================================================================= + + @Test + void execute_failsImmediatelyWhenPhaseThrows() { + List log = new CopyOnWriteArrayList<>(); + List phases = + List.of( + new TestPhase( + "phase-1", + 100, + true, + true, + RetryPolicy.none(), + false, + null, + false, + log), + new TestPhase( + "fail-phase", + 
200, + true, + true, + RetryPolicy.none(), + true, + "模拟失败", + false, + log), + new TestPhase( + "phase-3", + 300, + true, + true, + RetryPolicy.none(), + false, + null, + false, + log)); + + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, defaultConfig()); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + assertFalse(result.success()); + assertEquals("fail-phase", result.failedPhase()); + assertEquals("模拟失败", result.errorMessage()); + // 失败阶段之后的阶段不应被执行 + assertFalse(log.contains("execute:phase-3")); + // 失败阶段之前的阶段应已执行 + assertTrue(log.contains("execute:phase-1")); + } + + @Test + void execute_failureResultContainsPhaseFailEvent() { + List log = new CopyOnWriteArrayList<>(); + List phases = + List.of( + new TestPhase( + "fail-phase", + 100, + true, + true, + RetryPolicy.none(), + true, + "出错了", + false, + log)); + + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, defaultConfig()); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + assertFalse(result.success()); + boolean hasFailEvent = + result.events().stream() + .anyMatch( + e -> + e.phase().equals("fail-phase") + && e.type() == InitEvent.EventType.PHASE_FAIL); + assertTrue(hasFailEvent, "应记录 PHASE_FAIL 事件"); + assertEquals(PhaseStatus.FAILED, context.getPhaseStatuses().get("fail-phase")); + } + + @Test + void execute_failureResultContainsDurationsForCompletedPhases() { + List log = new CopyOnWriteArrayList<>(); + List phases = + List.of( + new TestPhase( + "ok-phase", + 100, + true, + true, + RetryPolicy.none(), + false, + null, + false, + log), + new TestPhase( + "fail-phase", + 200, + true, + true, + RetryPolicy.none(), + true, + "失败", + false, + log)); + + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, defaultConfig()); + InitContext context = createContext(new 
StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + assertFalse(result.success()); + assertTrue(result.phaseDurations().containsKey("ok-phase")); + assertTrue(result.phaseDurations().containsKey("fail-phase")); + assertNotNull(result.totalDuration()); + } + + @Test + void execute_verifyFailureCausesFailResult() { + List log = new CopyOnWriteArrayList<>(); + // verify 返回 false + List phases = + List.of( + new TestPhase( + "bad-verify", + 100, + true, + false, + RetryPolicy.none(), + false, + null, + false, + log)); + + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, defaultConfig()); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + assertFalse(result.success()); + assertEquals("bad-verify", result.failedPhase()); + assertTrue(result.errorMessage().contains("验证失败")); + } + + // ========================================================================= + // 测试:重试逻辑(第 N 次成功)(Requirements 4.4) + // ========================================================================= + + @Test + void execute_retriesAndSucceedsOnSecondAttempt() { + List log = new CopyOnWriteArrayList<>(); + RetryPolicy policy = new RetryPolicy(3, Duration.ofMillis(10), 1.0, Duration.ofMillis(50)); + RetryableTestPhase retryPhase = new RetryableTestPhase("retry-phase", 100, policy, 2, log); + + SandboxInitPipeline pipeline = + new SandboxInitPipeline(List.of(retryPhase), defaultConfig()); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + assertTrue(result.success()); + assertEquals(2, retryPhase.getAttemptCount()); + assertTrue(log.contains("execute:retry-phase:attempt-1")); + assertTrue(log.contains("execute:retry-phase:attempt-2")); + } + + @Test + void execute_retriesAndSucceedsOnThirdAttempt() { + List log = new CopyOnWriteArrayList<>(); + RetryPolicy policy = new 
RetryPolicy(3, Duration.ofMillis(10), 1.0, Duration.ofMillis(50)); + RetryableTestPhase retryPhase = new RetryableTestPhase("retry-phase", 100, policy, 3, log); + + SandboxInitPipeline pipeline = + new SandboxInitPipeline(List.of(retryPhase), defaultConfig()); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + assertTrue(result.success()); + assertEquals(3, retryPhase.getAttemptCount()); + } + + @Test + void execute_failsAfterExhaustingRetries() { + List log = new CopyOnWriteArrayList<>(); + RetryPolicy policy = new RetryPolicy(2, Duration.ofMillis(10), 1.0, Duration.ofMillis(50)); + // 需要第 10 次才成功,但只允许 2 次重试(共 3 次尝试) + RetryableTestPhase retryPhase = new RetryableTestPhase("retry-phase", 100, policy, 10, log); + + SandboxInitPipeline pipeline = + new SandboxInitPipeline(List.of(retryPhase), defaultConfig()); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + assertFalse(result.success()); + assertEquals("retry-phase", result.failedPhase()); + assertEquals(3, retryPhase.getAttemptCount()); // 1 初始 + 2 重试 + } + + @Test + void execute_retryRecordsPhaseRetryEvents() { + List log = new CopyOnWriteArrayList<>(); + RetryPolicy policy = new RetryPolicy(2, Duration.ofMillis(10), 1.0, Duration.ofMillis(50)); + RetryableTestPhase retryPhase = new RetryableTestPhase("retry-phase", 100, policy, 2, log); + + SandboxInitPipeline pipeline = + new SandboxInitPipeline(List.of(retryPhase), defaultConfig()); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + assertTrue(result.success()); + boolean hasRetryEvent = + context.getEvents().stream() + .anyMatch( + e -> + e.phase().equals("retry-phase") + && e.type() == InitEvent.EventType.PHASE_RETRY); + assertTrue(hasRetryEvent, "应记录 PHASE_RETRY 事件"); + } + + @Test + void 
execute_nonRetryableExceptionDoesNotRetry() { + List log = new CopyOnWriteArrayList<>(); + // retryable=false,即使有重试策略也不重试 + List phases = + List.of( + new TestPhase( + "no-retry", + 100, + true, + true, + new RetryPolicy( + 3, Duration.ofMillis(10), 1.0, Duration.ofMillis(50)), + true, + "不可重试错误", + false, + log)); + + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, defaultConfig()); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + assertFalse(result.success()); + // 只执行了一次 + long execCount = log.stream().filter(e -> e.equals("execute:no-retry")).count(); + assertEquals(1, execCount); + } + + @Test + void execute_retryPolicyNone_doesNotRetry() { + List log = new CopyOnWriteArrayList<>(); + List phases = + List.of( + new TestPhase( + "no-retry", + 100, + true, + true, + RetryPolicy.none(), + true, + "直接失败", + true, + log)); + + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, defaultConfig()); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + assertFalse(result.success()); + long execCount = log.stream().filter(e -> e.equals("execute:no-retry")).count(); + assertEquals(1, execCount); + } + + // ========================================================================= + // 测试:总超时终止 (Requirements 4.7) + // ========================================================================= + + @Test + void execute_terminatesWhenTotalTimeoutExceeded() { + Duration totalTimeout = Duration.ofSeconds(1); + InitConfig config = new InitConfig(totalTimeout, true, true, false); + + // 第一个阶段耗时超过 timeout + InitPhase slowPhase = + new InitPhase() { + @Override + public String name() { + return "slow-phase"; + } + + @Override + public int order() { + return 100; + } + + @Override + public boolean shouldExecute(InitContext context) { + return true; + } + + @Override + public void 
execute(InitContext context) throws InitPhaseException { + try { + Thread.sleep(1500); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + } + + @Override + public boolean verify(InitContext context) { + return true; + } + + @Override + public RetryPolicy retryPolicy() { + return RetryPolicy.none(); + } + }; + + List log = new CopyOnWriteArrayList<>(); + TestPhase afterPhase = + new TestPhase( + "after-timeout", + 200, + true, + true, + RetryPolicy.none(), + false, + null, + false, + log); + + SandboxInitPipeline pipeline = + new SandboxInitPipeline(List.of(slowPhase, afterPhase), config); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + // 超时后第二个阶段不应被执行 + assertFalse(result.success()); + assertNotNull(result.failedPhase()); + assertTrue(result.errorMessage().contains("超时")); + assertFalse(log.contains("execute:after-timeout")); + } + + @Test + void execute_timeoutResultContainsFailPhaseInfo() { + Duration totalTimeout = Duration.ofMillis(100); + InitConfig config = new InitConfig(totalTimeout, true, true, false); + + InitPhase slowPhase = + new InitPhase() { + @Override + public String name() { + return "slow"; + } + + @Override + public int order() { + return 100; + } + + @Override + public boolean shouldExecute(InitContext context) { + return true; + } + + @Override + public void execute(InitContext context) throws InitPhaseException { + try { + Thread.sleep(300); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + } + + @Override + public boolean verify(InitContext context) { + return true; + } + + @Override + public RetryPolicy retryPolicy() { + return RetryPolicy.none(); + } + }; + + List log = new CopyOnWriteArrayList<>(); + TestPhase nextPhase = + new TestPhase("next", 200, true, true, RetryPolicy.none(), false, null, false, log); + + SandboxInitPipeline pipeline = + new SandboxInitPipeline(List.of(slowPhase, 
nextPhase), config); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + assertFalse(result.success()); + assertNotNull(result.totalDuration()); + assertNotNull(result.events()); + assertFalse(result.events().isEmpty()); + } + + // ========================================================================= + // 测试:resumeFrom 恢复执行 (Requirements 4.8) + // ========================================================================= + + @Test + void resumeFrom_executesFromSpecifiedPhase() { + List log = new CopyOnWriteArrayList<>(); + List phases = + List.of( + new TestPhase( + "phase-1", + 100, + true, + true, + RetryPolicy.none(), + false, + null, + false, + log), + new TestPhase( + "phase-2", + 200, + true, + true, + RetryPolicy.none(), + false, + null, + false, + log), + new TestPhase( + "phase-3", + 300, + true, + true, + RetryPolicy.none(), + false, + null, + false, + log)); + + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, defaultConfig()); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.resumeFrom(context, "phase-2"); + + assertTrue(result.success()); + // phase-1 不应被执行 + assertFalse(log.contains("execute:phase-1")); + // phase-2 和 phase-3 应被执行 + assertTrue(log.contains("execute:phase-2")); + assertTrue(log.contains("execute:phase-3")); + } + + @Test + void resumeFrom_lastPhase_executesOnlyThatPhase() { + List log = new CopyOnWriteArrayList<>(); + List phases = + List.of( + new TestPhase( + "phase-1", + 100, + true, + true, + RetryPolicy.none(), + false, + null, + false, + log), + new TestPhase( + "phase-2", + 200, + true, + true, + RetryPolicy.none(), + false, + null, + false, + log)); + + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, defaultConfig()); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = 
pipeline.resumeFrom(context, "phase-2"); + + assertTrue(result.success()); + assertFalse(log.contains("execute:phase-1")); + assertTrue(log.contains("execute:phase-2")); + } + + @Test + void resumeFrom_unknownPhase_executesFromBeginning() { + List log = new CopyOnWriteArrayList<>(); + List phases = + List.of( + new TestPhase( + "phase-1", + 100, + true, + true, + RetryPolicy.none(), + false, + null, + false, + log), + new TestPhase( + "phase-2", + 200, + true, + true, + RetryPolicy.none(), + false, + null, + false, + log)); + + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, defaultConfig()); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + // 不存在的阶段名,应从头开始 + InitResult result = pipeline.resumeFrom(context, "nonexistent"); + + assertTrue(result.success()); + assertTrue(log.contains("execute:phase-1")); + assertTrue(log.contains("execute:phase-2")); + } + + // ========================================================================= + // 测试:空阶段列表 + // ========================================================================= + + @Test + void execute_emptyPhaseList_returnsSuccess() { + SandboxInitPipeline pipeline = new SandboxInitPipeline(List.of(), defaultConfig()); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + assertTrue(result.success()); + assertNotNull(result.totalDuration()); + assertTrue(result.phaseDurations().isEmpty()); + } + + // ========================================================================= + // 测试:验证禁用时跳过 verify (Requirements 4.3) + // ========================================================================= + + @Test + void execute_skipsVerifyWhenVerificationDisabled() { + List log = new CopyOnWriteArrayList<>(); + // verify 返回 false,但 enableVerification=false + InitConfig config = new InitConfig(Duration.ofSeconds(30), true, false, false); + List phases = + List.of( + new TestPhase( + "phase-1", + 
100, + true, + false, + RetryPolicy.none(), + false, + null, + false, + log)); + + SandboxInitPipeline pipeline = new SandboxInitPipeline(phases, config); + InitContext context = createContext(new StubSandboxProvider(SandboxType.REMOTE)); + + InitResult result = pipeline.execute(context); + + // 即使 verify 返回 false,禁用验证后仍应成功 + assertTrue(result.success()); + assertTrue(log.contains("execute:phase-1")); + // verify 不应被调用 + assertFalse(log.contains("verify:phase-1")); + } +} diff --git a/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/session/QwenCodeAuthFlowTest.java b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/session/QwenCodeAuthFlowTest.java new file mode 100644 index 000000000..a22163deb --- /dev/null +++ b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/session/QwenCodeAuthFlowTest.java @@ -0,0 +1,438 @@ +package com.alibaba.himarket.service.hicoding.session; + +import static org.junit.jupiter.api.Assertions.*; + +import com.alibaba.himarket.service.hicoding.websocket.CliProcess; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; +import org.junit.jupiter.api.io.TempDir; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Qwen Code 认证流程集成测试。 + * + *

测试场景:隔离 HOME 环境下,通过 OpenAI 兼容模式认证,完成 ACP 完整流程。 + * + *

测试流程: + * 1. 创建隔离的 HOME 目录(模拟全新用户环境) + * 2. 启动 qwen --acp,附带 OpenAI 认证参数 + * 3. 完成 initialize -> session/new -> session/prompt 完整流程 + * 4. 验证响应格式和流式输出 + * + *

认证方式:使用 DashScope OpenAI 兼容模式 + * - Base URL: https://dashscope.aliyuncs.com/compatible-mode/v1 + * - API Key: 通过环境变量 DASHSCOPE_API_KEY 提供 + * + *

运行前提: + * - 安装 qwen CLI (npm i -g @anthropic/qwen-code-cli 或其他方式) + * - 设置环境变量 DASHSCOPE_API_KEY + */ +class QwenCodeAuthFlowTest { + + private static final Logger log = LoggerFactory.getLogger(QwenCodeAuthFlowTest.class); + private static final ObjectMapper mapper = new ObjectMapper(); + private static final int TIMEOUT_SECONDS = 120; // 流式响应可能需要更长时间 + + private static final String DASHSCOPE_BASE_URL = + "https://dashscope.aliyuncs.com/compatible-mode/v1"; + + @TempDir Path tempDir; + + /** + * 测试:隔离 HOME + OpenAI 认证,完成完整 ACP 流程。 + * + *

需要设置环境变量 DASHSCOPE_API_KEY 才能运行此测试。 + */ + @Test + @EnabledIfEnvironmentVariable(named = "DASHSCOPE_API_KEY", matches = ".+") + void testQwenCodeWithOpenAIAuth() throws Exception { + String apiKey = System.getenv("DASHSCOPE_API_KEY"); + assertNotNull(apiKey, "DASHSCOPE_API_KEY 环境变量必须设置"); + + if (!isCommandAvailable("qwen")) { + log.warn("跳过测试: qwen 命令未安装"); + org.junit.jupiter.api.Assumptions.assumeTrue(false, "qwen not installed, skipping"); + } + + // 创建隔离的工作目录和 HOME 目录 + Path cwd = tempDir.resolve("qwen-test"); + Path isolatedHome = tempDir.resolve("qwen-home"); + Files.createDirectories(cwd); + Files.createDirectories(isolatedHome); + + // 构建带认证参数的命令行参数 + List args = new ArrayList<>(); + args.add("--acp"); + args.add("--auth-type"); + args.add("openai"); + args.add("--openai-api-key"); + args.add(apiKey); + args.add("--openai-base-url"); + args.add(DASHSCOPE_BASE_URL); + args.add("-m"); + args.add("qwen-max"); // 指定模型,避免默认模型额度用完 + + log.info("=== Qwen Code 认证流程测试 ==="); + log.info("隔离 HOME: {}", isolatedHome); + log.info("工作目录: {}", cwd); + log.info("Base URL: {}", DASHSCOPE_BASE_URL); + + // 使用隔离的 HOME 目录启动进程 + Map extraEnv = Map.of("HOME", isolatedHome.toString()); + CliProcess process = new CliProcess("qwen", args, cwd.toString(), extraEnv); + + try { + process.start(); + assertTrue(process.isAlive(), "Qwen 进程应该处于运行状态"); + + AtomicInteger requestId = new AtomicInteger(0); + CountDownLatch initLatch = new CountDownLatch(1); + CountDownLatch sessionLatch = new CountDownLatch(1); + CountDownLatch promptLatch = new CountDownLatch(1); + + AtomicReference initResponse = new AtomicReference<>(); + AtomicReference sessionResponse = new AtomicReference<>(); + AtomicReference promptResponse = new AtomicReference<>(); + CopyOnWriteArrayList sessionUpdates = new CopyOnWriteArrayList<>(); + + process.stdout() + .subscribe( + line -> { + log.info("[qwen] STDOUT: {}", truncate(line, 200)); + try { + JsonNode node = mapper.readTree(line); + + // 处理 session/update 通知 + if 
(node.has("method") + && "session/update" + .equals(node.get("method").asText())) { + sessionUpdates.add(line); + return; + } + + if (node.has("id")) { + int id = node.get("id").asInt(); + if (id == 0) { + initResponse.set(line); + initLatch.countDown(); + } else if (id == 1) { + sessionResponse.set(line); + sessionLatch.countDown(); + } else if (id == 2) { + if (node.has("result") || node.has("error")) { + promptResponse.set(line); + promptLatch.countDown(); + } + } + } + } catch (Exception e) { + log.debug("[qwen] 非 JSON 输出: {}", line); + } + }); + + process.stderr().subscribe(line -> log.debug("[qwen] STDERR: {}", line)); + + // Step 1: initialize + log.info("=== Step 1: initialize ==="); + process.send(buildInitializeRequest(requestId.getAndIncrement())); + boolean initReceived = initLatch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + assertTrue(initReceived, "应在超时内返回 initialize 响应"); + + JsonNode initResult = mapper.readTree(initResponse.get()).get("result"); + assertNotNull(initResult, "initialize 应包含 result"); + + // 打印 agent 信息 + if (initResult.has("agentInfo")) { + JsonNode agentInfo = initResult.get("agentInfo"); + log.info( + "Agent: {} v{}", + agentInfo.path("name").asText("unknown"), + agentInfo.path("version").asText("unknown")); + } + + // 打印支持的模型 + if (initResult.has("modes")) { + JsonNode modes = initResult.get("modes"); + log.info("支持的模式: {}", modes); + } + + log.info("initialize 成功"); + + // Step 2: session/new + log.info("=== Step 2: session/new ==="); + process.send(buildSessionNewRequest(requestId.getAndIncrement(), cwd.toString())); + boolean sessionReceived = sessionLatch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + assertTrue(sessionReceived, "应在超时内返回 session/new 响应"); + + JsonNode sessionRoot = mapper.readTree(sessionResponse.get()); + + // 检查是否有错误 + if (sessionRoot.has("error")) { + JsonNode error = sessionRoot.get("error"); + int code = error.path("code").asInt(-1); + String message = error.path("message").asText("Unknown error"); + 
log.error("session/new 错误: code={}, message={}", code, message); + + if (code == -32000) { + log.warn("认证失败,可能是 API Key 无效或认证参数未正确传递"); + // 打印 authMethods 供调试 + if (error.has("data") && error.get("data").has("authMethods")) { + log.info("可用认证方式: {}", error.get("data").get("authMethods")); + } + } + fail("session/new 返回错误: " + message); + } + + JsonNode sessionResult = sessionRoot.get("result"); + assertNotNull(sessionResult, "session/new 应包含 result"); + + String sessionId = sessionResult.path("sessionId").asText(); + assertFalse(sessionId.isEmpty(), "sessionId 不应为空"); + log.info("sessionId: {}", sessionId); + log.info("session/new 成功"); + + // Step 3: session/prompt + log.info("=== Step 3: session/prompt ==="); + String testPrompt = "请用一句话介绍你自己,不超过20个字"; + process.send( + buildSessionPromptRequest(requestId.getAndIncrement(), sessionId, testPrompt)); + + boolean promptReceived = promptLatch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + assertTrue(promptReceived, "应在超时内返回 session/prompt 响应"); + + String rawPromptResponse = promptResponse.get(); + assertNotNull(rawPromptResponse, "session/prompt 响应不应为 null"); + + JsonNode promptRoot = mapper.readTree(rawPromptResponse); + + if (promptRoot.has("error")) { + JsonNode error = promptRoot.get("error"); + int code = error.path("code").asInt(-1); + String message = error.path("message").asText("Unknown error"); + String details = error.path("data").path("details").asText(""); + + log.warn("session/prompt 错误: code={}, message={}", code, message); + log.warn("错误详情: {}", details); + + // -32603 Internal error 且包含 "free tier" 或 "exhausted" 表示账户额度问题 + // 这种情况说明认证流程本身是成功的,只是账户限制 + if (code == -32603 + && (details.contains("free tier") + || details.contains("exhausted") + || details.contains("quota"))) { + log.info("=== 测试通过: 认证流程验证成功(API 调用因账户额度限制失败,但认证本身正确)==="); + log.info("收到 {} 个 session/update 通知", sessionUpdates.size()); + return; + } + + fail("session/prompt 返回错误: " + message + " - " + details); + } + + JsonNode 
promptResult = promptRoot.get("result"); + assertNotNull(promptResult, "session/prompt 应包含 result"); + + // 验证响应结构 + if (promptResult.has("stopReason")) { + String stopReason = promptResult.get("stopReason").asText(); + log.info("stopReason: {}", stopReason); + } + + log.info("session/prompt 成功"); + + // 打印流式更新统计 + log.info("收到 {} 个 session/update 通知", sessionUpdates.size()); + + log.info("=== 测试通过: 隔离 HOME + OpenAI 认证 -> prompt 流程验证完成 ==="); + + } finally { + process.close(); + log.info("进程已关闭"); + } + } + + /** + * 测试:验证未设置认证时的错误处理。 + * + *

在隔离 HOME 环境下,不提供认证参数,验证返回 -32000 错误。 + */ + @Test + void testQwenCodeWithoutAuth() throws Exception { + if (!isCommandAvailable("qwen")) { + log.warn("跳过测试: qwen 命令未安装"); + org.junit.jupiter.api.Assumptions.assumeTrue(false, "qwen not installed, skipping"); + } + + // 创建隔离的工作目录和 HOME 目录 + Path cwd = tempDir.resolve("qwen-noauth"); + Path isolatedHome = tempDir.resolve("qwen-noauth-home"); + Files.createDirectories(cwd); + Files.createDirectories(isolatedHome); + + log.info("=== Qwen Code 未认证错误测试 ==="); + + // 使用隔离的 HOME 目录,不提供认证参数 + Map extraEnv = Map.of("HOME", isolatedHome.toString()); + CliProcess process = new CliProcess("qwen", List.of("--acp"), cwd.toString(), extraEnv); + + try { + process.start(); + assertTrue(process.isAlive(), "Qwen 进程应该处于运行状态"); + + AtomicInteger requestId = new AtomicInteger(0); + CountDownLatch initLatch = new CountDownLatch(1); + CountDownLatch sessionLatch = new CountDownLatch(1); + + AtomicReference initResponse = new AtomicReference<>(); + AtomicReference sessionResponse = new AtomicReference<>(); + + process.stdout() + .subscribe( + line -> { + log.info("[qwen-noauth] STDOUT: {}", truncate(line, 200)); + try { + JsonNode node = mapper.readTree(line); + if (node.has("id")) { + int id = node.get("id").asInt(); + if (id == 0) { + initResponse.set(line); + initLatch.countDown(); + } else if (id == 1) { + sessionResponse.set(line); + sessionLatch.countDown(); + } + } + } catch (Exception e) { + log.debug("[qwen-noauth] 非 JSON 输出: {}", line); + } + }); + + process.stderr().subscribe(line -> log.debug("[qwen-noauth] STDERR: {}", line)); + + // Step 1: initialize + log.info("=== Step 1: initialize ==="); + process.send(buildInitializeRequest(requestId.getAndIncrement())); + boolean initReceived = initLatch.await(60, TimeUnit.SECONDS); + assertTrue(initReceived, "应在超时内返回 initialize 响应"); + log.info("initialize 成功"); + + // Step 2: session/new - 预期返回认证错误 + log.info("=== Step 2: session/new (expect -32000 error) ==="); + 
process.send(buildSessionNewRequest(requestId.getAndIncrement(), cwd.toString())); + boolean sessionReceived = sessionLatch.await(60, TimeUnit.SECONDS); + assertTrue(sessionReceived, "应在超时内返回 session/new 响应"); + + JsonNode sessionRoot = mapper.readTree(sessionResponse.get()); + assertTrue(sessionRoot.has("error"), "未认证时 session/new 应返回错误"); + + JsonNode error = sessionRoot.get("error"); + int code = error.path("code").asInt(-1); + assertEquals(-32000, code, "认证错误码应为 -32000"); + + log.info("错误码: {}", code); + log.info("错误消息: {}", error.path("message").asText()); + + // 验证 authMethods 在错误响应中 + if (error.has("data") && error.get("data").has("authMethods")) { + JsonNode authMethods = error.get("data").get("authMethods"); + assertTrue(authMethods.isArray(), "authMethods 应为数组"); + log.info("错误响应包含 {} 个 authMethods", authMethods.size()); + + for (JsonNode method : authMethods) { + log.info( + " authMethod: id={}, name={}, type={}", + method.path("id").asText(), + method.path("name").asText(), + method.path("type").asText()); + } + } + + log.info("=== 测试通过: 未认证错误处理验证完成 ==="); + + } finally { + process.close(); + log.info("进程已关闭"); + } + } + + private String buildInitializeRequest(int id) throws Exception { + var fsNode = mapper.createObjectNode().put("readTextFile", true).put("writeTextFile", true); + var capNode = mapper.createObjectNode().put("terminal", true); + capNode.set("fs", fsNode); + var infoNode = + mapper.createObjectNode() + .put("name", "himarket-test") + .put("title", "HiMarket ACP Test") + .put("version", "1.0.0"); + var paramsNode = mapper.createObjectNode().put("protocolVersion", 1); + paramsNode.set("clientCapabilities", capNode); + paramsNode.set("clientInfo", infoNode); + var rootNode = + mapper.createObjectNode() + .put("jsonrpc", "2.0") + .put("id", id) + .put("method", "initialize"); + rootNode.set("params", paramsNode); + return mapper.writeValueAsString(rootNode); + } + + private String buildSessionNewRequest(int id, String cwd) throws Exception { 
+ var paramsNode = mapper.createObjectNode().put("cwd", cwd); + paramsNode.set("mcpServers", mapper.createArrayNode()); + var rootNode = + mapper.createObjectNode() + .put("jsonrpc", "2.0") + .put("id", id) + .put("method", "session/new"); + rootNode.set("params", paramsNode); + return mapper.writeValueAsString(rootNode); + } + + private String buildSessionPromptRequest(int id, String sessionId, String text) + throws Exception { + // prompt 是 ContentBlock 数组,格式为 [{"type": "text", "text": "..."}] + var textBlock = mapper.createObjectNode().put("type", "text").put("text", text); + var promptArray = mapper.createArrayNode().add(textBlock); + + var paramsNode = mapper.createObjectNode(); + paramsNode.put("sessionId", sessionId); + paramsNode.set("prompt", promptArray); + + var rootNode = + mapper.createObjectNode() + .put("jsonrpc", "2.0") + .put("id", id) + .put("method", "session/prompt"); + rootNode.set("params", paramsNode); + return mapper.writeValueAsString(rootNode); + } + + private boolean isCommandAvailable(String command) { + try { + Process p = new ProcessBuilder("which", command).redirectErrorStream(true).start(); + boolean exited = p.waitFor(5, TimeUnit.SECONDS); + return exited && p.exitValue() == 0; + } catch (Exception e) { + return false; + } + } + + private String truncate(String str, int maxLen) { + if (str == null || str.length() <= maxLen) { + return str; + } + return str.substring(0, maxLen) + "..."; + } +} diff --git a/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/session/SessionModelsTest.java b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/session/SessionModelsTest.java new file mode 100644 index 000000000..4725844db --- /dev/null +++ b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/session/SessionModelsTest.java @@ -0,0 +1,236 @@ +package com.alibaba.himarket.service.hicoding.session; + +import static org.junit.jupiter.api.Assertions.*; + +import 
com.alibaba.himarket.service.hicoding.websocket.CliProcess; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.io.TempDir; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * 集成测试:验证各 CLI 通过 ACP 协议 session/new 返回的 models 和 modes。 + * + *

依赖真实 CLI 工具,使用 {@code mvn test -Dgroups=integration} 显式启用。 + */ +@Tag("integration") +class SessionModelsTest { + + private static final Logger log = LoggerFactory.getLogger(SessionModelsTest.class); + private static final ObjectMapper mapper = new ObjectMapper(); + private static final int LOCAL_TIMEOUT = 30; + private static final int NPX_TIMEOUT = 90; + + @TempDir Path tempDir; + + record CliDef(String key, String command, List args) { + @Override + public String toString() { + return key; + } + + boolean isNpx() { + return "npx".equals(command); + } + + int timeout() { + return isNpx() ? NPX_TIMEOUT : LOCAL_TIMEOUT; + } + } + + static List cliProviders() { + return List.of( + new CliDef("kiro-cli", "kiro-cli", List.of("acp")), + new CliDef("qwen-code", "qwen", List.of("--acp"))); + } + + @ParameterizedTest(name = "session/new models & modes: {0}") + @MethodSource("cliProviders") + void testSessionNewModelsAndModes(CliDef cli) throws Exception { + if (!isCommandAvailable(cli.command())) { + log.warn("跳过 {}: 命令 '{}' 未安装", cli.key(), cli.command()); + org.junit.jupiter.api.Assumptions.assumeTrue( + false, cli.command() + " not installed, skipping"); + } + + Path cwd = tempDir.resolve(cli.key()); + Files.createDirectories(cwd); + + CliProcess process = + new CliProcess(cli.command(), cli.args(), cwd.toString(), Collections.emptyMap()); + + try { + process.start(); + assertTrue(process.isAlive(), cli.key() + " 进程应该处于运行状态"); + + AtomicInteger expectedId = new AtomicInteger(0); + CountDownLatch initLatch = new CountDownLatch(1); + CountDownLatch sessionLatch = new CountDownLatch(1); + AtomicReference initResponse = new AtomicReference<>(); + AtomicReference sessionResponse = new AtomicReference<>(); + + process.stdout() + .subscribe( + line -> { + log.info("[{}] STDOUT: {}", cli.key(), line); + try { + JsonNode node = mapper.readTree(line); + if (node.has("id") && node.has("result")) { + int id = node.get("id").asInt(); + if (id == 0) { + initResponse.set(line); + 
initLatch.countDown(); + } else if (id == 1) { + sessionResponse.set(line); + sessionLatch.countDown(); + } + } else if (node.has("error")) { + log.error("[{}] 错误响应: {}", cli.key(), line); + initLatch.countDown(); + sessionLatch.countDown(); + } + } catch (Exception e) { + // 非 JSON 行,忽略 + } + }); + + process.stderr().subscribe(line -> log.debug("[{}] STDERR: {}", cli.key(), line)); + + // Step 1: initialize + log.info("[{}] 发送 initialize 请求", cli.key()); + process.send(buildInitializeRequest()); + boolean initReceived = initLatch.await(cli.timeout(), TimeUnit.SECONDS); + assertTrue(initReceived, cli.key() + " 应在超时内返回 initialize 响应"); + assertNotNull(initResponse.get(), cli.key() + " initialize 响应不应为 null"); + + // Step 2: session/new + log.info("[{}] 发送 session/new 请求", cli.key()); + process.send(buildSessionNewRequest(cwd.toString())); + boolean sessionReceived = sessionLatch.await(cli.timeout(), TimeUnit.SECONDS); + assertTrue(sessionReceived, cli.key() + " 应在超时内返回 session/new 响应"); + + String rawSession = sessionResponse.get(); + assertNotNull(rawSession, cli.key() + " session/new 响应不应为 null"); + + // 解析 session/new 结果 + JsonNode root = mapper.readTree(rawSession); + JsonNode result = root.get("result"); + assertNotNull(result, cli.key() + " session/new 应包含 result"); + + // sessionId + assertTrue(result.has("sessionId"), cli.key() + " session/new result 应包含 sessionId"); + String sessionId = result.get("sessionId").asText(); + assertFalse(sessionId.isBlank(), cli.key() + " sessionId 不应为空"); + log.info("[{}] sessionId = {}", cli.key(), sessionId); + + // models + log.info("[{}] ===== Models =====", cli.key()); + if (result.has("models") && !result.get("models").isNull()) { + JsonNode models = result.get("models"); + if (models.has("availableModels")) { + JsonNode availableModels = models.get("availableModels"); + assertTrue(availableModels.isArray(), cli.key() + " availableModels 应为数组"); + log.info("[{}] 可用模型数量: {}", cli.key(), availableModels.size()); + for 
(JsonNode m : availableModels) { + String modelId = m.has("modelId") ? m.get("modelId").asText() : "N/A"; + String name = m.has("name") ? m.get("name").asText() : "N/A"; + log.info("[{}] model: id={}, name={}", cli.key(), modelId, name); + } + } + if (models.has("currentModelId")) { + log.info("[{}] 当前模型: {}", cli.key(), models.get("currentModelId").asText()); + } + } else { + log.info("[{}] 无 models 字段", cli.key()); + } + + // modes + log.info("[{}] ===== Modes =====", cli.key()); + if (result.has("modes") && !result.get("modes").isNull()) { + JsonNode modes = result.get("modes"); + if (modes.has("availableModes")) { + JsonNode availableModes = modes.get("availableModes"); + assertTrue(availableModes.isArray(), cli.key() + " availableModes 应为数组"); + log.info("[{}] 可用模式数量: {}", cli.key(), availableModes.size()); + for (JsonNode m : availableModes) { + String modeId = m.has("id") ? m.get("id").asText() : "N/A"; + String name = m.has("name") ? m.get("name").asText() : "N/A"; + String desc = m.has("description") ? 
m.get("description").asText() : ""; + log.info( + "[{}] mode: id={}, name={}, desc={}", + cli.key(), + modeId, + name, + desc); + } + } + if (modes.has("currentModeId")) { + log.info("[{}] 当前模式: {}", cli.key(), modes.get("currentModeId").asText()); + } + } else { + log.info("[{}] 无 modes 字段", cli.key()); + } + + log.info("[{}] ✅ session/new 握手成功!", cli.key()); + + } finally { + process.close(); + log.info("[{}] 进程已关闭", cli.key()); + } + } + + private String buildInitializeRequest() throws Exception { + var fsNode = mapper.createObjectNode().put("readTextFile", true).put("writeTextFile", true); + var capNode = mapper.createObjectNode().put("terminal", true); + capNode.set("fs", fsNode); + var infoNode = + mapper.createObjectNode() + .put("name", "himarket-test") + .put("title", "HiMarket ACP Test") + .put("version", "1.0.0"); + var paramsNode = mapper.createObjectNode().put("protocolVersion", 1); + paramsNode.set("clientCapabilities", capNode); + paramsNode.set("clientInfo", infoNode); + var rootNode = + mapper.createObjectNode() + .put("jsonrpc", "2.0") + .put("id", 0) + .put("method", "initialize"); + rootNode.set("params", paramsNode); + return mapper.writeValueAsString(rootNode); + } + + private String buildSessionNewRequest(String cwd) throws Exception { + var paramsNode = mapper.createObjectNode().put("cwd", cwd); + paramsNode.set("mcpServers", mapper.createArrayNode()); + var rootNode = + mapper.createObjectNode() + .put("jsonrpc", "2.0") + .put("id", 1) + .put("method", "session/new"); + rootNode.set("params", paramsNode); + return mapper.writeValueAsString(rootNode); + } + + private boolean isCommandAvailable(String command) { + try { + Process p = new ProcessBuilder("which", command).redirectErrorStream(true).start(); + boolean exited = p.waitFor(5, TimeUnit.SECONDS); + return exited && p.exitValue() == 0; + } catch (Exception e) { + return false; + } + } +} diff --git 
a/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/AcpPropertiesTest.java b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/AcpPropertiesTest.java new file mode 100644 index 000000000..8a033af24 --- /dev/null +++ b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/AcpPropertiesTest.java @@ -0,0 +1,132 @@ +package com.alibaba.himarket.service.hicoding.websocket; + +import static org.junit.jupiter.api.Assertions.*; + +import com.alibaba.himarket.config.AcpProperties; +import com.alibaba.himarket.config.AcpProperties.CliProviderConfig; +import com.alibaba.himarket.service.hicoding.sandbox.SandboxType; +import java.util.List; +import java.util.Map; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class AcpPropertiesTest { + + private AcpProperties properties; + + @BeforeEach + void setUp() { + properties = new AcpProperties(); + properties.setDefaultProvider("qodercli"); + + CliProviderConfig qoder = new CliProviderConfig(); + qoder.setDisplayName("Qoder CLI"); + qoder.setCommand("qodercli"); + qoder.setArgs("--acp"); + qoder.setCompatibleRuntimes(List.of(SandboxType.REMOTE, SandboxType.OPEN_SANDBOX)); + + CliProviderConfig kiro = new CliProviderConfig(); + kiro.setDisplayName("Kiro CLI"); + kiro.setCommand("kiro-cli"); + kiro.setArgs("acp"); + kiro.setCompatibleRuntimes(List.of(SandboxType.REMOTE)); + + CliProviderConfig claude = new CliProviderConfig(); + claude.setDisplayName("Claude Code"); + claude.setCommand("npx"); + claude.setArgs("claude-code-acp"); + claude.setEnv(Map.of("ANTHROPIC_API_KEY", "test-key")); + claude.setCompatibleRuntimes(List.of(SandboxType.REMOTE)); + + CliProviderConfig codex = new CliProviderConfig(); + codex.setDisplayName("Codex CLI"); + codex.setCommand("codex"); + codex.setArgs("--acp"); + codex.setCompatibleRuntimes(List.of(SandboxType.REMOTE)); + + properties.setProviders( + Map.of( + "qodercli", qoder, + "kiro-cli", 
kiro, + "claude-code", claude, + "codex", codex)); + } + + @Test + void testGetDefaultProvider() { + assertEquals("qodercli", properties.getDefaultProvider()); + } + + @Test + void testGetDefaultProviderConfig() { + CliProviderConfig config = properties.getDefaultProviderConfig(); + assertNotNull(config); + assertEquals("qodercli", config.getCommand()); + assertEquals("--acp", config.getArgs()); + assertEquals("Qoder CLI", config.getDisplayName()); + } + + @Test + void testGetProviderByKey() { + CliProviderConfig kiro = properties.getProvider("kiro-cli"); + assertNotNull(kiro); + assertEquals("kiro-cli", kiro.getCommand()); + assertEquals("acp", kiro.getArgs()); + } + + @Test + void testGetProviderWithEnv() { + CliProviderConfig claude = properties.getProvider("claude-code"); + assertNotNull(claude); + assertEquals("npx", claude.getCommand()); + assertEquals("claude-code-acp", claude.getArgs()); + assertEquals("test-key", claude.getEnv().get("ANTHROPIC_API_KEY")); + } + + @Test + void testGetUnknownProviderReturnsNull() { + assertNull(properties.getProvider("unknown-cli")); + } + + @Test + void testProviderCount() { + assertEquals(4, properties.getProviders().size()); + } + + @Test + void testChangeDefaultProvider() { + properties.setDefaultProvider("kiro-cli"); + CliProviderConfig config = properties.getDefaultProviderConfig(); + assertNotNull(config); + assertEquals("kiro-cli", config.getCommand()); + } + + @Test + void testDefaultRuntimeDefaultValue() { + AcpProperties fresh = new AcpProperties(); + assertEquals("remote", fresh.getDefaultRuntime()); + } + + @Test + void testSetDefaultRuntime() { + properties.setDefaultRuntime("remote"); + assertEquals("remote", properties.getDefaultRuntime()); + } + + @Test + void testCompatibleRuntimesForNativeCli() { + CliProviderConfig qoder = properties.getProvider("qodercli"); + assertNotNull(qoder.getCompatibleRuntimes()); + assertEquals(2, qoder.getCompatibleRuntimes().size()); + 
assertTrue(qoder.getCompatibleRuntimes().contains(SandboxType.REMOTE)); + assertTrue(qoder.getCompatibleRuntimes().contains(SandboxType.OPEN_SANDBOX)); + } + + @Test + void testCompatibleRuntimesForNodejsCli() { + CliProviderConfig claude = properties.getProvider("claude-code"); + assertNotNull(claude.getCompatibleRuntimes()); + assertEquals(1, claude.getCompatibleRuntimes().size()); + assertTrue(claude.getCompatibleRuntimes().contains(SandboxType.REMOTE)); + } +} diff --git a/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/CliProcessMultiCliTest.java b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/CliProcessMultiCliTest.java new file mode 100644 index 000000000..f8b0d49e7 --- /dev/null +++ b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/CliProcessMultiCliTest.java @@ -0,0 +1,187 @@ +package com.alibaba.himarket.service.hicoding.websocket; + +import static org.junit.jupiter.api.Assertions.*; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import org.junit.jupiter.api.io.TempDir; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * 集成测试:验证 CliProcess 能通过 ACP 协议成功连接多种 CLI 工具。 + * + *

测试逻辑: 1. 启动 CLI 子进程(ACP 模式) 2. 发送 initialize 请求(JSON-RPC 2.0) 3. + * 验证收到包含 protocolVersion 和 agentCapabilities 的响应 4. 优雅关闭进程 + * + *

仅测试本机已安装的 CLI 工具,未安装的自动跳过。 npx 类型的 provider 首次运行需要下载包,超时设为 90 秒。 + */ +class CliProcessMultiCliTest { + + private static final Logger log = LoggerFactory.getLogger(CliProcessMultiCliTest.class); + private static final ObjectMapper mapper = new ObjectMapper(); + + /** 本地 CLI 超时(秒) */ + private static final int LOCAL_TIMEOUT = 30; + + /** npx 类型 CLI 超时(秒),首次需要下载包 */ + private static final int NPX_TIMEOUT = 90; + + @TempDir Path tempDir; + + record CliDef(String key, String command, List args) { + @Override + public String toString() { + return key; + } + + boolean isNpx() { + return "npx".equals(command); + } + + int timeout() { + return isNpx() ? NPX_TIMEOUT : LOCAL_TIMEOUT; + } + } + + static List cliProviders() { + return List.of( + new CliDef("kiro-cli", "kiro-cli", List.of("acp")), + new CliDef("qwen-code", "qwen", List.of("--acp"))); + } + + @ParameterizedTest(name = "ACP initialize: {0}") + @MethodSource("cliProviders") + void testAcpInitialize(CliDef cli) throws Exception { + if (!isCommandAvailable(cli.command())) { + log.warn("跳过 {}: 命令 '{}' 未安装", cli.key(), cli.command()); + org.junit.jupiter.api.Assumptions.assumeTrue( + false, cli.command() + " not installed, skipping"); + } + + Path cwd = tempDir.resolve(cli.key()); + Files.createDirectories(cwd); + + CliProcess process = + new CliProcess(cli.command(), cli.args(), cwd.toString(), Collections.emptyMap()); + + try { + log.info( + "启动 {} (command={} {}, timeout={}s)", + cli.key(), + cli.command(), + cli.args(), + cli.timeout()); + process.start(); + assertTrue(process.isAlive(), cli.key() + " 进程应该处于运行状态"); + + CountDownLatch latch = new CountDownLatch(1); + AtomicReference responseRef = new AtomicReference<>(); + AtomicReference errorRef = new AtomicReference<>(); + + // 订阅 stdout + process.stdout() + .subscribe( + line -> { + log.info("[{}] STDOUT: {}", cli.key(), line); + if (line.contains("\"result\"") && line.contains("\"id\"")) { + responseRef.set(line); + latch.countDown(); + } else if 
(line.contains("\"error\"")) { + errorRef.set(line); + latch.countDown(); + } + }); + + // 订阅 stderr 用于调试 + process.stderr().subscribe(line -> log.debug("[{}] STDERR: {}", cli.key(), line)); + + // 构建 ACP initialize 请求 + String initRequest = buildInitializeRequest(); + log.info("[{}] 发送 initialize 请求", cli.key()); + process.send(initRequest); + + // 等待响应 + boolean received = latch.await(cli.timeout(), TimeUnit.SECONDS); + + // 如果进程已退出,给出更有用的错误信息 + if (!received && !process.isAlive()) { + fail(cli.key() + " 进程已意外退出,未返回 initialize 响应"); + } + assertTrue(received, cli.key() + " 应在 " + cli.timeout() + " 秒内返回 initialize 响应"); + + // 检查是否收到错误响应 + if (errorRef.get() != null && responseRef.get() == null) { + fail(cli.key() + " 返回了错误响应: " + errorRef.get()); + } + + // 解析并验证响应 + String response = responseRef.get(); + assertNotNull(response, cli.key() + " 响应不应为 null"); + log.info( + "[{}] 收到响应: {}", + cli.key(), + response.length() > 500 ? response.substring(0, 500) + "..." : response); + + JsonNode root = mapper.readTree(response); + assertEquals("2.0", root.get("jsonrpc").asText(), cli.key() + " 响应应为 JSON-RPC 2.0"); + assertEquals(0, root.get("id").asInt(), cli.key() + " 响应 id 应匹配请求 id"); + + JsonNode result = root.get("result"); + assertNotNull(result, cli.key() + " 响应应包含 result 字段"); + assertTrue(result.has("protocolVersion"), cli.key() + " result 应包含 protocolVersion"); + assertTrue( + result.has("agentCapabilities") || result.has("agentInfo"), + cli.key() + " result 应包含 agentCapabilities 或 agentInfo"); + + log.info( + "[{}] ✅ ACP initialize 握手成功! 
protocolVersion={}", + cli.key(), + result.get("protocolVersion")); + + } finally { + process.close(); + log.info("[{}] 进程已关闭", cli.key()); + } + } + + private String buildInitializeRequest() throws Exception { + var fsNode = mapper.createObjectNode().put("readTextFile", true).put("writeTextFile", true); + var capNode = mapper.createObjectNode().put("terminal", true); + capNode.set("fs", fsNode); + var infoNode = + mapper.createObjectNode() + .put("name", "himarket-test") + .put("title", "HiMarket ACP Test") + .put("version", "1.0.0"); + var paramsNode = mapper.createObjectNode().put("protocolVersion", 1); + paramsNode.set("clientCapabilities", capNode); + paramsNode.set("clientInfo", infoNode); + var rootNode = + mapper.createObjectNode() + .put("jsonrpc", "2.0") + .put("id", 0) + .put("method", "initialize"); + rootNode.set("params", paramsNode); + return mapper.writeValueAsString(rootNode); + } + + private boolean isCommandAvailable(String command) { + try { + Process p = new ProcessBuilder("which", command).redirectErrorStream(true).start(); + boolean exited = p.waitFor(5, TimeUnit.SECONDS); + return exited && p.exitValue() == 0; + } catch (Exception e) { + return false; + } + } +} diff --git a/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingAuthenticationTest.java b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingAuthenticationTest.java new file mode 100644 index 000000000..95b9cfd4b --- /dev/null +++ b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingAuthenticationTest.java @@ -0,0 +1,244 @@ +package com.alibaba.himarket.service.hicoding.websocket; + +import static org.junit.jupiter.api.Assertions.*; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CountDownLatch; 
+import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.io.TempDir; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * 集成测试:验证各 CLI 在未认证状态下的行为。 + * + *

依赖真实 CLI 工具,使用 {@code mvn test -Dgroups=integration} 显式启用。 + */ +@Tag("integration") +class HiCodingAuthenticationTest { + + private static final Logger log = LoggerFactory.getLogger(HiCodingAuthenticationTest.class); + private static final ObjectMapper mapper = new ObjectMapper(); + private static final int TIMEOUT_SECONDS = 30; + private static final int KIRO_TIMEOUT_SECONDS = 120; // Kiro CLI 在隔离 HOME 下需要更长时间 + + @TempDir Path tempDir; + + record CliDef(String key, String command, List args) { + @Override + public String toString() { + return key; + } + } + + static List cliProviders() { + return List.of( + new CliDef("qodercli", "qodercli", List.of("--acp")), + new CliDef("kiro-cli", "kiro-cli", List.of("acp")), + new CliDef("qwen-code", "qwen", List.of("--acp"))); + } + + @ParameterizedTest(name = "ACP authentication flow: {0}") + @MethodSource("cliProviders") + void testAuthenticationRequired(CliDef cli) throws Exception { + if (!isCommandAvailable(cli.command())) { + log.warn("跳过 {}: 命令 '{}' 未安装", cli.key(), cli.command()); + org.junit.jupiter.api.Assumptions.assumeTrue( + false, cli.command() + " not installed, skipping"); + } + + // 创建隔离的工作目录和 HOME 目录 + Path cwd = tempDir.resolve(cli.key()); + Path isolatedHome = tempDir.resolve(cli.key() + "-home"); + Files.createDirectories(cwd); + Files.createDirectories(isolatedHome); + + // 使用隔离的 HOME 目录启动进程 + Map extraEnv = Map.of("HOME", isolatedHome.toString()); + CliProcess process = new CliProcess(cli.command(), cli.args(), cwd.toString(), extraEnv); + + try { + process.start(); + assertTrue(process.isAlive(), cli.key() + " 进程应该处于运行状态"); + + AtomicInteger requestId = new AtomicInteger(0); + CountDownLatch initLatch = new CountDownLatch(1); + CountDownLatch sessionLatch = new CountDownLatch(1); + + AtomicReference initResponse = new AtomicReference<>(); + AtomicReference sessionResponse = new AtomicReference<>(); + + process.stdout() + .subscribe( + line -> { + log.info("[{}] STDOUT: {}", cli.key(), line); + try { 
+ JsonNode node = mapper.readTree(line); + if (node.has("id")) { + int id = node.get("id").asInt(); + if (id == 0) { + initResponse.set(line); + initLatch.countDown(); + } else if (id == 1) { + sessionResponse.set(line); + sessionLatch.countDown(); + } + } + } catch (Exception e) { + log.debug("[{}] 非 JSON 输出: {}", cli.key(), line); + } + }); + + process.stderr().subscribe(line -> log.debug("[{}] STDERR: {}", cli.key(), line)); + + // Step 1: initialize - 验证返回 authMethods + log.info("[{}] === Step 1: initialize ===", cli.key()); + process.send(buildInitializeRequest(requestId.getAndIncrement())); + + // Kiro CLI 在隔离 HOME 下无法正常工作(已测试 2 分钟超时仍无响应) + // 这是 Kiro CLI 的已知问题,跳过测试 + int timeout = cli.key().equals("kiro-cli") ? KIRO_TIMEOUT_SECONDS : TIMEOUT_SECONDS; + boolean initReceived = initLatch.await(timeout, TimeUnit.SECONDS); + + if (!initReceived && cli.key().equals("kiro-cli")) { + log.warn( + "[{}] ⚠️ initialize 超时 {} 秒,Kiro CLI 在隔离 HOME 下无法正常工作,跳过测试", + cli.key(), + timeout); + return; + } + + assertTrue(initReceived, cli.key() + " 应在超时内返回 initialize 响应"); + assertNotNull(initResponse.get(), cli.key() + " initialize 响应不应为 null"); + + // 解析并验证 authMethods + JsonNode initResult = mapper.readTree(initResponse.get()).get("result"); + assertNotNull(initResult, cli.key() + " initialize 应包含 result"); + + if (initResult.has("authMethods") && !initResult.get("authMethods").isNull()) { + JsonNode authMethods = initResult.get("authMethods"); + assertTrue(authMethods.isArray(), cli.key() + " authMethods 应为数组"); + log.info("[{}] authMethods 数量: {}", cli.key(), authMethods.size()); + + for (JsonNode method : authMethods) { + String id = method.has("id") ? method.get("id").asText() : "N/A"; + String name = method.has("name") ? method.get("name").asText() : "N/A"; + String type = method.has("type") ? 
method.get("type").asText() : "N/A"; + log.info( + "[{}] authMethod: id={}, name={}, type={}", + cli.key(), + id, + name, + type); + + // 验证 authMethod 基本结构 + assertTrue(method.has("id"), cli.key() + " authMethod 应有 id"); + assertTrue(method.has("name"), cli.key() + " authMethod 应有 name"); + } + } else { + log.warn("[{}] initialize 响应中没有 authMethods 字段", cli.key()); + } + + log.info("[{}] ✅ initialize 成功", cli.key()); + + // Step 2: session/new - 验证未认证时返回错误 + log.info("[{}] === Step 2: session/new (expect auth error) ===", cli.key()); + process.send(buildSessionNewRequest(requestId.getAndIncrement(), cwd.toString())); + boolean sessionReceived = sessionLatch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + assertTrue(sessionReceived, cli.key() + " 应在超时内返回 session/new 响应"); + assertNotNull(sessionResponse.get(), cli.key() + " session/new 响应不应为 null"); + + JsonNode sessionRoot = mapper.readTree(sessionResponse.get()); + + // 验证返回错误 + assertTrue(sessionRoot.has("error"), cli.key() + " 未认证时 session/new 应返回错误"); + + JsonNode error = sessionRoot.get("error"); + int code = error.has("code") ? error.get("code").asInt() : -1; + String message = error.has("message") ? error.get("message").asText() : "Unknown error"; + + log.info("[{}] 错误码: {}, 消息: {}", cli.key(), code, message); + + // -32000 是认证错误的标准错误码 + assertEquals(-32000, code, cli.key() + " 认证错误码应为 -32000"); + + // 验证错误中包含 authMethods + if (error.has("data") && !error.get("data").isNull()) { + JsonNode data = error.get("data"); + if (data.has("authMethods")) { + JsonNode authMethods = data.get("authMethods"); + assertTrue(authMethods.isArray(), cli.key() + " 错误 data 中的 authMethods 应为数组"); + log.info("[{}] 错误响应中包含 authMethods,数量: {}", cli.key(), authMethods.size()); + + // 验证 authMethods 结构 + for (JsonNode method : authMethods) { + String id = method.has("id") ? method.get("id").asText() : "N/A"; + String name = method.has("name") ? 
method.get("name").asText() : "N/A"; + log.info("[{}] authMethod: id={}, name={}", cli.key(), id, name); + } + } else if (data.has("details")) { + log.info("[{}] 错误详情: {}", cli.key(), data.get("details").asText()); + } + } + + log.info("[{}] ✅ 认证流程验证成功 - session/new 正确返回了认证错误", cli.key()); + + } finally { + process.close(); + log.info("[{}] 进程已关闭", cli.key()); + } + } + + private String buildInitializeRequest(int id) throws Exception { + var fsNode = mapper.createObjectNode().put("readTextFile", true).put("writeTextFile", true); + var capNode = mapper.createObjectNode().put("terminal", true); + capNode.set("fs", fsNode); + var infoNode = + mapper.createObjectNode() + .put("name", "himarket-test") + .put("title", "HiMarket ACP Test") + .put("version", "1.0.0"); + var paramsNode = mapper.createObjectNode().put("protocolVersion", 1); + paramsNode.set("clientCapabilities", capNode); + paramsNode.set("clientInfo", infoNode); + var rootNode = + mapper.createObjectNode() + .put("jsonrpc", "2.0") + .put("id", id) + .put("method", "initialize"); + rootNode.set("params", paramsNode); + return mapper.writeValueAsString(rootNode); + } + + private String buildSessionNewRequest(int id, String cwd) throws Exception { + var paramsNode = mapper.createObjectNode().put("cwd", cwd); + paramsNode.set("mcpServers", mapper.createArrayNode()); + var rootNode = + mapper.createObjectNode() + .put("jsonrpc", "2.0") + .put("id", id) + .put("method", "session/new"); + rootNode.set("params", paramsNode); + return mapper.writeValueAsString(rootNode); + } + + private boolean isCommandAvailable(String command) { + try { + Process p = new ProcessBuilder("which", command).redirectErrorStream(true).start(); + boolean exited = p.waitFor(5, TimeUnit.SECONDS); + return exited && p.exitValue() == 0; + } catch (Exception e) { + return false; + } + } +} diff --git a/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingHandshakeInterceptorTest.java 
b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingHandshakeInterceptorTest.java new file mode 100644 index 000000000..9012c0f35 --- /dev/null +++ b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingHandshakeInterceptorTest.java @@ -0,0 +1,151 @@ +package com.alibaba.himarket.service.hicoding.websocket; + +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.Mockito.*; + +import java.net.URI; +import java.util.HashMap; +import java.util.Map; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.http.server.ServerHttpRequest; +import org.springframework.http.server.ServerHttpResponse; +import org.springframework.web.socket.WebSocketHandler; + +/** + * HiCodingHandshakeInterceptor 单元测试。 + * 验证从 WebSocket 握手 URL 中提取查询参数(provider、runtime)并存入 session attributes。 + */ +class HiCodingHandshakeInterceptorTest { + + private HiCodingHandshakeInterceptor interceptor; + private ServerHttpResponse response; + private WebSocketHandler wsHandler; + + @BeforeEach + void setUp() { + interceptor = new HiCodingHandshakeInterceptor(); + response = mock(ServerHttpResponse.class); + wsHandler = mock(WebSocketHandler.class); + } + + // ===== runtime 参数提取 ===== + + @Test + void beforeHandshake_runtimeRemote_storedInAttributes() throws Exception { + Map attributes = new HashMap<>(); + ServerHttpRequest request = mockRequest("ws://localhost/acp?runtime=remote"); + + interceptor.beforeHandshake(request, response, wsHandler, attributes); + + assertEquals("remote", attributes.get("runtime")); + } + + @Test + void beforeHandshake_runtimeK8s_storedInAttributes() throws Exception { + Map attributes = new HashMap<>(); + ServerHttpRequest request = mockRequest("ws://localhost/acp?runtime=k8s"); + + interceptor.beforeHandshake(request, response, wsHandler, attributes); + + assertEquals("k8s", attributes.get("runtime")); + } + + @Test + void 
beforeHandshake_noRuntimeParam_attributeNotSet() throws Exception { + Map attributes = new HashMap<>(); + ServerHttpRequest request = mockRequest("ws://localhost/acp"); + + interceptor.beforeHandshake(request, response, wsHandler, attributes); + + assertNull(attributes.get("runtime")); + } + + @Test + void beforeHandshake_blankRuntimeParam_attributeNotSet() throws Exception { + Map attributes = new HashMap<>(); + ServerHttpRequest request = mockRequest("ws://localhost/acp?runtime="); + + interceptor.beforeHandshake(request, response, wsHandler, attributes); + + assertNull(attributes.get("runtime")); + } + + @Test + void beforeHandshake_runtimeUpperCase_storedAsIs() throws Exception { + Map attributes = new HashMap<>(); + ServerHttpRequest request = mockRequest("ws://localhost/acp?runtime=K8S"); + + interceptor.beforeHandshake(request, response, wsHandler, attributes); + + assertEquals("K8S", attributes.get("runtime")); + } + + // ===== runtime + provider 同时提取 ===== + + @Test + void beforeHandshake_runtimeAndProvider_bothStored() throws Exception { + Map attributes = new HashMap<>(); + ServerHttpRequest request = mockRequest("ws://localhost/acp?provider=kiro-cli&runtime=k8s"); + + interceptor.beforeHandshake(request, response, wsHandler, attributes); + + assertEquals("kiro-cli", attributes.get("provider")); + assertEquals("k8s", attributes.get("runtime")); + } + + @Test + void beforeHandshake_allParams_allStored() throws Exception { + Map attributes = new HashMap<>(); + ServerHttpRequest request = + mockRequest("ws://localhost/acp?token=sometoken&provider=qodercli&runtime=remote"); + + interceptor.beforeHandshake(request, response, wsHandler, attributes); + + assertEquals("qodercli", attributes.get("provider")); + assertEquals("remote", attributes.get("runtime")); + } + + // ===== provider 参数提取(回归测试) ===== + + @Test + void beforeHandshake_providerParam_storedInAttributes() throws Exception { + Map attributes = new HashMap<>(); + ServerHttpRequest request = 
mockRequest("ws://localhost/acp?provider=kiro-cli"); + + interceptor.beforeHandshake(request, response, wsHandler, attributes); + + assertEquals("kiro-cli", attributes.get("provider")); + } + + @Test + void beforeHandshake_noProvider_attributeNotSet() throws Exception { + Map attributes = new HashMap<>(); + ServerHttpRequest request = mockRequest("ws://localhost/acp"); + + interceptor.beforeHandshake(request, response, wsHandler, attributes); + + assertNull(attributes.get("provider")); + } + + // ===== 无 token 连接拒绝 ===== + + @Test + void beforeHandshake_noToken_rejectsConnection() throws Exception { + Map attributes = new HashMap<>(); + ServerHttpRequest request = mockRequest("ws://localhost/acp?runtime=remote"); + + boolean result = interceptor.beforeHandshake(request, response, wsHandler, attributes); + + assertFalse(result); + assertNull(attributes.get("userId")); + } + + // ===== Helper ===== + + private ServerHttpRequest mockRequest(String uriString) throws Exception { + ServerHttpRequest request = mock(ServerHttpRequest.class); + when(request.getURI()).thenReturn(new URI(uriString)); + return request; + } +} diff --git a/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingPromptExecutionTest.java b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingPromptExecutionTest.java new file mode 100644 index 000000000..b7b09c3dd --- /dev/null +++ b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingPromptExecutionTest.java @@ -0,0 +1,245 @@ +package com.alibaba.himarket.service.hicoding.websocket; + +import static org.junit.jupiter.api.Assertions.*; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import 
java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.io.TempDir; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * 集成测试:验证各 CLI 通过 ACP 协议执行 prompt 的完整流程。 + * + *

依赖真实 CLI 工具,使用 {@code mvn test -Dgroups=integration} 显式启用。 + */ +@Tag("integration") +class HiCodingPromptExecutionTest { + + private static final Logger log = LoggerFactory.getLogger(HiCodingPromptExecutionTest.class); + private static final ObjectMapper mapper = new ObjectMapper(); + private static final int TIMEOUT_SECONDS = 60; + + @TempDir Path tempDir; + + record CliDef(String key, String command, List args) { + @Override + public String toString() { + return key; + } + } + + static List cliProviders() { + return List.of( + new CliDef("kiro-cli", "kiro-cli", List.of("acp")), + new CliDef("qwen-code", "qwen", List.of("--acp"))); + } + + @ParameterizedTest(name = "ACP prompt execution: {0}") + @MethodSource("cliProviders") + void testPromptExecution(CliDef cli) throws Exception { + if (!isCommandAvailable(cli.command())) { + log.warn("跳过 {}: 命令 '{}' 未安装", cli.key(), cli.command()); + org.junit.jupiter.api.Assumptions.assumeTrue( + false, cli.command() + " not installed, skipping"); + } + + Path cwd = tempDir.resolve(cli.key()); + Files.createDirectories(cwd); + + CliProcess process = + new CliProcess(cli.command(), cli.args(), cwd.toString(), Collections.emptyMap()); + + try { + process.start(); + assertTrue(process.isAlive(), cli.key() + " 进程应该处于运行状态"); + + AtomicInteger requestId = new AtomicInteger(0); + CountDownLatch initLatch = new CountDownLatch(1); + CountDownLatch sessionLatch = new CountDownLatch(1); + CountDownLatch promptLatch = new CountDownLatch(1); + + AtomicReference initResponse = new AtomicReference<>(); + AtomicReference sessionResponse = new AtomicReference<>(); + AtomicReference promptResponse = new AtomicReference<>(); + AtomicReference errorResponse = new AtomicReference<>(); + + process.stdout() + .subscribe( + line -> { + log.info("[{}] STDOUT: {}", cli.key(), line); + try { + JsonNode node = mapper.readTree(line); + if (node.has("id")) { + int id = node.get("id").asInt(); + if (id == 0) { + initResponse.set(line); + 
initLatch.countDown(); + } else if (id == 1) { + sessionResponse.set(line); + sessionLatch.countDown(); + } else if (id == 2) { + if (node.has("result") || node.has("error")) { + promptResponse.set(line); + promptLatch.countDown(); + } + } + } else if (node.has("error")) { + log.error("[{}] 错误响应: {}", cli.key(), line); + errorResponse.set(line); + initLatch.countDown(); + sessionLatch.countDown(); + promptLatch.countDown(); + } + } catch (Exception e) { + // 非 JSON 行,可能是流式输出,记录但不处理 + log.debug("[{}] 非 JSON 输出: {}", cli.key(), line); + } + }); + + process.stderr().subscribe(line -> log.debug("[{}] STDERR: {}", cli.key(), line)); + + // Step 1: initialize + log.info("[{}] === Step 1: initialize ===", cli.key()); + process.send(buildInitializeRequest(requestId.getAndIncrement())); + boolean initReceived = initLatch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + assertTrue(initReceived, cli.key() + " 应在超时内返回 initialize 响应"); + assertNotNull(initResponse.get(), cli.key() + " initialize 响应不应为 null"); + log.info("[{}] ✅ initialize 成功", cli.key()); + + // Step 2: session/new + log.info("[{}] === Step 2: session/new ===", cli.key()); + process.send(buildSessionNewRequest(requestId.getAndIncrement(), cwd.toString())); + boolean sessionReceived = sessionLatch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + assertTrue(sessionReceived, cli.key() + " 应在超时内返回 session/new 响应"); + assertNotNull(sessionResponse.get(), cli.key() + " session/new 响应不应为 null"); + + // 解析 sessionId + JsonNode sessionResult = mapper.readTree(sessionResponse.get()).get("result"); + String sessionId = sessionResult.get("sessionId").asText(); + log.info("[{}] sessionId = {}", cli.key(), sessionId); + log.info("[{}] ✅ session/new 成功", cli.key()); + + // Step 3: session/prompt + log.info("[{}] === Step 3: session/prompt ===", cli.key()); + String simplePrompt = "Hello, please respond with 'Hi from " + cli.key() + "'"; + process.send( + buildSessionPromptRequest( + requestId.getAndIncrement(), sessionId, 
simplePrompt)); + + boolean promptReceived = promptLatch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + assertTrue(promptReceived, cli.key() + " 应在超时内返回 session/prompt 响应"); + + // 解析 prompt 响应 + String rawPromptResponse = promptResponse.get(); + assertNotNull(rawPromptResponse, cli.key() + " session/prompt 响应不应为 null"); + + JsonNode promptRoot = mapper.readTree(rawPromptResponse); + log.info("[{}] session/prompt 响应: {}", cli.key(), rawPromptResponse); + + if (promptRoot.has("error")) { + JsonNode error = promptRoot.get("error"); + int code = error.has("code") ? error.get("code").asInt() : -1; + String message = + error.has("message") ? error.get("message").asText() : "Unknown error"; + + // -32000 表示需要认证,这是预期的行为 + if (code == -32000) { + log.info("[{}] ⚠️ session/prompt 需要认证 (code=-32000): {}", cli.key(), message); + log.info("[{}] ✅ 测试通过 - CLI 正确返回了认证需求", cli.key()); + return; + } + + fail(cli.key() + " session/prompt 返回错误: " + message); + } + + JsonNode promptResult = promptRoot.get("result"); + assertNotNull(promptResult, cli.key() + " session/prompt 应包含 result"); + + // 验证响应基本结构 - session/prompt 返回的是 stopReason + if (promptResult.has("stopReason")) { + String stopReason = promptResult.get("stopReason").asText(); + log.info("[{}] stopReason: {}", cli.key(), stopReason); + } + + log.info("[{}] ✅ session/prompt 成功", cli.key()); + + } finally { + process.close(); + log.info("[{}] 进程已关闭", cli.key()); + } + } + + private String buildInitializeRequest(int id) throws Exception { + var fsNode = mapper.createObjectNode().put("readTextFile", true).put("writeTextFile", true); + var capNode = mapper.createObjectNode().put("terminal", true); + capNode.set("fs", fsNode); + var infoNode = + mapper.createObjectNode() + .put("name", "himarket-test") + .put("title", "HiMarket ACP Test") + .put("version", "1.0.0"); + var paramsNode = mapper.createObjectNode().put("protocolVersion", 1); + paramsNode.set("clientCapabilities", capNode); + paramsNode.set("clientInfo", infoNode); + var 
rootNode = + mapper.createObjectNode() + .put("jsonrpc", "2.0") + .put("id", id) + .put("method", "initialize"); + rootNode.set("params", paramsNode); + return mapper.writeValueAsString(rootNode); + } + + private String buildSessionNewRequest(int id, String cwd) throws Exception { + var paramsNode = mapper.createObjectNode().put("cwd", cwd); + paramsNode.set("mcpServers", mapper.createArrayNode()); + var rootNode = + mapper.createObjectNode() + .put("jsonrpc", "2.0") + .put("id", id) + .put("method", "session/new"); + rootNode.set("params", paramsNode); + return mapper.writeValueAsString(rootNode); + } + + private String buildSessionPromptRequest(int id, String sessionId, String text) + throws Exception { + // prompt 是 ContentBlock 数组,格式为 [{"type": "text", "text": "..."}] + var textBlock = mapper.createObjectNode().put("type", "text").put("text", text); + var promptArray = mapper.createArrayNode().add(textBlock); + + var paramsNode = mapper.createObjectNode(); + paramsNode.put("sessionId", sessionId); + paramsNode.set("prompt", promptArray); + + var rootNode = + mapper.createObjectNode() + .put("jsonrpc", "2.0") + .put("id", id) + .put("method", "session/prompt"); + rootNode.set("params", paramsNode); + return mapper.writeValueAsString(rootNode); + } + + private boolean isCommandAvailable(String command) { + try { + Process p = new ProcessBuilder("which", command).redirectErrorStream(true).start(); + boolean exited = p.waitFor(5, TimeUnit.SECONDS); + return exited && p.exitValue() == 0; + } catch (Exception e) { + return false; + } + } +} diff --git a/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingWebSocketE2ETest.java b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingWebSocketE2ETest.java new file mode 100644 index 000000000..b9cf96cdd --- /dev/null +++ b/himarket-server/src/test/java/com/alibaba/himarket/service/hicoding/websocket/HiCodingWebSocketE2ETest.java @@ -0,0 +1,167 @@ +package 
com.alibaba.himarket.service.hicoding.websocket; + +import static org.junit.jupiter.api.Assertions.*; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.net.URI; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import org.junit.jupiter.api.Assumptions; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.web.socket.TextMessage; +import org.springframework.web.socket.WebSocketHttpHeaders; +import org.springframework.web.socket.WebSocketSession; +import org.springframework.web.socket.client.standard.StandardWebSocketClient; +import org.springframework.web.socket.handler.TextWebSocketHandler; + +/** + * 端到端测试:通过 WebSocket 连接后端验证 ACP 握手。 + * + *

依赖运行中的后端服务,使用 {@code mvn test -Dgroups=integration} 显式启用。 + */ +@Tag("integration") +class HiCodingWebSocketE2ETest { + + private static final Logger log = LoggerFactory.getLogger(HiCodingWebSocketE2ETest.class); + private static final ObjectMapper mapper = new ObjectMapper(); + private static final String WS_BASE = "ws://localhost:8080/ws/acp"; + + record ProviderDef(String key, int timeoutSec) { + @Override + public String toString() { + return key; + } + } + + static List providers() { + return List.of(new ProviderDef("qodercli", 15), new ProviderDef("kiro-cli", 15)); + } + + @ParameterizedTest(name = "E2E WebSocket ACP: {0}") + @MethodSource("providers") + void testWebSocketAcpInitialize(ProviderDef provider) throws Exception { + if (!isServerRunning()) { + Assumptions.assumeTrue(false, "后端服务未在 localhost:8080 运行,跳过 E2E 测试"); + } + + String wsUrl = WS_BASE + "?provider=" + provider.key(); + log.info("[{}] 连接 WebSocket: {}", provider.key(), wsUrl); + + CountDownLatch responseLatch = new CountDownLatch(1); + AtomicReference responseRef = new AtomicReference<>(); + + StandardWebSocketClient wsClient = new StandardWebSocketClient(); + WebSocketSession session = + wsClient.execute( + new TextWebSocketHandler() { + @Override + protected void handleTextMessage( + WebSocketSession s, TextMessage message) { + String payload = message.getPayload(); + log.info( + "[{}] 收到消息: {}", + provider.key(), + payload.length() > 300 + ? payload.substring(0, 300) + "..." 
+ : payload); + if (payload.contains("\"result\"") + && payload.contains("\"id\"")) { + responseRef.set(payload); + responseLatch.countDown(); + } + } + }, + new WebSocketHttpHeaders(), + URI.create(wsUrl)) + .get(10, TimeUnit.SECONDS); + + try { + assertTrue(session.isOpen(), provider.key() + " WebSocket 应已连接"); + log.info("[{}] WebSocket 已连接", provider.key()); + + // 发送 ACP initialize 请求 + String initRequest = buildInitializeRequest(); + log.info("[{}] 发送 initialize 请求", provider.key()); + session.sendMessage(new TextMessage(initRequest)); + + // 等待响应 + boolean received = responseLatch.await(provider.timeoutSec(), TimeUnit.SECONDS); + assertTrue( + received, + provider.key() + " 应在 " + provider.timeoutSec() + " 秒内返回 initialize 响应"); + + // 验证响应 + String response = responseRef.get(); + assertNotNull(response, provider.key() + " 响应不应为 null"); + + JsonNode root = mapper.readTree(response); + assertEquals("2.0", root.get("jsonrpc").asText()); + assertEquals(0, root.get("id").asInt()); + + JsonNode result = root.get("result"); + assertNotNull(result); + assertTrue(result.has("protocolVersion")); + assertEquals(1, result.get("protocolVersion").asInt()); + + if (result.has("agentInfo")) { + log.info( + "[{}] ✅ E2E ACP 握手成功! agent={}, version={}", + provider.key(), + result.get("agentInfo").get("name").asText(), + result.get("agentInfo").get("version").asText()); + } else { + log.info( + "[{}] ✅ E2E ACP 握手成功! 
protocolVersion={}", + provider.key(), + result.get("protocolVersion")); + } + + } finally { + if (session.isOpen()) { + session.close(); + } + log.info("[{}] WebSocket 已关闭", provider.key()); + } + } + + private String buildInitializeRequest() throws Exception { + var fsNode = mapper.createObjectNode().put("readTextFile", true).put("writeTextFile", true); + var capNode = mapper.createObjectNode().put("terminal", true); + capNode.set("fs", fsNode); + var infoNode = + mapper.createObjectNode() + .put("name", "himarket-e2e-test") + .put("title", "HiMarket E2E Test") + .put("version", "1.0.0"); + var paramsNode = mapper.createObjectNode().put("protocolVersion", 1); + paramsNode.set("clientCapabilities", capNode); + paramsNode.set("clientInfo", infoNode); + var rootNode = + mapper.createObjectNode() + .put("jsonrpc", "2.0") + .put("id", 0) + .put("method", "initialize"); + rootNode.set("params", paramsNode); + return mapper.writeValueAsString(rootNode); + } + + @SuppressWarnings("deprecation") + private boolean isServerRunning() { + try { + var conn = new java.net.URL("http://localhost:8080/cli-providers").openConnection(); + conn.setConnectTimeout(3000); + conn.setReadTimeout(3000); + conn.connect(); + return true; + } catch (Exception e) { + return false; + } + } +} diff --git a/himarket-web/himarket-admin/.npmrc b/himarket-web/himarket-admin/.npmrc new file mode 100644 index 000000000..521a9f7c0 --- /dev/null +++ b/himarket-web/himarket-admin/.npmrc @@ -0,0 +1 @@ +legacy-peer-deps=true diff --git a/himarket-web/himarket-admin/package.json b/himarket-web/himarket-admin/package.json index c0fa83134..14dc18a82 100644 --- a/himarket-web/himarket-admin/package.json +++ b/himarket-web/himarket-admin/package.json @@ -15,12 +15,15 @@ "dependencies": { "@ant-design/icons": "^6.1.0", "@babel/runtime": "^7.24.0", + "@monaco-editor/react": "^4.7.0", "antd": "^6.0.1", "axios": "^1.7.9", "clsx": "^2.1.1", "dayjs": "^1.11.10", "echarts": "^5.5.0", + "github-markdown-css": "^5.9.0", 
"helmet": "^7.1.0", + "highlight.js": "^11.11.1", "js-yaml": "^4.1.0", "monaco-editor": "^0.52.2", "postcss": "^8.5.6", @@ -30,6 +33,8 @@ "react-markdown-editor-lite": "^1.3.4", "react-monaco-editor": "^0.59.0", "react-router-dom": "^6.28.0", + "rehype-highlight": "^7.0.2", + "remark-frontmatter": "^5.0.0", "remark-gfm": "^4.0.1", "swagger-ui-react": "^5.29.0", "tailwind-merge": "^3.3.1", @@ -49,21 +54,22 @@ "compression": "^1.8.0", "cors": "^2.8.5", "eslint": "^9.30.1", - "eslint-plugin-react-hooks": "^5.2.0", - "eslint-plugin-react-refresh": "^0.4.20", "eslint-config-prettier": "^10.1.8", "eslint-plugin-prettier": "^5.5.4", - "lint-staged": "^16.2.7", - "prettier": "^3.7.4", + "eslint-plugin-react-hooks": "^5.2.0", + "eslint-plugin-react-refresh": "^0.4.20", "express": "^4.21.2", "globals": "^16.3.0", + "lint-staged": "^16.2.7", "path": "^0.12.7", "postcss": "^8.5.6", + "prettier": "^3.7.4", "tailwindcss": "^3.4.17", "typescript": "^5.6.3", - "url": "^0.11.4", "typescript-eslint": "^8.35.1", - "vite": "^4.5.14" + "url": "^0.11.4", + "vite": "^4.5.14", + "vite-plugin-monaco-editor": "^1.1.0" }, "lint-staged": { "*.{js,jsx,ts,tsx}": [ @@ -75,4 +81,4 @@ ] }, "repository": "https://github.com/lexburner/himarket.git" -} \ No newline at end of file +} diff --git a/himarket-web/himarket-admin/proxy.conf b/himarket-web/himarket-admin/proxy.conf index 30315e3ff..96d51778b 100755 --- a/himarket-web/himarket-admin/proxy.conf +++ b/himarket-web/himarket-admin/proxy.conf @@ -7,10 +7,13 @@ location ~ .*\.(js|css|gif|jpg|jpeg|png|svg|json|otf|ico)$ { root /app; + expires 365d; + add_header Cache-Control "public, immutable"; } location / { try_files $uri $uri/ /index.html; root /app; index index.html index.htm; + add_header Cache-Control "no-cache"; } diff --git a/himarket-web/himarket-admin/src/aliyunThemeToken.ts b/himarket-web/himarket-admin/src/aliyunThemeToken.ts index 3f8209b4a..9ed20bf73 100644 --- a/himarket-web/himarket-admin/src/aliyunThemeToken.ts +++ 
b/himarket-web/himarket-admin/src/aliyunThemeToken.ts @@ -38,7 +38,7 @@ export default { "colorFillSecondary": "#E5E5E5", "colorFill": "#E5E5E5", "colorBgLayout": "#F7F7F7", - "colorBgSpotlight": "ffffff", + "colorBgSpotlight": "rgba(0, 0, 0, 0.85)", "colorSuccess": "#23b066", "colorSuccessBg": "#EBFFF6", "colorSuccessBgHover": "#D1F4E1", diff --git a/himarket-web/himarket-admin/src/components/api-product/ApiProductFormModal.tsx b/himarket-web/himarket-admin/src/components/api-product/ApiProductFormModal.tsx index e087af821..0046b1d76 100644 --- a/himarket-web/himarket-admin/src/components/api-product/ApiProductFormModal.tsx +++ b/himarket-web/himarket-admin/src/components/api-product/ApiProductFormModal.tsx @@ -17,6 +17,7 @@ import { getProductCategories } from "@/lib/productCategoryApi"; import type { ApiProduct } from "@/types/api-product"; import type { ProductCategory } from "@/types/product-category"; import ModelFeatureForm from "./ModelFeatureForm"; +import SkillConfigForm from "./SkillConfigForm"; interface ApiProductFormModalProps { visible: boolean; @@ -71,6 +72,7 @@ export default function ApiProductFormModal({ type: initialData.type, autoApprove: initialData.autoApprove, feature: initialData.feature, + skillConfig: initialData.skillConfig, }); }, 300); @@ -198,6 +200,7 @@ export default function ApiProductFormModal({ const { icon, iconUrl, categories, ...otherValues } = values; + if (isEditMode) { let params = { ...otherValues }; @@ -279,13 +282,15 @@ export default function ApiProductFormModal({ - - - + {productType !== 'AGENT_SKILL' && ( + + + + )} - REST API + Model API MCP Server + Agent Skill Agent API - Model API + REST API @@ -341,7 +347,7 @@ export default function ApiProductFormModal({ - - + } - + {productType !== 'AGENT_SKILL' && )} - + } {/* 图片预览弹窗 */} {previewImage && ( @@ -510,7 +516,8 @@ export default function ApiProductFormModal({ )} {/* Feature Configuration */} - {productType === 'MODEL_API' && } + {productType === 'MODEL_API' && } + 
{productType === 'AGENT_SKILL' && } ); diff --git a/himarket-web/himarket-admin/src/components/api-product/ApiProductLinkApi.tsx b/himarket-web/himarket-admin/src/components/api-product/ApiProductLinkApi.tsx index 72bc0a6f9..d0d39119e 100644 --- a/himarket-web/himarket-admin/src/components/api-product/ApiProductLinkApi.tsx +++ b/himarket-web/himarket-admin/src/components/api-product/ApiProductLinkApi.tsx @@ -281,6 +281,9 @@ export function ApiProductLinkApi({ apiProduct, linkedService, onLinkedServiceUp } else if (apiProduct.type === 'MODEL_API') { // Model API 支持 APIG_AI 网关、HIGRESS 网关、ADP AI 网关、APSARA 网关 result = res.data?.content?.filter?.((item: Gateway) => item.gatewayType === 'APIG_AI' || item.gatewayType === 'HIGRESS' || item.gatewayType === 'ADP_AI_GATEWAY' || item.gatewayType === 'APSARA_GATEWAY'); + } else if (apiProduct.type === 'AGENT_SKILL') { + // Agent Skill 不需要关联网关 + result = []; } else { // MCP Server 支持 HIGRESS、APIG_AI、ADP AI 网关、APSARA 网关 result = res.data?.content?.filter?.((item: Gateway) => item.gatewayType === 'HIGRESS' || item.gatewayType === 'APIG_AI' || item.gatewayType === 'ADP_AI_GATEWAY' || item.gatewayType === 'APSARA_GATEWAY'); diff --git a/himarket-web/himarket-admin/src/components/api-product/ApiProductLinkNacos.tsx b/himarket-web/himarket-admin/src/components/api-product/ApiProductLinkNacos.tsx new file mode 100644 index 000000000..126f39fc2 --- /dev/null +++ b/himarket-web/himarket-admin/src/components/api-product/ApiProductLinkNacos.tsx @@ -0,0 +1,162 @@ +import { useState, useEffect } from 'react' +import { Card, Button, Modal, Form, Select, message, Descriptions, Empty } from 'antd' +import { LinkOutlined } from '@ant-design/icons' +import type { ApiProduct } from '@/types/api-product' +import type { NacosInstance } from '@/types/gateway' +import { nacosApi, apiProductApi } from '@/lib/api' + +interface ApiProductLinkNacosProps { + apiProduct: ApiProduct + handleRefresh: () => void +} + +export function ApiProductLinkNacos({ 
apiProduct, handleRefresh }: ApiProductLinkNacosProps) { + const [modalVisible, setModalVisible] = useState(false) + const [form] = Form.useForm() + const [nacosInstances, setNacosInstances] = useState([]) + const [namespaces, setNamespaces] = useState([]) + const [nacosLoading, setNacosLoading] = useState(false) + const [nsLoading, setNsLoading] = useState(false) + const [saving, setSaving] = useState(false) + + // 当前关联的 Nacos 信息 + const skillConfig = apiProduct.skillConfig + const currentNacosId = skillConfig?.nacosId + const currentNamespace = skillConfig?.namespace || 'public' + + // 查找当前关联的 Nacos 实例名称 + const [currentNacosName, setCurrentNacosName] = useState('') + + useEffect(() => { + if (currentNacosId) { + nacosApi.getNacos({ page: 1, size: 1000 }).then((res: any) => { + const list = res.data?.content || [] + setNacosInstances(list) + const found = list.find((n: NacosInstance) => n.nacosId === currentNacosId) + setCurrentNacosName(found?.nacosName || currentNacosId) + }).catch(() => {}) + } + }, [currentNacosId]) + + const fetchNacosInstances = async () => { + setNacosLoading(true) + try { + const res = await nacosApi.getNacos({ page: 1, size: 1000 }) + setNacosInstances(res.data?.content || []) + } catch { + } finally { + setNacosLoading(false) + } + } + + const handleNacosChange = async (nacosId: string) => { + form.setFieldValue('namespace', undefined) + setNamespaces([]) + setNsLoading(true) + try { + const res = await nacosApi.getNamespaces(nacosId, { page: 1, size: 1000 }) + setNamespaces(res.data?.content || []) + } catch { + } finally { + setNsLoading(false) + } + } + + const openModal = () => { + fetchNacosInstances() + if (currentNacosId) { + form.setFieldsValue({ nacosId: currentNacosId, namespace: currentNamespace }) + handleNacosChange(currentNacosId) + } + setModalVisible(true) + } + + const handleSave = async () => { + const values = await form.validateFields() + setSaving(true) + try { + await 
apiProductApi.updateSkillNacos(apiProduct.productId, { + nacosId: values.nacosId, + namespace: values.namespace, + }) + message.success('Nacos 关联已更新') + setModalVisible(false) + handleRefresh() + } catch { + } finally { + setSaving(false) + } + } + + return ( +

+

Link Nacos

+

管理该 Skill 关联的 Nacos 实例和命名空间

+ + {currentNacosId ? ( + + + {currentNacosName} + {currentNamespace || 'public'} + {currentNacosId} + +
+ +
+
+ ) : ( + + + + + + )} + + setModalVisible(false)} + confirmLoading={saving} + okText="确认" + cancelText="取消" + > +
+ + ({ + label: ns.namespaceName || ns.namespaceId || 'public', + value: ns.namespaceId || '', + }))} + /> + + +
+
+ ) +} diff --git a/himarket-web/himarket-admin/src/components/api-product/ApiProductOverview.tsx b/himarket-web/himarket-admin/src/components/api-product/ApiProductOverview.tsx index 183d71986..63c5cff22 100644 --- a/himarket-web/himarket-admin/src/components/api-product/ApiProductOverview.tsx +++ b/himarket-web/himarket-admin/src/components/api-product/ApiProductOverview.tsx @@ -28,7 +28,7 @@ interface ApiProductOverviewProps { export function ApiProductOverview({ apiProduct, linkedService, onEdit }: ApiProductOverviewProps) { const [portalCount, setPortalCount] = useState(0) - const [subscriberCount] = useState(0) + const [subscriberCount, setSubscriberCount] = useState(0) const [productCategories, setProductCategories] = useState([]) const navigate = useNavigate() @@ -37,6 +37,7 @@ export function ApiProductOverview({ apiProduct, linkedService, onEdit }: ApiPro if (apiProduct.productId) { fetchPublishedPortals() fetchProductCategories() + fetchSubscriberCount() } }, [apiProduct.productId, apiProduct]) @@ -49,6 +50,14 @@ export function ApiProductOverview({ apiProduct, linkedService, onEdit }: ApiPro } } + const fetchSubscriberCount = async () => { + try { + const res = await apiProductApi.getProductSubscriptions(apiProduct.productId, { page: 0, size: 1 }) + setSubscriberCount(res.data.totalElements || 0) + } catch (error) { + } + } + const fetchProductCategories = async () => { try { // 获取产品关联的类别信息 @@ -132,7 +141,8 @@ export function ApiProductOverview({ apiProduct, linkedService, onEdit }: ApiPro {apiProduct.type === 'REST_API' ? 'REST API' : apiProduct.type === 'AGENT_API' ? 'Agent API' : - apiProduct.type === 'MODEL_API' ? 'Model API' : 'MCP Server'} + apiProduct.type === 'MODEL_API' ? 'Model API' : + apiProduct.type === 'AGENT_SKILL' ? 'Agent Skill' : 'MCP Server'} 状态:
@@ -148,22 +158,29 @@ export function ApiProductOverview({ apiProduct, linkedService, onEdit }: ApiPro
- -
- 自动审批订阅: -
- {apiProduct.autoApprove === true ? ( - - ) : ( - - )} - - {apiProduct.autoApprove === true ? '已开启' : '已关闭'} - + + {apiProduct.type !== 'AGENT_SKILL' ? ( +
+ 自动审批订阅: +
+ {apiProduct.autoApprove === true ? ( + + ) : ( + + )} + + {apiProduct.autoApprove === true ? '已开启' : '已关闭'} + +
+ 创建时间: + {formatDateTime(apiProduct.createAt)}
- 创建时间: - {formatDateTime(apiProduct.createAt)} -
+ ) : ( +
+ 创建时间: + {formatDateTime(apiProduct.createAt)} +
+ )}
产品类别: @@ -200,6 +217,7 @@ export function ApiProductOverview({ apiProduct, linkedService, onEdit }: ApiPro apiProduct.feature.modelFeature.maxTokens && `${apiProduct.feature.modelFeature.maxTokens} tokens`, apiProduct.feature.modelFeature.temperature !== null && apiProduct.feature.modelFeature.temperature !== undefined && `temperature ${apiProduct.feature.modelFeature.temperature}`, apiProduct.feature.modelFeature.webSearch && '联网搜索', + apiProduct.feature.modelFeature.enableThinking && '深度思考', apiProduct.feature.modelFeature.enableMultiModal && '多模态' ].filter(Boolean).map((param, index, array) => ( @@ -230,49 +248,51 @@ export function ApiProductOverview({ apiProduct, linkedService, onEdit }: ApiPro
- {/* 统计数据 */} - -
- { - navigate(`/api-products/detail?productId=${apiProduct.productId}&tab=portal`) - }} - > - } - valueStyle={{ color: '#1677ff', fontSize: '24px' }} - /> - - - - { - navigate(`/api-products/detail?productId=${apiProduct.productId}&tab=link-api`) - }} - > - } - valueStyle={{ color: '#1677ff', fontSize: '24px' }} - /> - - - - - } - valueStyle={{ color: '#1677ff', fontSize: '24px' }} - /> - - - + {/* 统计数据 - AGENT_SKILL 不展示 */} + {apiProduct.type !== 'AGENT_SKILL' && ( + + + { + navigate(`/api-products/detail?productId=${apiProduct.productId}&tab=portal`) + }} + > + } + valueStyle={{ color: '#1677ff', fontSize: '24px' }} + /> + + + + { + navigate(`/api-products/detail?productId=${apiProduct.productId}&tab=link-api`) + }} + > + } + valueStyle={{ color: '#1677ff', fontSize: '24px' }} + /> + + + + + } + valueStyle={{ color: '#1677ff', fontSize: '24px' }} + /> + + + + )} ) diff --git a/himarket-web/himarket-admin/src/components/api-product/ApiProductSkillPackage.tsx b/himarket-web/himarket-admin/src/components/api-product/ApiProductSkillPackage.tsx new file mode 100644 index 000000000..98b5cdf25 --- /dev/null +++ b/himarket-web/himarket-admin/src/components/api-product/ApiProductSkillPackage.tsx @@ -0,0 +1,299 @@ +import { useState, useEffect, useRef } from 'react' +import { Upload, message, Spin, Tooltip, Alert, Button as AntButton } from 'antd' +import { InboxOutlined, FolderOutlined, FolderOpenOutlined, FileOutlined } from '@ant-design/icons' +import ReactMarkdown from 'react-markdown' +import remarkGfm from 'remark-gfm' +import rehypeHighlight from 'rehype-highlight' +import MonacoEditor from 'react-monaco-editor' +import { skillApi } from '@/lib/api' +import 'github-markdown-css/github-markdown-light.css' +import 'highlight.js/styles/github.css' + +interface SkillFileTreeNode { + name: string + path: string + type: 'file' | 'directory' + encoding?: string + size?: number + children?: SkillFileTreeNode[] +} + +interface FileContent { + path: string + content: string 
+ encoding: string + size: number +} + +interface ApiProductSkillPackageProps { + apiProduct: import('@/types/api-product').ApiProduct + onUploadSuccess?: () => void +} + +// ── 自定义文件树(与前台 SkillFileTree 对齐)───────────────── +interface TreeNodeProps { + node: SkillFileTreeNode + selectedPath?: string + onSelect: (path: string) => void + depth: number +} + +function TreeNode({ node, selectedPath, onSelect, depth }: TreeNodeProps) { + const [expanded, setExpanded] = useState(true) + const isDir = node.type === 'directory' + const isSelected = node.path === selectedPath + + return ( +
+ +
isDir ? setExpanded(v => !v) : onSelect(node.path)} + > + {isDir + ? expanded + ? + : + : + } + {node.name} +
+
+ {isDir && expanded && node.children && node.children.length > 0 && ( +
+ {node.children.map(child => ( + + ))} +
+ )} +
+ ) +} + +function SkillFileTree({ nodes, selectedPath, onSelect }: { nodes: SkillFileTreeNode[]; selectedPath?: string; onSelect: (p: string) => void }) { + return ( +
+ {nodes.map(node => ( + + ))} +
+ ) +} + +function parseFrontMatter(content: string): { entries: [string, string][]; body: string } { + const t = content.trim() + if (!t.startsWith('---')) return { entries: [], body: t } + const end = t.indexOf('---', 3) + if (end === -1) return { entries: [], body: t } + const yamlBlock = t.substring(3, end).trim() + const body = t.substring(end + 3).trim() + const entries: [string, string][] = yamlBlock.split('\n').flatMap((line) => { + const idx = line.indexOf(':') + if (idx <= 0) return [] + const k = line.substring(0, idx).trim() + let v = line.substring(idx + 1).trim() + if ((v.startsWith('"') && v.endsWith('"')) || (v.startsWith("'") && v.endsWith("'"))) v = v.slice(1, -1) + return [[k, v]] as [string, string][] + }) + return { entries, body } +} + +function findNode(nodes: SkillFileTreeNode[], path: string): SkillFileTreeNode | null { + for (const node of nodes) { + if (node.path === path) return node + if (node.children) { const f = findNode(node.children, path); if (f) return f } + } + return null +} + +export function ApiProductSkillPackage({ apiProduct, onUploadSuccess }: ApiProductSkillPackageProps) { + const productId = apiProduct.productId + const hasNacos = !!(apiProduct.skillConfig?.nacosId) + const [fileTree, setFileTree] = useState([]) + const [selectedPath, setSelectedPath] = useState() + const [selectedFile, setSelectedFile] = useState(null) + const [loadingTree, setLoadingTree] = useState(false) + const [loadingFile, setLoadingFile] = useState(false) + const [uploading, setUploading] = useState(false) + const [treeWidth, setTreeWidth] = useState(240) + const isDragging = useRef(false) + + const handleDragStart = (e: React.MouseEvent) => { + e.preventDefault() + isDragging.current = true + const startX = e.clientX + const startWidth = treeWidth + const onMove = (ev: MouseEvent) => { + if (!isDragging.current) return + setTreeWidth(Math.min(520, Math.max(160, startWidth + ev.clientX - startX))) + } + const onUp = () => { + isDragging.current = 
false + window.removeEventListener('mousemove', onMove) + window.removeEventListener('mouseup', onUp) + } + window.addEventListener('mousemove', onMove) + window.addEventListener('mouseup', onUp) + } + + const fetchFileTree = async () => { + setLoadingTree(true) + try { + const res: any = await skillApi.getSkillFiles(productId) + const nodes: SkillFileTreeNode[] = res.data || [] + setFileTree(nodes) + if (findNode(nodes, 'SKILL.md')) loadFileContent('SKILL.md') + } catch { + } finally { + setLoadingTree(false) + } + } + + const loadFileContent = async (path: string) => { + setSelectedPath(path) + setLoadingFile(true) + try { + const res: any = await skillApi.getSkillFileContent(productId, path) + setSelectedFile(res.data) + } catch { + } finally { + setLoadingFile(false) + } + } + + useEffect(() => { fetchFileTree() }, [productId]) + + const customRequest = async (options: any) => { + const { file, onSuccess, onError } = options + setUploading(true) + try { + const res: any = await skillApi.uploadSkillPackage(productId, file) + message.success('上传成功') + onSuccess(res) + await fetchFileTree() + onUploadSuccess?.() + } catch (error: any) { + message.destroy() + message.error(error.response?.data?.message || '上传失败') + onError(error) + } finally { + setUploading(false) + } + } + + const renderPreview = () => { + if (loadingFile) return
+ + if (!selectedFile) return ( +
+
+ +

点击左侧文件查看内容

+
+
+ ) + + if (selectedFile.encoding === 'base64') return ( +
+

二进制文件,不支持预览

+
+ ) + + if (selectedFile.path.endsWith('.md')) { + const { entries, body } = parseFrontMatter(selectedFile.content) + return ( +
+ {entries.length > 0 && ( +
+ + + {entries.map(([k]) => ( + + ))} + + + + + {entries.map(([k, v]) => ( + + ))} + + +
{k}
{v}
+ )} +
+ {body} +
+
+ ) + } + + const lang = (() => { + const ext = selectedFile.path.split('.').pop()?.toLowerCase() ?? '' + const map: Record = { py: 'python', js: 'javascript', ts: 'typescript', tsx: 'typescript', jsx: 'javascript', json: 'json', yaml: 'yaml', yml: 'yaml', sh: 'shell', bash: 'shell', css: 'css', html: 'html', xml: 'xml', sql: 'sql', java: 'java', go: 'go', rs: 'rust', rb: 'ruby', kt: 'kotlin', swift: 'swift', c: 'c', cpp: 'cpp', h: 'c', hpp: 'cpp' } + return map[ext] || 'plaintext' + })() + + return ( + + ) + } + + return ( +
+
+

Skill Package

+

上传并管理技能包文件

+
+ + {!hasNacos && ( + + 前往 Nacos 管理 + + } + /> + )} + + +

+

点击或拖拽上传 Skill 包

+

支持 .zip 和 .tar.gz 格式,最大 50MB

+
+ +
+
+ {loadingTree + ?
+ : fileTree.length === 0 + ?
暂无文件
+ : + } +
+ {/* 拖拽分隔条 */} +
+
{renderPreview()}
+
+
+ ) +} diff --git a/himarket-web/himarket-admin/src/components/api-product/ApiProductUsageGuide.tsx b/himarket-web/himarket-admin/src/components/api-product/ApiProductUsageGuide.tsx index 4c07f05c4..c072b2b3f 100644 --- a/himarket-web/himarket-admin/src/components/api-product/ApiProductUsageGuide.tsx +++ b/himarket-web/himarket-admin/src/components/api-product/ApiProductUsageGuide.tsx @@ -1,6 +1,6 @@ import { Card, Button, Space, message } from 'antd' import { SaveOutlined, UploadOutlined, FileMarkdownOutlined, EditOutlined } from '@ant-design/icons' -import { useEffect, useState, useRef } from 'react' +import { useState, useRef } from 'react' import ReactMarkdown from 'react-markdown' import remarkGfm from 'remark-gfm'; import MdEditor from 'react-markdown-editor-lite' @@ -17,22 +17,17 @@ export function ApiProductUsageGuide({ apiProduct, handleRefresh }: ApiProductUs const [content, setContent] = useState(apiProduct.document || '') const [isEditing, setIsEditing] = useState(false) const [originalContent, setOriginalContent] = useState(apiProduct.document || '') + const [saving, setSaving] = useState(false) const fileInputRef = useRef(null) - useEffect(() => { - const doc = apiProduct.document || '' - setContent(doc) - setOriginalContent(doc) - }, [apiProduct.document]) - const handleEdit = () => { setIsEditing(true) } const handleSave = () => { - // 提取 categoryIds 以保留产品类别信息 const categoryIds = apiProduct.categories?.map(cat => cat.categoryId) || []; - + + setSaving(true) apiProductApi.updateApiProduct(apiProduct.productId, { document: content, categories: categoryIds @@ -41,6 +36,8 @@ export function ApiProductUsageGuide({ apiProduct, handleRefresh }: ApiProductUs setIsEditing(false) setOriginalContent(content) handleRefresh(); + }).finally(() => { + setSaving(false) }) } @@ -70,7 +67,6 @@ export function ApiProductUsageGuide({ apiProduct, handleRefresh }: ApiProductUs } reader.readAsText(file) } - // 清空 input 值,允许重复选择同一文件 if (event.target) { event.target.value = 
'' } @@ -133,7 +129,7 @@ export function ApiProductUsageGuide({ apiProduct, handleRefresh }: ApiProductUs ) : (
{content ? ( -
{content}
@@ -309,4 +188,4 @@ export function ApiProductUsageGuide({ apiProduct, handleRefresh }: ApiProductUs />
) -} \ No newline at end of file +} diff --git a/himarket-web/himarket-admin/src/components/api-product/ModelFeatureForm.tsx b/himarket-web/himarket-admin/src/components/api-product/ModelFeatureForm.tsx index 13a95341b..9145670aa 100644 --- a/himarket-web/himarket-admin/src/components/api-product/ModelFeatureForm.tsx +++ b/himarket-web/himarket-admin/src/components/api-product/ModelFeatureForm.tsx @@ -1,107 +1,94 @@ -import { useState, useEffect } from "react"; -import { Form, Input, InputNumber, Switch, Collapse, Row, Col } from "antd"; +import { Form, Input, InputNumber, Switch, Row, Col, Divider } from "antd"; -const { Panel } = Collapse; - -interface ModelFeatureFormProps { - initialExpanded?: boolean; -} - -export default function ModelFeatureForm({ initialExpanded = false }: ModelFeatureFormProps) { - const [activeKey, setActiveKey] = useState([]); - - const tooltipStyle = { - overlayInnerStyle: { - backgroundColor: '#000', - color: '#fff', - } - }; - - useEffect(() => { - setActiveKey(initialExpanded ? 
['1'] : []); - }, [initialExpanded]); +const tooltipStyle = { + overlayInnerStyle: { + backgroundColor: '#000', + color: '#fff', + } +}; +export default function ModelFeatureForm() { return ( - setActiveKey(keys as string[])} - style={{ marginBottom: 16 }} - > - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + <> + 模型参数 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ); } - diff --git a/himarket-web/himarket-admin/src/components/api-product/SkillConfigForm.tsx b/himarket-web/himarket-admin/src/components/api-product/SkillConfigForm.tsx new file mode 100644 index 000000000..d130ed809 --- /dev/null +++ b/himarket-web/himarket-admin/src/components/api-product/SkillConfigForm.tsx @@ -0,0 +1,32 @@ +import { Form, Select, Tag } from "antd"; + +/** + * 技能配置表单组件 + * 包含技能标签多选输入,用于 AGENT_SKILL 类型产品的配置 + */ +export default function SkillConfigForm() { + return ( + + - {searchTypeOptions.map(option => ( - - {option.label} - - ))} - - - {/* 分隔线 */} -
- - {/* 中间:搜索值输入框或选择框 */} - {searchType === 'type' ? ( - - ) : searchType === 'category' ? ( - - ) : ( + {/* Tabs 按类型分组 */} +
+
+ + {/* 名称搜索框 */} +
setSearchValue(e.target.value)} - style={{ - flex: 1, - }} + placeholder="搜索产品名称" + value={searchInput} + onChange={(e) => setSearchInput(e.target.value)} onPressEnter={handleSearch} allowClear - onClear={() => setSearchValue('')} - size="large" - className="h-10 border-0 rounded-none" + onClear={handleClearSearch} + size="middle" variant="borderless" + className="border-0" /> - )} - - {/* 分隔线 */} -
- - {/* 右侧:搜索按钮 */} -
- {/* 筛选条件标签 */} - {activeFilters.length > 0 && ( -
+ {/* 当前筛选提示 */} + {nameFilter && ( +
筛选条件: - - {activeFilters.map(filter => ( - removeFilter(filter.type)} - style={{ - backgroundColor: '#f5f5f5', - border: '1px solid #d9d9d9', - borderRadius: '16px', - color: '#666', - fontSize: '12px', - padding: '4px 12px', - }} - > - {filter.label} - - ))} - -
- 清除筛选条件 -
+ 产品名称:{nameFilter} +
)} -
- {loading ? ( -
- {Array.from({ length: pagination.pageSize || 12 }).map((_, index) => ( -
-
- -
-
- - -
- - -
- - + {/* 产品列表 */} +
+ {loading ? ( +
+ {Array.from({ length: pagination.pageSize || 12 }).map((_, index) => ( +
+
+ +
+
+ + +
+ + +
-
+ ))}
- ))} -
- ) : ( - <> -
- {apiProducts.map((product) => ( - fetchApiProducts(pagination.current, pagination.pageSize, filters)} - onEdit={handleEdit} - /> - ))} -
- - {pagination.total > 0 && ( -
- `共 ${total} 条`} - pageSizeOptions={['6', '12', '24', '48']} - /> + ) : apiProducts.length === 0 ? ( +
+ +

暂无{activeTab !== 'ALL' ? ` ${getTypeLabel(activeTab)} ` : ''}产品

+ ) : ( + <> +
+ {apiProducts.map((product) => ( + fetchApiProducts(pagination.current, pagination.pageSize)} + onEdit={handleEdit} + /> + ))} +
+ {pagination.total > 0 && ( +
+ `共 ${total} 条`} + pageSizeOptions={['6', '12', '24', '48']} + /> +
+ )} + )} - - )} +
+
- ) + ); } diff --git a/himarket-web/himarket-admin/src/pages/NacosConsoles.tsx b/himarket-web/himarket-admin/src/pages/NacosConsoles.tsx index 548699d34..1c62280df 100644 --- a/himarket-web/himarket-admin/src/pages/NacosConsoles.tsx +++ b/himarket-web/himarket-admin/src/pages/NacosConsoles.tsx @@ -1,7 +1,7 @@ import { useState, useEffect } from 'react' -import { Button, Table, Modal, Form, Input, message, Select } from 'antd' +import { Button, Table, Modal, Form, Input, message, Select, Tag, Popconfirm } from 'antd' import dayjs from 'dayjs' -import { PlusOutlined } from '@ant-design/icons' +import { PlusOutlined, StarOutlined } from '@ant-design/icons' import { nacosApi } from '@/lib/api' import NacosTypeSelector, { NacosImportType } from '@/components/console/NacosTypeSelector' import ImportMseNacosModal from '@/components/console/ImportMseNacosModal' @@ -26,6 +26,13 @@ export default function NacosConsoles() { // 创建来源:OPEN_SOURCE 或 MSE(用于控制是否展示 AK/SK) const [creationMode, setCreationMode] = useState<'OPEN_SOURCE' | 'MSE' | null>(null) // 命名空间字段已移除 + // 设置默认命名空间弹窗 + const [nsModalVisible, setNsModalVisible] = useState(false) + const [nsTargetNacos, setNsTargetNacos] = useState(null) + const [nsNamespaces, setNsNamespaces] = useState([]) + const [nsLoading, setNsLoading] = useState(false) + const [nsSelectedValue, setNsSelectedValue] = useState('public') + const [nsSaving, setNsSaving] = useState(false) // 分页状态 const [currentPage, setCurrentPage] = useState(1) @@ -60,18 +67,58 @@ export default function NacosConsoles() { } } + const handleSetDefault = async (nacosId: string) => { + try { + await nacosApi.setDefaultNacos(nacosId) + message.success('已设为默认 Nacos 实例') + fetchNacosInstances() + } catch (error) { + console.error('设置默认失败:', error) + } + } + + const handleOpenNsModal = async (record: NacosInstance) => { + setNsTargetNacos(record) + setNsSelectedValue(record.defaultNamespace || 'public') + setNsModalVisible(true) + setNsLoading(true) + try { + const res = 
await nacosApi.getNamespaces(record.nacosId, { page: 1, size: 1000 }) + setNsNamespaces(res.data?.content || []) + } catch { + setNsNamespaces([]) + message.error('获取命名空间列表失败,请检查 Nacos 连接信息') + } finally { + setNsLoading(false) + } + } + + const handleSaveDefaultNs = async () => { + if (!nsTargetNacos) return + setNsSaving(true) + try { + await nacosApi.setDefaultNamespace(nsTargetNacos.nacosId, nsSelectedValue) + message.success('默认命名空间设置成功') + setNsModalVisible(false) + fetchNacosInstances() + } catch { + message.error('设置默认命名空间失败') + } finally { + setNsSaving(false) + } + } + const handleEdit = (record: NacosInstance) => { setEditingNacos(record) form.setFieldsValue({ nacosName: record.nacosName, - serverUrl: record.serverUrl, + serverUrl: record.serverUrl, username: record.username, - // 密码/AK/SK 可能不返回,这里仅在存在时回填 - password: record.password, - accessKey: record.accessKey, - secretKey: record.secretKey, - description: record.description + password: record.password, + accessKey: record.accessKey, + secretKey: record.secretKey, + description: record.description, }) setModalVisible(true) } @@ -125,8 +172,8 @@ export default function NacosConsoles() { const handleModalCancel = () => { setModalVisible(false) setEditingNacos(null) - setCreationMode(null) - setImportEndpoints({}) + setCreationMode(null) + setImportEndpoints({}) form.resetFields() } @@ -137,13 +184,28 @@ export default function NacosConsoles() { title: '实例名称', dataIndex: 'nacosName', key: 'nacosName', + render: (name: string, record: NacosInstance) => ( + + {name} + {record.isDefault && 默认} + + ), }, { title: '服务器地址', dataIndex: 'serverUrl', key: 'serverUrl', }, - // 命名空间列已移除 + { + title: '默认命名空间', + dataIndex: 'defaultNamespace', + key: 'defaultNamespace', + render: (ns: string, record: NacosInstance) => ( + + ), + }, { title: '用户名', dataIndex: 'username', @@ -153,9 +215,6 @@ export default function NacosConsoles() { title: '描述', dataIndex: 'description', key: 'description', - // render: (description: 
string) => { - // return {description || '-'} - // }, ellipsis: true, }, { @@ -170,13 +229,31 @@ export default function NacosConsoles() { { title: '操作', key: 'action', - width: 150, + width: 220, render: (_: any, record: NacosInstance) => (
- + + )} +
@@ -336,6 +413,33 @@ export default function NacosConsoles() { setImportNacosId(values.nacosId || null) }} /> + + {/* 设置默认命名空间弹窗 */} + { setNsModalVisible(false); setNsTargetNacos(null); setNsNamespaces([]) }} + okText="确认" + cancelText="取消" + confirmLoading={nsSaving} + width={480} + > +
+ 选择该 Nacos 实例的默认命名空间,新建的 Skill 将自动使用此命名空间。 +
+ { + const providerObj = providers.find(p => p.key === val); + onChange(val, providerObj); + }} + title="切换 CLI Agent" + options={providers.map(p => ({ + value: p.key, + label: p.displayName + (!p.available ? " (不可用)" : ""), + disabled: !p.available, + }))} + /> + ); +} diff --git a/himarket-web/himarket-frontend/src/components/coding/CodingInput.tsx b/himarket-web/himarket-frontend/src/components/coding/CodingInput.tsx new file mode 100644 index 000000000..fb2e66c76 --- /dev/null +++ b/himarket-web/himarket-frontend/src/components/coding/CodingInput.tsx @@ -0,0 +1,583 @@ +import { + useState, + useRef, + useCallback, + type KeyboardEvent, + type ClipboardEvent, + type DragEvent, + type ChangeEvent, +} from "react"; +import { + Send, + Square, + Paperclip, + X, + Image, + FileText, + Loader2, +} from "lucide-react"; +import { useCodingState, useActiveCodingSession } from "../../context/CodingSessionContext"; +import { SlashMenu } from "./SlashMenu"; +import { FileMentionMenu } from "./FileMentionMenu"; +import { + uploadFileToWorkspace, + fetchDirectoryTree, +} from "../../lib/utils/workspaceApi"; +import { + flattenFileTree, + filterFiles, + type FlatFileItem, +} from "../../lib/utils/fileTreeUtils"; +import type { Attachment, FilePathAttachment } from "../../types/coding-protocol"; +import type { QueuedPromptItem } from "../../context/CodingSessionContext"; + +const MAX_ATTACHMENTS = 10; +const MAX_SIZE_BYTES = 5 * 1024 * 1024; // 5MB + +// Browsers often return "" for many text file types; map common extensions explicitly +const EXT_TO_MIME: Record = { + md: "text/markdown", + mdx: "text/markdown", + txt: "text/plain", + csv: "text/csv", + json: "application/json", + yaml: "application/x-yaml", + yml: "application/x-yaml", + toml: "application/toml", + xml: "application/xml", + sql: "application/sql", + graphql: "application/graphql", + sh: "application/x-sh", + bash: "application/x-sh", +}; + +function inferMimeType(file: File): string { + if (file.type) 
return file.type; + const ext = file.name.split(".").pop()?.toLowerCase() ?? ""; + return EXT_TO_MIME[ext] ?? "application/octet-stream"; +} + +let _attId = 0; +function nextAttId(): string { + return `att-${++_attId}-${Date.now()}`; +} + +interface CodingInputProps { + onSend: ( + text: string, + attachments?: Attachment[] + ) => + | { queued: true; queuedPromptId?: string } + | { queued: false; requestId?: string | number }; + onSendQueued?: (queuedPromptId?: string) => void; + onDropQueuedPrompt: (promptId: string) => void; + onCancel: () => void; + isProcessing: boolean; + queueSize: number; + queuedPrompts: QueuedPromptItem[]; + disabled: boolean; + variant?: "default" | "welcome"; + /** Extra elements rendered in the welcome toolbar, after the attachment button */ + toolbarExtra?: React.ReactNode; +} + +export function CodingInput({ + onSend, + onSendQueued, + onDropQueuedPrompt, + onCancel, + isProcessing, + queueSize, + queuedPrompts, + disabled, + variant = "default", + toolbarExtra, +}: CodingInputProps) { + const [text, setText] = useState(""); + const [showSlash, setShowSlash] = useState(false); + const [showMentionMenu, setShowMentionMenu] = useState(false); + const [mentionFilter, setMentionFilter] = useState(""); + const [flatFiles, setFlatFiles] = useState([]); + const [filesLoading, setFilesLoading] = useState(false); + const [attachments, setAttachments] = useState([]); + const [uploading, setUploading] = useState(false); + const [dragOver, setDragOver] = useState(false); + const [mentionedFiles, setMentionedFiles] = useState([]); + const inputRef = useRef(null); + const fileInputRef = useRef(null); + const state = useCodingState(); + const activeQuest = useActiveCodingSession(); + + // Upload files to backend and create FilePathAttachment entries + const addFiles = useCallback( + async (files: FileList | File[]) => { + const fileArray = Array.from(files); + if (fileArray.length === 0) return; + + const remaining = MAX_ATTACHMENTS - 
attachments.length; + if (remaining <= 0) return; + const toProcess = fileArray + .slice(0, remaining) + .filter(f => f.size <= MAX_SIZE_BYTES); + + if (toProcess.length === 0) return; + + setUploading(true); + const newAttachments: FilePathAttachment[] = []; + for (const file of toProcess) { + try { + const serverPath = await uploadFileToWorkspace(file); + const isImage = file.type.startsWith("image/"); + newAttachments.push({ + id: nextAttId(), + kind: "file_path", + name: file.name, + filePath: serverPath, + mimeType: inferMimeType(file), + previewUrl: isImage ? URL.createObjectURL(file) : undefined, + }); + } catch { + // skip failed files + } + } + setUploading(false); + if (newAttachments.length > 0) { + setAttachments(prev => [...prev, ...newAttachments]); + } + }, + [attachments.length] + ); + + const removeAttachment = useCallback((id: string) => { + setAttachments(prev => { + const att = prev.find(a => a.id === id); + if (att && att.previewUrl) { + URL.revokeObjectURL(att.previewUrl); + } + return prev.filter(a => a.id !== id); + }); + }, []); + + // Load file tree on first "@" trigger + const loadFileTree = useCallback(async () => { + if (flatFiles.length > 0 || !activeQuest?.cwd) return; + + setFilesLoading(true); + try { + const tree = await fetchDirectoryTree(activeQuest.cwd, 10); + const flattened = flattenFileTree(tree ?? 
[], activeQuest.cwd); + setFlatFiles(flattened); + } catch { + setFlatFiles([]); + } finally { + setFilesLoading(false); + } + }, [flatFiles.length, activeQuest?.cwd]); + + const removeMention = useCallback((path: string) => { + setMentionedFiles(prev => prev.filter(f => f.path !== path)); + }, []); + + const handleSend = useCallback(() => { + const trimmed = text.trim(); + if (!trimmed && attachments.length === 0 && mentionedFiles.length === 0) return; + + // Convert mentioned files to resource_link attachments + const mentionAttachments: FilePathAttachment[] = mentionedFiles.map(file => ({ + id: nextAttId(), + kind: "file_path" as const, + name: file.name, + filePath: file.path, + mimeType: file.extension ? `text/${file.extension}` : "text/plain", + })); + + const allAttachments = [...mentionAttachments, ...attachments]; + + const result = onSend( + trimmed, + allAttachments.length > 0 ? allAttachments : undefined + ); + if (result.queued) { + onSendQueued?.(result.queuedPromptId); + } + setText(""); + setShowSlash(false); + setShowMentionMenu(false); + setAttachments([]); + setMentionedFiles([]); + }, [text, attachments, mentionedFiles, onSend, onSendQueued]); + + const handleKeyDown = (e: KeyboardEvent) => { + // Let SlashMenu or FileMentionMenu handle navigation when open + if ( + (showSlash || showMentionMenu) && + ["ArrowDown", "ArrowUp", "Enter", "Tab"].includes(e.key) + ) { + return; + } + + if (e.key === "Enter" && !e.shiftKey) { + e.preventDefault(); + handleSend(); + } else if (e.key === "Escape") { + if (showSlash) { + e.preventDefault(); + setShowSlash(false); + } else if (showMentionMenu) { + e.preventDefault(); + setShowMentionMenu(false); + setMentionFilter(""); + } + } + }; + + const handleChange = (value: string) => { + setText(value); + + // Check for slash command (only at start) + const isSlashCommand = value === "/" || (value.startsWith("/") && !value.includes(" ")); + setShowSlash(isSlashCommand); + + // Check for "@" mention (at end of 
input) + const mentionMatch = value.match(/@(\S*)$/); + if (mentionMatch && !isSlashCommand) { + setShowMentionMenu(true); + setMentionFilter(mentionMatch[1]); + loadFileTree(); + } else { + setShowMentionMenu(false); + setMentionFilter(""); + } + }; + + const handleCommandSelect = (name: string) => { + setText("/" + name + " "); + setShowSlash(false); + inputRef.current?.focus(); + }; + + const handleFileSelect = useCallback( + (file: FlatFileItem) => { + // Remove "@query" from text — the file chip provides the visual reference + const newText = text.replace(/@\S*$/, ""); + setText(newText); + setShowMentionMenu(false); + setMentionFilter(""); + + // Add to mentioned files if not already present + setMentionedFiles(prev => { + if (prev.some(f => f.path === file.path)) return prev; + return [...prev, file]; + }); + + inputRef.current?.focus(); + }, + [text] + ); + + const handlePaste = (e: ClipboardEvent) => { + const items = e.clipboardData?.files; + if (items && items.length > 0) { + const hasImage = Array.from(items).some(f => f.type.startsWith("image/")); + if (hasImage) { + e.preventDefault(); + addFiles(items); + } + } + }; + + const handleDragOver = (e: DragEvent) => { + e.preventDefault(); + setDragOver(true); + }; + + const handleDragLeave = (e: DragEvent) => { + e.preventDefault(); + setDragOver(false); + }; + + const handleDrop = (e: DragEvent) => { + e.preventDefault(); + setDragOver(false); + const files = e.dataTransfer?.files; + if (files && files.length > 0) { + addFiles(files); + } + }; + + const handleFileChange = (e: ChangeEvent) => { + const files = e.target.files; + if (files && files.length > 0) { + addFiles(files); + } + // reset so same file can be selected again + e.target.value = ""; + }; + + const canSend = + !disabled && + !uploading && + (text.trim().length > 0 || attachments.length > 0 || mentionedFiles.length > 0); + + return ( +
+ {isProcessing && ( +
+
+
+ )} + {showSlash && state.commands.length > 0 && ( + + )} + {showMentionMenu && ( + + )} + + {/* Mentioned file chips (from @ references) */} + {mentionedFiles.length > 0 && ( +
+ {mentionedFiles.map(file => ( + + + + {file.name} + + + + ))} +
+ )} + + {/* Attachment preview strip */} + {(attachments.length > 0 || uploading) && ( +
+ {attachments.map(att => + att.previewUrl ? ( +
+ {att.name} + +
+ ) : ( +
+ {att.mimeType?.startsWith("image/") ? ( + + ) : ( + + )} + + {att.name} + + +
+ ) + )} + {uploading && ( +
+ + 上传中... +
+ )} +
+ )} + + {queuedPrompts.length > 0 && ( +
+
+ 队列中 {queueSize} 条消息 +
+
+ {queuedPrompts.map(item => ( +
+ + {item.text || "[仅附件]"} + + +
+ ))} +
+
+ )} + + {variant === "welcome" ? ( + /* Welcome 模式布局 */ + <> +