diff --git a/README.md b/README.md
index a5f62316..8fe26299 100644
--- a/README.md
+++ b/README.md
@@ -56,6 +56,7 @@ This launches the React-based web UI at `http://localhost:5173` with:
 - Kanban board view of features
 - Real-time agent output streaming
 - Start/pause/stop controls
+- **Project Assistant** - AI chat for managing features and exploring the codebase
 
 ### Option 2: CLI Mode
 
@@ -103,6 +104,21 @@ Features are stored in SQLite via SQLAlchemy and managed through an MCP server t
 - `feature_mark_passing` - Mark feature complete
 - `feature_skip` - Move feature to end of queue
 - `feature_create_bulk` - Initialize all features (used by initializer)
+- `feature_create` - Create a single feature
+
+### Project Assistant
+
+The Web UI includes a **Project Assistant** - an AI-powered chat interface for each project. Click the chat button in the bottom-right corner to open it.
+
+**Capabilities:**
+- **Explore the codebase** - Ask questions about files, architecture, and implementation details
+- **Manage features** - Create new features and deprioritize (skip) existing ones via natural language
+- **Get feature details** - Ask about specific features, their status, and test steps
+
+**Conversation Persistence:**
+- Conversations are automatically saved to `assistant.db` in each project directory
+- When you navigate away and return, your conversation resumes where you left off
+- Click "New Chat" to start a fresh conversation
 
 ### Session Management
 
@@ -151,8 +167,8 @@ autonomous-coding/
 │   ├── main.py              # FastAPI REST API server
 │   ├── websocket.py         # WebSocket handler for real-time updates
 │   ├── schemas.py           # Pydantic schemas
-│   ├── routers/             # API route handlers
-│   └── services/            # Business logic services
+│   ├── routers/             # API route handlers (projects, features, agent, assistant)
+│   └── services/            # Business logic (assistant chat sessions, database)
 ├── ui/                      # React frontend
 │   ├── src/
 │   │   ├── App.tsx          # Main app component
@@ -179,6 +195,7 @@ After the agent runs, your project directory will contain:
 ```
 generations/my_project/
 ├── features.db              # SQLite database (feature test cases)
+├── assistant.db             # SQLite database (assistant chat history)
 ├── prompts/
 │   ├── app_spec.txt         # Your app specification
 │   ├── initializer_prompt.md # First session prompt
diff --git a/server/routers/assistant_chat.py b/server/routers/assistant_chat.py
index dae53b4a..805a848b 100644
--- a/server/routers/assistant_chat.py
+++ b/server/routers/assistant_chat.py
@@ -288,6 +288,41 @@ async def assistant_chat_websocket(websocket: WebSocket, project_name: str):
                     "content": f"Failed to start session: {str(e)}"
                 })
 
+        elif msg_type == "resume":
+            # Resume an existing conversation without sending greeting
+            conversation_id = message.get("conversation_id")
+
+            # Validate conversation_id is present and valid
+            if not conversation_id or not isinstance(conversation_id, int):
+                logger.warning(f"Invalid resume request for {project_name}: missing or invalid conversation_id")
+                await websocket.send_json({
+                    "type": "error",
+                    "content": "Missing or invalid conversation_id for resume"
+                })
+                continue
+
+            try:
+                # Create session
+                session = await create_session(
+                    project_name,
+                    project_dir,
+                    conversation_id=conversation_id,
+                )
+                # Initialize but skip the greeting
+                async for chunk in session.start(skip_greeting=True):
+                    await websocket.send_json(chunk)
+                # Confirm we're ready
+                await websocket.send_json({
+                    "type": "conversation_created",
+                    "conversation_id": conversation_id,
+                })
+            except Exception as e:
+                logger.exception(f"Error resuming assistant session for {project_name}")
+                await websocket.send_json({
+                    "type": "error",
+                    "content": f"Failed to resume session: {str(e)}"
+                })
+
         elif msg_type == "message":
             if not session:
                 session = get_session(project_name)
diff --git a/server/services/assistant_chat_session.py b/server/services/assistant_chat_session.py
index bebed941..9f996057 100755
--- a/server/services/assistant_chat_session.py
+++ b/server/services/assistant_chat_session.py
@@ -191,12 +191,15 @@ async def close(self) -> None:
             self._client_entered = False
             self.client = None
 
-    async def start(self) -> AsyncGenerator[dict, None]:
+    async def start(self, skip_greeting: bool = False) -> AsyncGenerator[dict, None]:
         """
         Initialize session with the Claude client.
 
         Creates a new conversation if none exists, then sends an initial greeting.
         Yields message chunks as they stream in.
+
+        Args:
+            skip_greeting: If True, skip sending the greeting (for resuming conversations)
         """
         # Create a new conversation if we don't have one
         if self.conversation_id is None:
@@ -267,18 +270,19 @@ async def start(self) -> AsyncGenerator[dict, None]:
             yield {"type": "error", "content": f"Failed to initialize assistant: {str(e)}"}
             return
 
-        # Send initial greeting
-        try:
-            greeting = f"Hello! I'm your project assistant for **{self.project_name}**. I can help you understand the codebase, explain features, and answer questions about the project. What would you like to know?"
+        # Send initial greeting (unless resuming)
+        if not skip_greeting:
+            try:
+                greeting = f"Hello! I'm your project assistant for **{self.project_name}**. I can help you understand the codebase, manage features (create and deprioritize), and answer questions about the project. What would you like to do?"
 
-            # Store the greeting in the database
-            add_message(self.project_dir, self.conversation_id, "assistant", greeting)
+                # Store the greeting in the database
+                add_message(self.project_dir, self.conversation_id, "assistant", greeting)
 
-            yield {"type": "text", "content": greeting}
-            yield {"type": "response_done"}
-        except Exception as e:
-            logger.exception("Failed to send greeting")
-            yield {"type": "error", "content": f"Failed to start conversation: {str(e)}"}
+                yield {"type": "text", "content": greeting}
+                yield {"type": "response_done"}
+            except Exception as e:
+                logger.exception("Failed to send greeting")
+                yield {"type": "error", "content": f"Failed to start conversation: {str(e)}"}
 
     async def send_message(self, user_message: str) -> AsyncGenerator[dict, None]:
         """
diff --git a/start_ui.sh b/start_ui.sh
old mode 100644
new mode 100755
diff --git a/ui/src/components/AssistantChat.tsx b/ui/src/components/AssistantChat.tsx
index ef8aeb32..80264ac8 100644
--- a/ui/src/components/AssistantChat.tsx
+++ b/ui/src/components/AssistantChat.tsx
@@ -3,101 +3,119 @@
  *
  * Main chat interface for the project assistant.
  * Displays messages and handles user input.
+ * Automatically resumes the most recent conversation.
  */
 
-import { useState, useRef, useEffect, useCallback } from 'react'
-import { Send, Loader2, Wifi, WifiOff } from 'lucide-react'
-import { useAssistantChat } from '../hooks/useAssistantChat'
-import { ChatMessage } from './ChatMessage'
+import { useState, useRef, useEffect, useCallback } from "react";
+import { Send, Loader2, Wifi, WifiOff, Plus } from "lucide-react";
+import { useAssistantChat } from "../hooks/useAssistantChat";
+import { ChatMessage } from "./ChatMessage";
 
 interface AssistantChatProps {
-  projectName: string
+  projectName: string;
 }
 
 export function AssistantChat({ projectName }: AssistantChatProps) {
-  const [inputValue, setInputValue] = useState('')
-  const messagesEndRef = useRef(null)
-  const inputRef = useRef(null)
-  const hasStartedRef = useRef(false)
+  const [inputValue, setInputValue] = useState("");
+  const messagesEndRef = useRef(null);
+  const inputRef = useRef(null);
 
   // Memoize the error handler to prevent infinite re-renders
   const handleError = useCallback((error: string) => {
-    console.error('Assistant error:', error)
-  }, [])
+    console.error("Assistant error:", error);
+  }, []);
 
   const {
     messages,
     isLoading,
     connectionStatus,
-    start,
+    isLoadingHistory,
+    startNewConversation,
     sendMessage,
   } = useAssistantChat({
     projectName,
     onError: handleError,
-  })
+  });
 
   // Auto-scroll to bottom on new messages
   useEffect(() => {
-    messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' })
-  }, [messages])
-
-  // Start the chat session when component mounts (only once)
-  useEffect(() => {
-    if (!hasStartedRef.current) {
-      hasStartedRef.current = true
-      start()
-    }
-  }, [start])
+    messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
+  }, [messages]);
 
   // Focus input when not loading
   useEffect(() => {
     if (!isLoading) {
-      inputRef.current?.focus()
+      inputRef.current?.focus();
     }
-  }, [isLoading])
+  }, [isLoading]);
 
   const handleSend = () => {
-    const content = inputValue.trim()
-    if (!content || isLoading) return
+    const content = inputValue.trim();
+    if (!content || isLoading) return;
 
-    sendMessage(content)
-    setInputValue('')
-  }
+    sendMessage(content);
+    setInputValue("");
+  };
 
   const handleKeyDown = (e: React.KeyboardEvent) => {
-    if (e.key === 'Enter' && !e.shiftKey) {
-      e.preventDefault()
-      handleSend()
+    if (e.key === "Enter" && !e.shiftKey) {
+      e.preventDefault();
+      handleSend();
     }
-  }
+  };
 
   return (
-      {/* Connection status indicator */}
-
-        {connectionStatus === 'connected' ? (
-          <>
-
-            Connected
-
-        ) : connectionStatus === 'connecting' ? (
-          <>
-
-            Connecting...
-
-        ) : (
-          <>
-
-            Disconnected
-
-        )}
+      {/* Header with connection status and new chat button */}
+
+
+          {connectionStatus === "connected" ? (
+            <>
+
+
+              Connected
+
+
+          ) : connectionStatus === "connecting" ? (
+            <>
+
+
+              Connecting...
+
+
+          ) : (
+            <>
+
+
+              Disconnected
+
+
+          )}
+
+
       {/* Messages area */}
 
         {messages.length === 0 ? (
-          {isLoading ? (
+          {isLoadingHistory ? (
+
+              Loading conversation...
+
+          ) : isLoading ? (
               Connecting to assistant...
@@ -121,9 +139,18 @@ export function AssistantChat({ projectName }: AssistantChatProps) {
-
-
-
+
+
+
               Thinking...
@@ -139,7 +166,7 @@ export function AssistantChat({ projectName }: AssistantChatProps) {
           onChange={(e) => setInputValue(e.target.value)}
           onKeyDown={handleKeyDown}
           placeholder="Ask about the codebase..."
-          disabled={isLoading || connectionStatus !== 'connected'}
+          disabled={isLoading || connectionStatus !== "connected"}
           className="
             flex-1
             neo-input
@@ -152,7 +179,11 @@ export function AssistantChat({ projectName }: AssistantChatProps) {
           />
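
Note on the resume protocol this diff adds: the client sends a `resume` message carrying a stored `conversation_id` over the assistant WebSocket; the server initializes the session with `skip_greeting=True`, streams any chunks, and confirms with `conversation_created` (or reports `error`). A rough client-side TypeScript sketch is shown below. The WebSocket URL, port, and the way the conversation id is persisted are assumptions for illustration only; the message shapes (`resume`, `conversation_created`, `text`, `response_done`, `error`) are the ones defined in the handler in `server/routers/assistant_chat.py` above.

```typescript
// Hypothetical sketch of resuming an assistant conversation over the WebSocket.
// The endpoint path is an assumption; adjust it to wherever
// assistant_chat_websocket is actually mounted in your FastAPI app.

type AssistantChunk =
  | { type: "text"; content: string }
  | { type: "response_done" }
  | { type: "conversation_created"; conversation_id: number }
  | { type: "error"; content: string };

function resumeConversation(projectName: string, conversationId: number): WebSocket {
  // Assumed URL; only the message payloads below come from the diff.
  const ws = new WebSocket(`ws://localhost:8000/api/assistant/${projectName}/ws`);

  ws.onopen = () => {
    // Ask the server to re-open an existing conversation without a greeting.
    ws.send(JSON.stringify({ type: "resume", conversation_id: conversationId }));
  };

  ws.onmessage = (event) => {
    const chunk: AssistantChunk = JSON.parse(event.data);
    switch (chunk.type) {
      case "text":
        console.log("assistant:", chunk.content);
        break;
      case "conversation_created":
        console.log("resumed conversation", chunk.conversation_id);
        break;
      case "error":
        console.error("assistant error:", chunk.content);
        break;
      case "response_done":
        // End of one streamed response.
        break;
    }
  };

  return ws;
}
```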