diff --git a/README.md b/README.md index aa9ac1f..fae2f24 100644 --- a/README.md +++ b/README.md @@ -6,9 +6,11 @@ Browse and render Codex session `.jsonl` files with a web UI, and optionally sha ## Features - Indexes `~/.codex/sessions/{year}/{month}/{day}` and lists sessions by date. +- Adds `/active` for day-by-day active threads across all directories, with manual end/reopen controls. +- Accepts Codex CLI notification webhooks on `/hook` and lets you inspect them on `/notifications`. - Renders conversations as HTML with markdown support and dark theme. -- Shows only user/agent messages and reasoning; tool calls and other events are omitted. -- Consecutive messages are merged; for user groups, only the last message is kept. +- Shows user/agent messages, non-empty reasoning summaries, and response items such as tool calls, tool outputs, web searches, custom tool activity, and ghost snapshots. +- Consecutive messages and reasoning items are merged; visible tool/system items stay separate. For user groups, only the last message is kept. - User messages can be trimmed to content after `## My request for Codex:` (default on). - Share button saves a hard‑to‑guess HTML file to `~/.codex/shares` and copies its URL. - Separate share server serves only exact filenames (no directory listing). @@ -57,6 +59,11 @@ Visit: - UI: http://localhost:8080/ - Share server: http://localhost:8081/ +Key pages: +- `/` date/directory browser +- `/active` active thread list (default: today in your browser time zone) +- `/notifications` received webhook notifications + ## Autostart (systemd --user) If your environment supports user services (WSL with systemd, Linux desktops), you can keep it running. 
@@ -107,6 +114,27 @@ systemctl --user restart codex-manager - `-full` disable trimming to `## My request for Codex:` - `-h` / `--help` +## Repository overrides +- Default path: `~/.codex/session_repository_overrides.json` +- Purpose: override the GitHub repository used for branch links for specific `cwd` prefixes +- Create this file only when you need overrides; if the file is missing, Codex Manager falls back to `session_meta.git.repository_url` +- Matching: longest `cwd_prefix` match wins +- Changes are loaded at startup, so restart Codex Manager after editing the file +- Format: + ```json + { + "version": 1, + "rules": [ + { + "cwd_prefix": "/home/makoto/codex-manager", + "repository_url": "https://github.com/makoto-soracom/codex-manager.git" + } + ] + } + ``` +- Resolution order: repository override, then `session_meta.git.repository_url` +- Scope: this only changes the repository used for branch links; branch names and resume commands still come from session metadata + ## HTMLBucket notes - Auth file path: `~/.hb/auth.json` - Auth file format: @@ -140,6 +168,36 @@ Clicking “Share”: - Copies the share URL to your clipboard - Displays a banner showing the copied URL +## Active thread state +- New top-level threads are treated as active by default. +- `/active` uses your browser time zone (stored in a cookie) to decide which day a thread belongs to. +- A thread is shown as: + - `Waiting for user` + - `Waiting for agent` + - `Ended` +- `Ended` is a manual flag stored in `~/.codex/session_state.json`. +- If a thread receives new JSONL activity after being marked ended, it automatically returns to active. +- `/active` refreshes while the page is open without changing the global `--rescan-interval`. 
+ +## Codex CLI notifications +You can point Codex CLI `notify` to the local server: + +```toml +notify = [ + "curl", + "-sS", + "-X", "POST", + "http://localhost:8080/hook", + "-H", "Content-Type: application/json", + "--data-binary" +] +``` + +- Incoming requests are accepted on `/hook`. +- Received notifications are appended to `~/.codex/notifications.jsonl`. +- `/notifications` shows the newest notifications first, with headers and body. +- The page auto-refreshes while open so you can inspect payloads as they arrive. + ## Development ```bash go test ./... diff --git a/cmd/codex-manager/main.go b/cmd/codex-manager/main.go index 1d4e65e..4fe7865 100644 --- a/cmd/codex-manager/main.go +++ b/cmd/codex-manager/main.go @@ -14,9 +14,12 @@ import ( "strings" "time" + "codex-manager/internal/active" "codex-manager/internal/config" "codex-manager/internal/htmlbucket" + "codex-manager/internal/notifications" "codex-manager/internal/render" + "codex-manager/internal/repooverride" "codex-manager/internal/search" "codex-manager/internal/sessions" "codex-manager/internal/web" @@ -42,6 +45,32 @@ func main() { log.Printf("initial scan failed: %v", err) } + activeIdx := active.NewIndex() + if err := activeIdx.RefreshFrom(idx); err != nil { + log.Printf("initial active index build failed: %v", err) + } + + activeState, err := active.LoadStateStore(active.DefaultStatePath(cfg.SessionsDir)) + if err != nil { + log.Printf("active state load failed: %v", err) + activeState, _ = active.LoadStateStore("") + } + if err := activeState.Reconcile(activeIdx.Summaries()); err != nil { + log.Printf("initial active state reconcile failed: %v", err) + } + + notificationStore, err := notifications.LoadStore(notifications.DefaultPath(cfg.SessionsDir)) + if err != nil { + log.Printf("notification store load failed: %v", err) + notificationStore, _ = notifications.LoadStore("") + } + + repositoryOverrideStore, err := repooverride.LoadStore(repooverride.DefaultPath(cfg.SessionsDir)) + if err != nil 
{ + log.Printf("repository override load failed: %v", err) + repositoryOverrideStore, _ = repooverride.LoadStore("") + } + searchIdx := search.NewIndex() if err := searchIdx.RefreshFrom(idx); err != nil { log.Printf("initial search index build failed: %v", err) @@ -55,6 +84,12 @@ func main() { log.Printf("rescan failed: %v", err) continue } + if err := activeIdx.RefreshFrom(idx); err != nil { + log.Printf("active reindex failed: %v", err) + } + if err := activeState.Reconcile(activeIdx.Summaries()); err != nil { + log.Printf("active state reconcile failed: %v", err) + } if err := searchIdx.RefreshFrom(idx); err != nil { log.Printf("search reindex failed: %v", err) } @@ -67,6 +102,9 @@ func main() { } server := web.NewServer(idx, searchIdx, renderer, cfg.SessionsDir, cfg.ShareDir, cfg.ShareAddr, cfg.Theme) + server.EnableActive(activeIdx, activeState, 15*time.Second) + server.EnableNotifications(notificationStore) + server.EnableRepoOverrides(repositoryOverrideStore) if htmlBucketClient != nil { server.EnableHTMLBucket(htmlBucketClient) log.Printf("Using htmlbucket share backend (%s)", htmlBucketAuthPath) diff --git a/internal/active/index.go b/internal/active/index.go new file mode 100644 index 0000000..1c9f34b --- /dev/null +++ b/internal/active/index.go @@ -0,0 +1,424 @@ +package active + +import ( + "bufio" + "encoding/json" + "fmt" + "os" + "path" + "sort" + "strings" + "sync" + "time" + + "codex-manager/internal/sessions" +) + +// WaitState represents what the active thread is waiting on. +type WaitState string + +const ( + WaitStateUser WaitState = "user_waiting" + WaitStateAgent WaitState = "agent_waiting" +) + +// Snippet is a short user/agent preview shown in the active list. +type Snippet struct { + Text string + Title string + SpeakerClass string +} + +// Summary is the active-thread summary for a single top-level session file. 
+type Summary struct { + Key string + SessionID string + Date sessions.DateKey + Name string + Path string + DisplayName string + ThreadName string + Cwd string + Branch string + ResumeCommand string + Size int64 + ModTime time.Time + LastActivityAt time.Time + ActivityToken string + WaitState WaitState + LastUserSnippet Snippet + LastAssistantSnippet Snippet + HasUserMessage bool +} + +const thinkingPlaceholder = "Thinking..." + +type fileIndex struct { + size int64 + modTime time.Time + threadName string + summary Summary +} + +// Index caches active summaries derived from session files. +type Index struct { + mu sync.RWMutex + files map[string]fileIndex + byKey map[string]Summary + ordered []Summary + updated time.Time +} + +// NewIndex creates an empty active summary index. +func NewIndex() *Index { + return &Index{ + files: map[string]fileIndex{}, + byKey: map[string]Summary{}, + } +} + +// LastUpdated reports when RefreshFrom last succeeded. +func (idx *Index) LastUpdated() time.Time { + idx.mu.RLock() + defer idx.mu.RUnlock() + return idx.updated +} + +// Summaries returns all cached summaries sorted by last activity descending. +func (idx *Index) Summaries() []Summary { + idx.mu.RLock() + defer idx.mu.RUnlock() + out := make([]Summary, len(idx.ordered)) + copy(out, idx.ordered) + return out +} + +// Lookup returns a summary by its stable key. +func (idx *Index) Lookup(key string) (Summary, bool) { + idx.mu.RLock() + defer idx.mu.RUnlock() + summary, ok := idx.byKey[key] + return summary, ok +} + +// RefreshFrom rebuilds changed summaries from the sessions index. +func (idx *Index) RefreshFrom(sessionsIdx *sessions.Index) error { + dates := sessionsIdx.Dates() + files := make([]sessions.SessionFile, 0, len(dates)) + for _, date := range dates { + files = append(files, sessionsIdx.SessionsByDate(date)...) 
+ } + + idx.mu.RLock() + existing := idx.files + idx.mu.RUnlock() + + nextFiles := make(map[string]fileIndex, len(files)) + nextByKey := make(map[string]Summary, len(files)) + summaries := make([]Summary, 0, len(files)) + var firstErr error + + for _, file := range files { + if file.Meta != nil && file.Meta.IsSubagentThread() { + continue + } + if cached, ok := existing[file.Path]; ok && cached.size == file.Size && cached.modTime.Equal(file.ModTime) && cached.threadName == file.ThreadName { + nextFiles[file.Path] = cached + nextByKey[cached.summary.Key] = cached.summary + summaries = append(summaries, cached.summary) + continue + } + + summary, err := buildSummary(file) + if err != nil { + if firstErr == nil { + firstErr = err + } + if cached, ok := existing[file.Path]; ok { + nextFiles[file.Path] = cached + nextByKey[cached.summary.Key] = cached.summary + summaries = append(summaries, cached.summary) + } + continue + } + + entry := fileIndex{ + size: file.Size, + modTime: file.ModTime, + threadName: file.ThreadName, + summary: summary, + } + nextFiles[file.Path] = entry + nextByKey[summary.Key] = summary + summaries = append(summaries, summary) + } + + sort.Slice(summaries, func(i, j int) bool { + if summaries[i].LastActivityAt.Equal(summaries[j].LastActivityAt) { + if summaries[i].ModTime.Equal(summaries[j].ModTime) { + if summaries[i].Date.String() == summaries[j].Date.String() { + return summaries[i].Name < summaries[j].Name + } + return summaries[i].Date.String() > summaries[j].Date.String() + } + return summaries[i].ModTime.After(summaries[j].ModTime) + } + return summaries[i].LastActivityAt.After(summaries[j].LastActivityAt) + }) + + idx.mu.Lock() + idx.files = nextFiles + idx.byKey = nextByKey + idx.ordered = summaries + idx.updated = time.Now() + idx.mu.Unlock() + return firstErr +} + +func buildSummary(file sessions.SessionFile) (Summary, error) { + activity, err := scanActivity(file) + if err != nil { + return Summary{}, err + } + + userSnippet, 
assistantSnippet, hasUser, assistantAfterLatestUser, err := extractSnippets(file.Path, file.Meta) + if err != nil { + userSnippet = Snippet{Title: "User", SpeakerClass: "user"} + assistantSnippet = Snippet{Title: "Agent", SpeakerClass: "agent"} + hasUser = false + assistantAfterLatestUser = false + } + if activity.WaitState == WaitStateAgent && hasUser && !assistantAfterLatestUser { + assistantSnippet.Text = thinkingPlaceholder + } + + return Summary{ + Key: summaryKey(file), + SessionID: sessionID(file.Meta), + Date: file.Date, + Name: file.Name, + Path: file.Path, + DisplayName: file.DisplayName(), + ThreadName: file.ThreadName, + Cwd: sessions.CwdForFile(file), + Branch: branchForMeta(file.Meta), + ResumeCommand: buildResumeCommand(file.Meta), + Size: file.Size, + ModTime: file.ModTime, + LastActivityAt: activity.LastActivityAt, + ActivityToken: activity.ActivityToken, + WaitState: activity.WaitState, + LastUserSnippet: userSnippet, + LastAssistantSnippet: assistantSnippet, + HasUserMessage: hasUser, + }, nil +} + +func summaryKey(file sessions.SessionFile) string { + if file.Meta != nil { + if id := strings.TrimSpace(file.Meta.ID); id != "" { + return "id:" + id + } + } + return "path:" + path.Join(file.Date.Path(), file.Name) +} + +func sessionID(meta *sessions.SessionMeta) string { + if meta == nil { + return "" + } + return strings.TrimSpace(meta.ID) +} + +func buildResumeCommand(meta *sessions.SessionMeta) string { + if meta == nil || strings.TrimSpace(meta.ID) == "" { + return "" + } + commands := make([]string, 0, 3) + if cwd := strings.TrimSpace(meta.Cwd); cwd != "" { + commands = append(commands, "cd "+shellQuote(cwd)) + } + if branch := branchForMeta(meta); branch != "" { + commands = append(commands, "git switch "+shellQuote(branch)) + } + commands = append(commands, "codex resume "+strings.TrimSpace(meta.ID)) + return strings.Join(commands, "\n") +} + +func shellQuote(value string) string { + if value == "" { + return "''" + } + return "'" + 
strings.ReplaceAll(value, "'", "'\"'\"'") + "'" +} + +func branchForMeta(meta *sessions.SessionMeta) string { + if meta == nil { + return "" + } + return meta.GitBranch() +} + +type activityInfo struct { + LastActivityAt time.Time + ActivityToken string + WaitState WaitState +} + +type activityEnvelope struct { + Timestamp string `json:"timestamp"` + Type string `json:"type"` + Payload json.RawMessage `json:"payload"` +} + +func scanActivity(file sessions.SessionFile) (activityInfo, error) { + f, err := os.Open(file.Path) + if err != nil { + return activityInfo{}, err + } + defer f.Close() + + scanner := bufio.NewScanner(f) + scanner.Buffer(make([]byte, 0, 64*1024), 4*1024*1024) + + var lastActivity time.Time + lineCount := 0 + waitState := WaitStateUser + + for scanner.Scan() { + line := strings.TrimSpace(scanner.Text()) + if line == "" { + continue + } + lineCount++ + + var env activityEnvelope + if err := json.Unmarshal([]byte(line), &env); err != nil { + continue + } + + if ts, ok := parseTimestamp(env.Timestamp); ok { + lastActivity = ts + } + + if env.Type != "event_msg" { + continue + } + var payload struct { + Type string `json:"type"` + } + if err := json.Unmarshal(env.Payload, &payload); err != nil { + continue + } + switch payload.Type { + case "task_started": + waitState = WaitStateAgent + case "task_complete": + waitState = WaitStateUser + } + } + if err := scanner.Err(); err != nil { + return activityInfo{}, err + } + + if lastActivity.IsZero() { + if file.Meta != nil { + if ts, ok := parseTimestamp(file.Meta.Timestamp); ok { + lastActivity = ts + } + } + if lastActivity.IsZero() { + lastActivity = file.ModTime + } + } + if lineCount == 0 { + lineCount = 1 + } + + return activityInfo{ + LastActivityAt: lastActivity, + ActivityToken: fmt.Sprintf("%d:%d:%d", file.Size, lastActivity.UnixNano(), lineCount), + WaitState: waitState, + }, nil +} + +func parseTimestamp(value string) (time.Time, bool) { + value = strings.TrimSpace(value) + if value == "" { + 
return time.Time{}, false + } + if parsed, err := time.Parse(time.RFC3339Nano, value); err == nil { + return parsed, true + } + if parsed, err := time.Parse(time.RFC3339, value); err == nil { + return parsed, true + } + return time.Time{}, false +} + +func extractSnippets(path string, meta *sessions.SessionMeta) (Snippet, Snippet, bool, bool, error) { + session, err := sessions.ParseSession(path) + if err != nil { + return Snippet{}, Snippet{}, false, false, err + } + + userSnippet := Snippet{ + Title: "User", + SpeakerClass: "user", + } + assistantSnippet := Snippet{ + Title: "Agent", + SpeakerClass: "agent", + } + if session.Meta != nil && session.Meta.IsSubagentThread() { + userSnippet.Title = "Agent" + userSnippet.SpeakerClass = "agent" + assistantSnippet.Title = "Subagent" + assistantSnippet.SpeakerClass = "subagent" + } else if meta != nil && meta.IsSubagentThread() { + userSnippet.Title = "Agent" + userSnippet.SpeakerClass = "agent" + assistantSnippet.Title = "Subagent" + assistantSnippet.SpeakerClass = "subagent" + } + + hasUser := false + lastUserIndex := -1 + lastAssistantIndex := -1 + for index, item := range session.Items { + switch item.Role { + case "user": + if sessions.IsAutoContextUserMessage(item.Content) { + continue + } + hasUser = true + userSnippet.Text = item.Content + lastUserIndex = index + case "assistant": + assistantSnippet.Text = item.Content + lastAssistantIndex = index + } + } + userSnippet.Text = snippetFromContent(userSnippet.Text, 180) + assistantSnippet.Text = snippetFromContent(assistantSnippet.Text, 180) + return userSnippet, assistantSnippet, hasUser, lastAssistantIndex > lastUserIndex, nil +} + +func snippetFromContent(value string, max int) string { + value = strings.TrimSpace(value) + if value == "" { + return "" + } + value = strings.Join(strings.Fields(value), " ") + if max <= 0 { + return value + } + runes := []rune(value) + if len(runes) <= max { + return value + } + if max > 3 { + return string(runes[:max-3]) + "..." 
+ } + return string(runes[:max]) +} diff --git a/internal/active/index_test.go b/internal/active/index_test.go new file mode 100644 index 0000000..35448c9 --- /dev/null +++ b/internal/active/index_test.go @@ -0,0 +1,159 @@ +package active + +import ( + "os" + "path/filepath" + "testing" + + "codex-manager/internal/sessions" +) + +func TestIndexRefreshFromBuildsTopLevelSummary(t *testing.T) { + root := t.TempDir() + sessionsDir := filepath.Join(root, "sessions") + dateDir := filepath.Join(sessionsDir, "2026", "03", "18") + if err := os.MkdirAll(dateDir, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + if err := os.WriteFile(filepath.Join(root, "session_index.jsonl"), []byte("{\"id\":\"parent-1\",\"thread_name\":\"parser bug\",\"updated_at\":\"2026-03-18T08:00:00Z\"}\n"), 0o600); err != nil { + t.Fatalf("write session index: %v", err) + } + + parentPath := filepath.Join(dateDir, "parent.jsonl") + parentData := "" + + "{\"timestamp\":\"2026-03-18T07:59:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"parent-1\",\"timestamp\":\"2026-03-18T07:59:00Z\",\"cwd\":\"/tmp/project\",\"git\":{\"branch\":\"feature/parser-fix\",\"commit_hash\":\"abc123\"},\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-18T07:59:10Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## My request for Codex:\\nFix the parser\"}]}}\n" + + "{\"timestamp\":\"2026-03-18T07:59:12Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"assistant\",\"content\":[{\"type\":\"output_text\",\"text\":\"Investigating now.\"}]}}\n" + + "{\"timestamp\":\"2026-03-18T08:00:00Z\",\"type\":\"event_msg\",\"payload\":{\"type\":\"task_started\"}}\n" + if err := os.WriteFile(parentPath, []byte(parentData), 0o600); err != nil { + t.Fatalf("write parent session: %v", err) + } + + subagentPath := filepath.Join(dateDir, "subagent.jsonl") + subagentData := "" + + 
"{\"timestamp\":\"2026-03-18T08:01:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"agent-1\",\"forked_from_id\":\"parent-1\",\"timestamp\":\"2026-03-18T08:01:00Z\",\"cwd\":\"/tmp/project\",\"originator\":\"cli\",\"cli_version\":\"0.1\",\"source\":{\"subagent\":{\"thread_spawn\":{\"parent_thread_id\":\"parent-1\",\"depth\":1,\"agent_nickname\":\"Anscombe\",\"agent_role\":\"explorer\"}}}}}\n" + + "{\"timestamp\":\"2026-03-18T08:01:10Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"assistant\",\"content\":[{\"type\":\"output_text\",\"text\":\"Subagent work.\"}]}}\n" + if err := os.WriteFile(subagentPath, []byte(subagentData), 0o600); err != nil { + t.Fatalf("write subagent session: %v", err) + } + + idx := sessions.NewIndex(sessionsDir) + if err := idx.Refresh(); err != nil { + t.Fatalf("refresh sessions: %v", err) + } + + activeIdx := NewIndex() + if err := activeIdx.RefreshFrom(idx); err != nil { + t.Fatalf("refresh active index: %v", err) + } + + summaries := activeIdx.Summaries() + if len(summaries) != 1 { + t.Fatalf("expected 1 top-level summary, got %d", len(summaries)) + } + + summary := summaries[0] + if summary.Key != "id:parent-1" { + t.Fatalf("unexpected key: %q", summary.Key) + } + if summary.DisplayName != "parser bug (parent.jsonl)" { + t.Fatalf("unexpected display name: %q", summary.DisplayName) + } + if summary.WaitState != WaitStateAgent { + t.Fatalf("expected agent wait state, got %q", summary.WaitState) + } + if summary.LastActivityAt.Format("2006-01-02T15:04:05Z07:00") != "2026-03-18T08:00:00Z" { + t.Fatalf("unexpected last activity: %s", summary.LastActivityAt.Format(timeLayout)) + } + if summary.LastUserSnippet.Text != "Fix the parser" { + t.Fatalf("unexpected user snippet: %q", summary.LastUserSnippet.Text) + } + if summary.LastAssistantSnippet.Text != "Investigating now." 
{ + t.Fatalf("unexpected assistant snippet: %q", summary.LastAssistantSnippet.Text) + } + if summary.Branch != "feature/parser-fix" { + t.Fatalf("unexpected branch: %q", summary.Branch) + } + if summary.ResumeCommand != "cd '/tmp/project'\ngit switch 'feature/parser-fix'\ncodex resume parent-1" { + t.Fatalf("unexpected resume command: %q", summary.ResumeCommand) + } +} + +func TestIndexRefreshFromUsesThinkingPlaceholderWhenLatestUserHasNoAssistantReply(t *testing.T) { + root := t.TempDir() + sessionsDir := filepath.Join(root, "sessions") + dateDir := filepath.Join(sessionsDir, "2026", "03", "19") + if err := os.MkdirAll(dateDir, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + filePath := filepath.Join(dateDir, "thinking.jsonl") + data := "" + + "{\"timestamp\":\"2026-03-19T01:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-thinking\",\"timestamp\":\"2026-03-19T01:00:00Z\",\"cwd\":\"/tmp/project\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-19T01:00:05Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## My request for Codex:\\nEarlier request\"}]}}\n" + + "{\"timestamp\":\"2026-03-19T01:00:10Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"assistant\",\"content\":[{\"type\":\"output_text\",\"text\":\"Older assistant message.\"}]}}\n" + + "{\"timestamp\":\"2026-03-19T01:00:20Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## My request for Codex:\\nNewest request\"}]}}\n" + + "{\"timestamp\":\"2026-03-19T01:00:30Z\",\"type\":\"event_msg\",\"payload\":{\"type\":\"task_started\"}}\n" + if err := os.WriteFile(filePath, []byte(data), 0o600); err != nil { + t.Fatalf("write session: %v", err) + } + + idx := sessions.NewIndex(sessionsDir) + if err := idx.Refresh(); err != nil { + t.Fatalf("refresh sessions: %v", err) + } 
+ + activeIdx := NewIndex() + if err := activeIdx.RefreshFrom(idx); err != nil { + t.Fatalf("refresh active index: %v", err) + } + + summaries := activeIdx.Summaries() + if len(summaries) != 1 { + t.Fatalf("expected 1 summary, got %d", len(summaries)) + } + + summary := summaries[0] + if summary.WaitState != WaitStateAgent { + t.Fatalf("expected agent wait state, got %q", summary.WaitState) + } + if summary.LastUserSnippet.Text != "Newest request" { + t.Fatalf("unexpected user snippet: %q", summary.LastUserSnippet.Text) + } + if summary.LastAssistantSnippet.Text != thinkingPlaceholder { + t.Fatalf("expected thinking placeholder, got %q", summary.LastAssistantSnippet.Text) + } +} + +func TestStateStoreReconcileClearsEndedMarkOnNewActivity(t *testing.T) { + path := filepath.Join(t.TempDir(), "session_state.json") + + store, err := LoadStateStore(path) + if err != nil { + t.Fatalf("load store: %v", err) + } + if err := store.MarkEnded("id:session-1", "token-1"); err != nil { + t.Fatalf("mark ended: %v", err) + } + if got := len(store.Snapshot()); got != 1 { + t.Fatalf("expected 1 ended mark, got %d", got) + } + + if err := store.Reconcile([]Summary{{Key: "id:session-1", ActivityToken: "token-2"}}); err != nil { + t.Fatalf("reconcile: %v", err) + } + if got := len(store.Snapshot()); got != 0 { + t.Fatalf("expected ended mark to clear, got %d", got) + } + + reloaded, err := LoadStateStore(path) + if err != nil { + t.Fatalf("reload store: %v", err) + } + if got := len(reloaded.Snapshot()); got != 0 { + t.Fatalf("expected persisted ended mark to clear, got %d", got) + } +} + +const timeLayout = "2006-01-02T15:04:05Z07:00" diff --git a/internal/active/state.go b/internal/active/state.go new file mode 100644 index 0000000..1bedd71 --- /dev/null +++ b/internal/active/state.go @@ -0,0 +1,182 @@ +package active + +import ( + "encoding/json" + "errors" + "os" + "path/filepath" + "sync" + "time" +) + +const stateFileVersion = 1 + +// EndedMark is the persisted manual end marker 
for a session. +type EndedMark struct { + ActivityToken string `json:"activity_token,omitempty"` + EndedAt time.Time `json:"ended_at,omitempty"` +} + +type persistedState struct { + Version int `json:"version"` + Ended map[string]EndedMark `json:"ended,omitempty"` +} + +// StateStore persists manual ended/reopened flags independently from session files. +type StateStore struct { + path string + mu sync.RWMutex + data persistedState +} + +// DefaultStatePath returns the default path for persisted session-state metadata. +func DefaultStatePath(sessionsDir string) string { + return filepath.Join(filepath.Dir(sessionsDir), "session_state.json") +} + +// LoadStateStore opens or creates an empty state store. +func LoadStateStore(path string) (*StateStore, error) { + store := &StateStore{ + path: path, + data: persistedState{ + Version: stateFileVersion, + Ended: map[string]EndedMark{}, + }, + } + if path == "" { + return store, nil + } + + data, err := os.ReadFile(path) + if err != nil { + if errors.Is(err, os.ErrNotExist) { + return store, nil + } + return nil, err + } + if len(data) == 0 { + return store, nil + } + + var decoded persistedState + if err := json.Unmarshal(data, &decoded); err != nil { + return nil, err + } + if decoded.Version == 0 { + decoded.Version = stateFileVersion + } + if decoded.Ended == nil { + decoded.Ended = map[string]EndedMark{} + } + store.data = decoded + return store, nil +} + +// Path returns the backing file path. +func (s *StateStore) Path() string { + if s == nil { + return "" + } + return s.path +} + +// Snapshot returns a copy of the current ended marks. +func (s *StateStore) Snapshot() map[string]EndedMark { + if s == nil { + return nil + } + s.mu.RLock() + defer s.mu.RUnlock() + out := make(map[string]EndedMark, len(s.data.Ended)) + for key, value := range s.data.Ended { + out[key] = value + } + return out +} + +// MarkEnded stores the current activity token as manually ended. 
+func (s *StateStore) MarkEnded(key string, token string) error { + if s == nil || key == "" { + return nil + } + s.mu.Lock() + defer s.mu.Unlock() + s.data.Version = stateFileVersion + if s.data.Ended == nil { + s.data.Ended = map[string]EndedMark{} + } + s.data.Ended[key] = EndedMark{ + ActivityToken: token, + EndedAt: time.Now().UTC(), + } + return s.saveLocked() +} + +// Reopen removes an ended mark. +func (s *StateStore) Reopen(key string) error { + if s == nil || key == "" { + return nil + } + s.mu.Lock() + defer s.mu.Unlock() + if len(s.data.Ended) == 0 { + return nil + } + if _, ok := s.data.Ended[key]; !ok { + return nil + } + delete(s.data.Ended, key) + return s.saveLocked() +} + +// Reconcile clears ended markers when the underlying session received new activity. +func (s *StateStore) Reconcile(summaries []Summary) error { + if s == nil { + return nil + } + current := make(map[string]string, len(summaries)) + for _, summary := range summaries { + current[summary.Key] = summary.ActivityToken + } + + s.mu.Lock() + defer s.mu.Unlock() + if len(s.data.Ended) == 0 { + return nil + } + + changed := false + for key, mark := range s.data.Ended { + token, ok := current[key] + if !ok || token == "" || mark.ActivityToken == "" { + continue + } + if mark.ActivityToken != token { + delete(s.data.Ended, key) + changed = true + } + } + if !changed { + return nil + } + return s.saveLocked() +} + +func (s *StateStore) saveLocked() error { + if s.path == "" { + return nil + } + if err := os.MkdirAll(filepath.Dir(s.path), 0o700); err != nil { + return err + } + payload, err := json.MarshalIndent(s.data, "", " ") + if err != nil { + return err + } + payload = append(payload, '\n') + tmpPath := s.path + ".tmp" + if err := os.WriteFile(tmpPath, payload, 0o600); err != nil { + return err + } + return os.Rename(tmpPath, s.path) +} diff --git a/internal/notifications/store.go b/internal/notifications/store.go new file mode 100644 index 0000000..3f2c816 --- /dev/null +++ 
b/internal/notifications/store.go @@ -0,0 +1,211 @@ +package notifications + +import ( + "bufio" + "crypto/rand" + "encoding/hex" + "encoding/json" + "errors" + "os" + "path/filepath" + "sort" + "strings" + "sync" + "time" +) + +// Entry is one received notification request. +type Entry struct { + ID string `json:"id"` + ReceivedAt time.Time `json:"received_at"` + Method string `json:"method"` + Path string `json:"path"` + ContentType string `json:"content_type,omitempty"` + UserAgent string `json:"user_agent,omitempty"` + RemoteAddr string `json:"remote_addr,omitempty"` + Headers map[string][]string `json:"headers,omitempty"` + Body string `json:"body,omitempty"` + PrettyBody string `json:"pretty_body,omitempty"` + IsJSON bool `json:"is_json,omitempty"` + Size int `json:"size"` + Preview string `json:"preview,omitempty"` +} + +// Store persists received notifications and keeps them in memory for rendering. +type Store struct { + path string + mu sync.RWMutex + entries []Entry +} + +// DefaultPath returns the default log file for received notifications. +func DefaultPath(sessionsDir string) string { + return filepath.Join(filepath.Dir(sessionsDir), "notifications.jsonl") +} + +// LoadStore opens or creates an empty store. 
+func LoadStore(path string) (*Store, error) { + store := &Store{path: path, entries: []Entry{}} + if path == "" { + return store, nil + } + + file, err := os.Open(path) + if err != nil { + if errors.Is(err, os.ErrNotExist) { + return store, nil + } + return nil, err + } + defer file.Close() + + scanner := bufio.NewScanner(file) + scanner.Buffer(make([]byte, 0, 64*1024), 4*1024*1024) + for scanner.Scan() { + line := strings.TrimSpace(scanner.Text()) + if line == "" { + continue + } + var entry Entry + if err := json.Unmarshal([]byte(line), &entry); err != nil { + return nil, err + } + store.entries = append(store.entries, entry) + } + if err := scanner.Err(); err != nil { + return nil, err + } + return store, nil +} + +// AppendRequest records one incoming request body and metadata. +func (s *Store) AppendRequest(method string, path string, contentType string, userAgent string, remoteAddr string, headers map[string][]string, body []byte) (Entry, error) { + entry, err := buildEntry(method, path, contentType, userAgent, remoteAddr, headers, body) + if err != nil { + return Entry{}, err + } + + s.mu.Lock() + defer s.mu.Unlock() + + s.entries = append(s.entries, entry) + if err := s.appendLocked(entry); err != nil { + s.entries = s.entries[:len(s.entries)-1] + return Entry{}, err + } + return entry, nil +} + +// Entries returns notifications ordered from newest to oldest. 
func (s *Store) Entries() []Entry {
	s.mu.RLock()
	defer s.mu.RUnlock()
	if len(s.entries) == 0 {
		return nil
	}
	// Copy in reverse so callers get newest-first and cannot mutate the
	// store's backing slice.
	out := make([]Entry, len(s.entries))
	for i := range s.entries {
		out[len(s.entries)-1-i] = s.entries[i]
	}
	return out
}

// buildEntry normalizes one received request into an Entry: random ID,
// UTF-8-sanitized body, pretty-printed JSON when the body parses, and
// trimmed metadata fields. It returns an error only if ID generation fails.
func buildEntry(method string, path string, contentType string, userAgent string, remoteAddr string, headers map[string][]string, body []byte) (Entry, error) {
	id, err := randomID()
	if err != nil {
		return Entry{}, err
	}

	// Replace invalid UTF-8 with U+FFFD and strip NUL bytes so the body is
	// always safe to store as a string and render later.
	text := strings.ToValidUTF8(string(body), "\uFFFD")
	text = strings.ReplaceAll(text, "\x00", "")
	pretty := ""
	isJSON := false
	trimmed := strings.TrimSpace(text)
	if trimmed != "" {
		// Best effort: if the body is valid JSON, keep an indented copy
		// for display; otherwise fall through with pretty == "".
		var payload any
		if err := json.Unmarshal([]byte(trimmed), &payload); err == nil {
			formatted, err := json.MarshalIndent(payload, "", " ")
			if err == nil {
				pretty = string(formatted)
				isJSON = true
			}
		}
	}

	return Entry{
		ID:          id,
		ReceivedAt:  time.Now().UTC(),
		Method:      strings.ToUpper(strings.TrimSpace(method)),
		Path:        strings.TrimSpace(path),
		ContentType: strings.TrimSpace(contentType),
		UserAgent:   strings.TrimSpace(userAgent),
		RemoteAddr:  strings.TrimSpace(remoteAddr),
		Headers:     cloneHeaders(headers),
		Body:        text,
		PrettyBody:  pretty,
		IsJSON:      isJSON,
		Size:        len(body), // size of the raw body, before sanitization
		Preview:     previewText(text, 160),
	}, nil
}

// cloneHeaders deep-copies a header map so stored entries do not alias the
// request's map. Values are sorted per key for deterministic output; an
// empty or nil input yields nil.
func cloneHeaders(headers map[string][]string) map[string][]string {
	if len(headers) == 0 {
		return nil
	}
	out := make(map[string][]string, len(headers))
	for key, values := range headers {
		copied := append([]string(nil), values...)
		sort.Strings(copied)
		out[key] = copied
	}
	return out
}

// appendLocked persists one entry as a single JSON line at s.path.
// Callers must already hold s.mu. A store without a path is in-memory
// only and persists nothing.
func (s *Store) appendLocked(entry Entry) error {
	if s.path == "" {
		return nil
	}
	// 0o700/0o600: notification payloads may contain sensitive data, so
	// keep the directory and file private to the current user.
	if err := os.MkdirAll(filepath.Dir(s.path), 0o700); err != nil {
		return err
	}
	payload, err := json.Marshal(entry)
	if err != nil {
		return err
	}
	payload = append(payload, '\n')
	file, err := os.OpenFile(s.path, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o600)
	if err != nil {
		return err
	}
	defer file.Close()
	_, err = file.Write(payload)
	return err
}

// randomID returns 8 cryptographically random bytes as 16 hex characters.
func randomID() (string, error) {
	buf := make([]byte, 8)
	if _, err := rand.Read(buf); err != nil {
		return "", err
	}
	return hex.EncodeToString(buf), nil
}

// previewText collapses value's whitespace runs to single spaces and
// truncates the result to at most max runes, appending "..." when room
// allows. max <= 0 disables truncation.
func previewText(value string, max int) string {
	value = strings.TrimSpace(value)
	if value == "" {
		return ""
	}
	value = strings.Join(strings.Fields(value), " ")
	if max <= 0 {
		return value
	}
	// Work in runes so multi-byte characters are never split.
	runes := []rune(value)
	if len(runes) <= max {
		return value
	}
	if max > 3 {
		return string(runes[:max-3]) + "..."
	}
	// Tiny max (1..3): hard cut with no room for an ellipsis.
	return string(runes[:max])
}
diff --git a/internal/notifications/store_test.go b/internal/notifications/store_test.go
new file mode 100644
index 0000000..dd1cdf4
--- /dev/null
+++ b/internal/notifications/store_test.go
@@ -0,0 +1,55 @@
package notifications

import (
	"path/filepath"
	"testing"
)

// TestStoreAppendRequestPersistsAndLoads exercises the full round trip:
// append one request, then reload the store from disk and verify the entry
// survived with an ID, pretty-printed JSON, a preview, and sorted header
// values ("b","a" must come back as "a" first).
func TestStoreAppendRequestPersistsAndLoads(t *testing.T) {
	path := filepath.Join(t.TempDir(), "notifications.jsonl")

	store, err := LoadStore(path)
	if err != nil {
		t.Fatalf("load store: %v", err)
	}

	entry, err := store.AppendRequest(
		"post",
		"/hook",
		"application/json",
		"curl/8.0",
		"127.0.0.1:12345",
		map[string][]string{"X-Test": {"b", "a"}},
		[]byte("{\"type\":\"task_complete\",\"message\":\"done\"}"),
	)
	if err != nil {
		t.Fatalf("append request: %v", err)
	}
	if entry.ID == "" {
		t.Fatal("expected entry id")
	}
	if !entry.IsJSON {
		t.Fatal("expected json body")
	}
	if entry.PrettyBody == "" {
		t.Fatal("expected pretty json body")
	}
	if entry.Preview == "" {
		t.Fatal("expected preview")
	}

	// Reload from the same file to prove the entry was persisted.
	reloaded, err := LoadStore(path)
	if err != nil {
		t.Fatalf("reload store: %v", err)
	}
	entries := reloaded.Entries()
	if len(entries) != 1 {
		t.Fatalf("expected 1 entry, got %d", len(entries))
	}
	if entries[0].ID != entry.ID {
		t.Fatalf("expected matching id, got %q vs %q", entries[0].ID, entry.ID)
	}
	if entries[0].Headers["X-Test"][0] != "a" {
		t.Fatalf("expected sorted header values, got %#v", entries[0].Headers["X-Test"])
	}
}
diff --git a/internal/render/templates/active.html b/internal/render/templates/active.html
new file mode 100644
index 0000000..fdf45de
--- /dev/null
+++ b/internal/render/templates/active.html
@@ -0,0 +1,233 @@
+{{ define "active" }} + + + + + + {{ .Heading }} - Codex Sessions + {{ template "style" . }} + + +
+

All dates / All directories / Notifications

+

{{ .Heading }}

+ {{ if .SelectedCwd }} +

Directory filter active: {{ .SelectedCwdLabel }}. Clear directory filter

+ {{ end }} +
+ {{ range .Tabs }} + {{ .Label }} + {{ end }} +
+
+
+ {{ template "active_content" . }} +
+ + + +{{ end }} + +{{ define "active_content" }} +
+
+

Showing {{ .ThreadCount }} thread{{ if ne .ThreadCount 1 }}s{{ end }}{{ if .TimeZone }} | Time zone: {{ .TimeZone }}{{ end }}

+ +
+ {{ if .ShowDayNav }} +
+ ⬅️ Previous day + {{ .SelectedDate }} + Next day ➡️ +
+ {{ end }} +

Last scan: {{ .LastScan }}

+
+ +
+ {{ if .Threads }} + + {{ else }} +

{{ .EmptyMessage }}

+ {{ end }} +
+{{ end }} diff --git a/internal/render/templates/day.html b/internal/render/templates/day.html index fa8fce1..530034e 100644 --- a/internal/render/templates/day.html +++ b/internal/render/templates/day.html @@ -9,15 +9,121 @@
-

All dates{{ if .SelectedCwd }} / All directories / Directory dates{{ end }}

+

All dates / Active threads / Notifications{{ if .SelectedCwd }} / All directories / Directory dates{{ end }}

Sessions on {{ .Date.Label }}{{ if .SelectedCwdLabel }} – {{ .SelectedCwdLabel }}{{ end }}

{{ if .SelectedCwd }} -

Directory filter active. Clear filter / View directory dates

+

Directory filter active. Clear directory filter

+ {{ end }} +
+ {{ if .SelectedCwd }}{{ end }} + + +
+ {{ if .ActiveTabs }} +
+ {{ range .ActiveTabs }} + {{ .Label }} + {{ end }} +
{{ end }}
+ {{ if .SelectedCwd }} +
+ + +

+ +
+ {{ end }} + {{ if .FallbackDate }} +
+

No sessions found for this directory today.

+
+
+

--- Previous Day ---

+
+

Filter by directory (click to filter sessions below)

+ +
+ {{ if or .HasPrev .HasNext (gt .TotalPages 1) }} + + {{ end }} +
+ {{ if .FallbackSessions }} + + {{ else }} +

No sessions found for this directory.

+ {{ end }} +
+ {{ else }} {{ if .Dirs }} -
+

Filter by directory (click to filter sessions below)

{{ end }} -
+ {{ if or .HasPrev .HasNext (gt .TotalPages 1) }} + + {{ end }} +
{{ if .Sessions }} @@ -48,7 +200,267 @@

Sessions on {{ .Date.Label }}{{ if .SelectedCwdLabel }}

No sessions found{{ if .SelectedCwdLabel }} for this directory{{ end }}.

{{ end }}

+ {{ end }}
+ {{ end }} diff --git a/internal/render/templates/dir.html b/internal/render/templates/dir.html index 90ee7b4..d854157 100644 --- a/internal/render/templates/dir.html +++ b/internal/render/templates/dir.html @@ -9,20 +9,64 @@
-

All directories

-

Dates for {{ .Dir.Label }}

+

All directories / Active threads / Notifications

+

Sessions for {{ .Dir.Label }}

{{ .Dir.Count }} session{{ if ne .Dir.Count 1 }}s{{ end }}

+
+ + + +
+ {{ if or .HasPrev .HasNext (gt .TotalPages 1) }} + + {{ end }}
- {{ if .Dates }} + {{ if .Sessions }} @@ -31,6 +75,17 @@

Dates for {{ .Dir.Label }}

{{ end }}
+ {{ end }} diff --git a/internal/render/templates/index.html b/internal/render/templates/index.html index 4ddfd7b..79995b8 100644 --- a/internal/render/templates/index.html +++ b/internal/render/templates/index.html @@ -9,7 +9,7 @@
-

Codex sessions browser

+

Codex sessions browser / Active threads / Notifications

{{ if eq .View "dir" }}Available Directories{{ else }}Available Dates{{ end }}

By date @@ -136,12 +136,16 @@

{{ if eq .View "dir" }}Available Directories{{ else }}Ava setStatus(data.results.length + " result" + (data.results.length === 1 ? "" : "s") + "."); data.results.forEach(function (item) { var li = document.createElement("li"); - li.className = "search-result"; + li.className = "session-list-item search-result"; var link = document.createElement("a"); - link.className = "search-result-link"; - link.href = "/" + item.path + "/" + item.file + "#line-" + item.line; - link.textContent = item.file; + link.className = "link-item-link search-result-link"; + var targetLine = item.line; + if (item.role === "assistant" && item.prevUserLine) { + targetLine = item.prevUserLine; + } + link.href = "/" + item.path + "/" + item.file + "#line-" + targetLine; + link.textContent = item.displayFile || item.file; var meta = document.createElement("span"); meta.className = "meta search-result-meta"; @@ -163,11 +167,57 @@

{{ if eq .View "dir" }}Available Directories{{ else }}Ava li.appendChild(link); li.appendChild(meta); - if (item.preview) { - var snippet = document.createElement("div"); - snippet.className = "search-result-snippet"; - highlightText(snippet, item.preview, query); - li.appendChild(snippet); + var thread = document.createElement("div"); + thread.className = "session-snippet session-snippet-thread"; + var hasUserSnippet = false; + + function addSnippet(role, text, highlight, line) { + if (!text) return; + var snippetLink = document.createElement("a"); + snippetLink.className = "snippet-link"; + var resolvedLine = line && line > 0 ? line : targetLine; + snippetLink.href = "/" + item.path + "/" + item.file + "#line-" + resolvedLine; + + var itemBox = document.createElement("div"); + itemBox.className = "session-item role-" + role + " snippet-item"; + + var content = document.createElement("div"); + content.className = "session-content"; + if (highlight) { + highlightText(content, text, query); + } else { + content.textContent = text; + } + + itemBox.appendChild(content); + snippetLink.appendChild(itemBox); + thread.appendChild(snippetLink); + } + + if (item.role === "user") { + hasUserSnippet = !!item.preview; + addSnippet("user", item.preview, true, item.line); + if (item.nextAssistant) { + addSnippet("assistant", item.nextAssistant, false, item.nextAssistantLine); + } + } else if (item.role === "assistant") { + if (item.prevUser) { + hasUserSnippet = true; + addSnippet("user", item.prevUser, false, item.prevUserLine); + } + addSnippet("assistant", item.preview, true, item.line); + } else if (item.preview) { + addSnippet(item.role || "unknown", item.preview, true, item.line); + } + + if (thread.childNodes.length > 0) { + if (hasUserSnippet) { + var divider = document.createElement("div"); + divider.className = "snippet-divider"; + divider.textContent = ":"; + thread.insertBefore(divider, thread.firstChild); + } + li.appendChild(thread); } results.appendChild(li); diff 
--git a/internal/render/templates/notifications.html b/internal/render/templates/notifications.html new file mode 100644 index 0000000..018a228 --- /dev/null +++ b/internal/render/templates/notifications.html @@ -0,0 +1,175 @@ +{{ define "notifications" }} + + + + + + Notifications - Codex Sessions + {{ template "style" . }} + + +
+

All dates / Active threads

+

Notifications

+
+
+ {{ template "notifications_content" . }} +
+ + + +{{ end }} + +{{ define "notifications_content" }} +
+
+

Showing {{ len .Entries }} notification{{ if ne (len .Entries) 1 }}s{{ end }}

+ +
+

Last refresh: {{ .LastScan }}

+
+ +
+ {{ if .Entries }} + + {{ else }} +

{{ .EmptyMessage }}

+ {{ end }} +
+{{ end }} diff --git a/internal/render/templates/session.html b/internal/render/templates/session.html index e0480ea..3857559 100644 --- a/internal/render/templates/session.html +++ b/internal/render/templates/session.html @@ -4,32 +4,77 @@ - {{ .File.Name }} - Codex Session + {{ .File.DisplayName }} - Codex Session {{ template "style" . }}
+ {{ if .Meta }}
-

Session {{ .Meta.ID }}

-

CWD: {{ .Meta.Cwd }}

+

Session {{ .Meta.ID }}{{ if .IsSubagentThread }} Subagent thread{{ if .SubagentDisplayName }} {{ .SubagentDisplayName }}{{ end }}{{ if .SubagentDisplayRole }} {{ .SubagentDisplayRole }}{{ end }}{{ end }}

+ {{ if or .File.Cwd .File.Branch }}

{{ if .File.Cwd }}CWD: {{ .File.Cwd }}{{ end }}{{ if .File.Branch }}{{ if .File.Cwd }} | {{ end }}Branch: {{ .File.Branch }}{{ end }}

{{ end }}

Originator: {{ .Meta.Originator }} | CLI: {{ .Meta.CliVersion }}

+ {{ if .IsSubagentThread }} +

Conversation: Subagent{{ if .SubagentDisplayName }} {{ .SubagentDisplayName }}{{ end }}{{ if .SubagentDisplayRole }} ({{ .SubagentDisplayRole }}){{ end }} on the left, Agent on the right.{{ if and (not .ParentSessionPath) .ParentThreadID }} Parent session: {{ .ParentThreadID }}{{ end }}

+ {{ end }}
Instructions
{{ .Meta.Instructions }}
@@ -39,32 +84,81 @@

{{ .File.Name }}

{{ if .Items }} {{ range .Items }} -
+ {{ if .ToolRunGroupTitle }} +
+ + {{ .ToolRunGroupTitle }} + Lines {{ .Line }}-{{ .ToolRunGroupLastLine }} + +
+ {{ end }} +
+ {{ if eq .Line $.LastUserLine }}{{ end }} + {{ if eq .Line $.LastAgentLine }}{{ end }} + {{ if and (gt $.LastUserLine 0) (eq .Line $.LastUserLine) }}{{ else if and (eq $.LastUserLine 0) (eq .Line $.LastItemLine) (not .ToolRunOutputLine) }}{{ end }} + {{ if .IsTurnAborted }} +

{{ .TurnAbortedMessage }}

+ {{ else }} + {{ if not .ToolRunHideHeader }}
{{ .Title }} {{ .Type }}{{ if .Subtype }}:{{ .Subtype }}{{ end }} {{ .Timestamp }} - {{ if .Role }}{{ .Role }}{{ end }} + {{ if .RoleLabel }}{{ .RoleLabel }}{{ end }} {{ if .AutoCtx }}Auto context{{ end }} - Line {{ .Line }} - - + {{ if .SpeakerName }}{{ .SpeakerName }}{{ end }} + {{ if .SpeakerRole }}{{ .SpeakerRole }}{{ end }} + {{ if .SubagentStatusType }}{{ .SubagentStatusType }}{{ end }} + {{ if .SubagentID }}{{ .SubagentID }}{{ end }} + {{ if .SubagentSessionPath }}Open subagent thread{{ end }} + {{ if .ToolRunOutputLine }}Lines {{ .Line }} / {{ .ToolRunOutputLine }}{{ else }}Line {{ .Line }}{{ end }} +
- {{ if eq .Subtype "reasoning" }} + {{ end }} + {{ if .ToolRunOutputLine }} +
+

Tool call {{ .ToolRunCallTitle }} | {{ .Timestamp }} | Line {{ .Line }}{{ if .ToolRunHideHeader }}{{ end }}

+
{{ .HTML }}
+
+
+ + {{ if and (eq $.LastUserLine 0) (eq .ToolRunOutputLine $.LastItemLine) }}{{ end }} +

Tool output {{ .ToolRunOutputTitle }} | {{ .ToolRunOutputTime }} | Line {{ .ToolRunOutputLine }}

+
+ Reveal output +
{{ .ToolRunOutputHTML }}
+
+
+ {{ else if eq .Subtype "reasoning" }}
Reveal reasoning
{{ .HTML }}
+ {{ else if or (eq .Subtype "function_call_output") (eq .Subtype "custom_tool_call_output") }} +
+ Reveal output +
{{ .HTML }}
+
{{ else if .AutoCtx }}
Reveal Context
{{ .HTML }}
{{ else }} + {{ if .SubagentRequest }} +
+ Agent request +
{{ .SubagentRequestHTML }}
+
+ {{ end }}
{{ .HTML }}
{{ end }} - + {{ end }}
+ {{ if .ToolRunGroupEnd }} +
+
+ {{ end }} {{ end }} {{ else }}

No items found in this session.

@@ -82,6 +176,49 @@

{{ .File.Name }}

updateStickyOffset(); window.addEventListener("resize", updateStickyOffset); + function getStickyOffset() { + return stickyHeader ? stickyHeader.offsetHeight : 0; + } + function getElementTop(element) { + var rect = element.getBoundingClientRect(); + var scrollY = window.scrollY || window.pageYOffset || 0; + return rect.top + scrollY; + } + function scrollToElement(element, behavior) { + if (!element) return; + var offset = getStickyOffset() + 8; + var top = getElementTop(element) - offset; + if (top < 0) top = 0; + if (typeof window.scrollTo === "function") { + if (behavior) { + window.scrollTo({ top: top, behavior: behavior }); + } else { + window.scrollTo(0, top); + } + } else { + window.scrollTop = top; + } + } + function openAncestorDetails(element) { + var current = element; + while (current && current !== document.body) { + if (current.tagName === "DETAILS" && !current.open) { + current.open = true; + } + current = current.parentElement; + } + } + function collapsedAncestorSummary(element) { + var current = element; + while (current && current !== document.body) { + if (current.tagName === "DETAILS" && !current.open) { + return current.querySelector("summary"); + } + current = current.parentElement; + } + return null; + } + function copyText(text, target) { if (navigator.clipboard && navigator.clipboard.writeText) { navigator.clipboard.writeText(text).catch(function () {}); @@ -98,10 +235,26 @@

{{ .File.Name }}

document.body.removeChild(area); } + var copyCache = Object.create(null); + function fetchCopyText(url) { + if (copyCache[url]) { + return Promise.resolve(copyCache[url]); + } + return fetch(url, { credentials: "same-origin" }) + .then(function (response) { + if (!response.ok) throw new Error("copy fetch failed"); + return response.text(); + }) + .then(function (text) { + copyCache[url] = text; + return text; + }); + } + document.addEventListener("click", function (event) { var target = event.target; if (!(target instanceof HTMLElement)) return; - var trigger = target.closest("[data-copy-id],[data-copy-link]"); + var trigger = target.closest("[data-copy-id],[data-copy-link],[data-copy-url]"); if (!trigger) return; event.preventDefault(); var id = trigger.getAttribute("data-copy-id"); @@ -112,6 +265,15 @@

{{ .File.Name }}

copyText(text, trigger); return; } + var copyURL = trigger.getAttribute("data-copy-url"); + if (copyURL) { + fetchCopyText(copyURL) + .then(function (text) { + copyText(text, trigger); + }) + .catch(function () {}); + return; + } var anchorId = trigger.getAttribute("data-copy-link"); if (!anchorId) return; var origin = window.location.origin; @@ -122,6 +284,35 @@

{{ .File.Name }}

copyText(base + "#" + anchorId, trigger); }); + document.addEventListener("click", function (event) { + var target = event.target; + if (!(target instanceof HTMLElement)) return; + var stateTrigger = target.closest("[data-active-action]"); + if (!stateTrigger) return; + event.preventDefault(); + + var action = stateTrigger.getAttribute("data-active-action"); + var key = stateTrigger.getAttribute("data-active-key"); + if (!action || !key) return; + + fetch("/active/state", { + method: "POST", + credentials: "same-origin", + headers: { + "Content-Type": "application/x-www-form-urlencoded" + }, + body: "action=" + encodeURIComponent(action) + "&key=" + encodeURIComponent(key) + }) + .then(function (response) { + if (!response.ok) throw new Error("state update failed"); + return response.json(); + }) + .then(function () { + window.location.reload(); + }) + .catch(function () {}); + }); + var shareForm = document.querySelector(".share-form"); var shareBanner = document.getElementById("share-banner"); if (shareForm && shareBanner) { @@ -171,34 +362,15 @@

{{ .File.Name }}

var jumpNext = document.getElementById("jump-user-next"); if (jumpPrev || jumpNext) { var userSections = Array.prototype.slice.call( - document.querySelectorAll("section.session-item.role-user:not(.auto-context)") + document.querySelectorAll("section.session-item.role-user:not(.auto-context):not(.subagent-notification)") ); - var getStickyOffset = function () { - return stickyHeader ? stickyHeader.offsetHeight : 0; - }; - var getSectionTop = function (section) { - var rect = section.getBoundingClientRect(); - var scrollY = window.scrollY || window.pageYOffset || 0; - return rect.top + scrollY; - }; - var scrollToSection = function (section) { - if (!section) return; - var offset = getStickyOffset() + 8; - var top = getSectionTop(section) - offset; - if (top < 0) top = 0; - if (typeof window.scrollTo === "function") { - window.scrollTo({ top: top, behavior: "smooth" }); - } else { - window.scrollTop = top; - } - }; var findCurrentIndex = function () { var offset = getStickyOffset() + 8; var scrollY = window.scrollY || window.pageYOffset || 0; var position = scrollY + offset + 1; var current = -1; for (var i = 0; i < userSections.length; i++) { - if (getSectionTop(userSections[i]) <= position) { + if (getElementTop(userSections[i]) <= position) { current = i; } else { break; @@ -211,19 +383,38 @@

{{ .File.Name }}

if (!userSections.length) return; var index = findCurrentIndex(); var target = index <= 0 ? 0 : index - 1; - scrollToSection(userSections[target]); + scrollToElement(userSections[target], "smooth"); }; var goNext = function (event) { if (event) event.preventDefault(); if (!userSections.length) return; var index = findCurrentIndex(); var target = index < 0 ? 0 : Math.min(index + 1, userSections.length - 1); - scrollToSection(userSections[target]); + scrollToElement(userSections[target], "smooth"); }; if (jumpPrev) jumpPrev.addEventListener("click", goPrev); if (jumpNext) jumpNext.addEventListener("click", goNext); } + function handleInitialJump() { + var hash = window.location.hash || ""; + if (hash.length > 1) { + var target = document.getElementById(hash.slice(1)); + if (target) { + openAncestorDetails(target); + setTimeout(function () { scrollToElement(target); }, 0); + return; + } + } + var lastItem = document.getElementById("last-item"); + if (lastItem) { + var summary = collapsedAncestorSummary(lastItem); + setTimeout(function () { scrollToElement(summary || lastItem); }, 0); + } + } + handleInitialJump(); + window.addEventListener("hashchange", handleInitialJump); + })(); diff --git a/internal/render/templates/style.html b/internal/render/templates/style.html index 576065e..0560d7d 100644 --- a/internal/render/templates/style.html +++ b/internal/render/templates/style.html @@ -7,14 +7,25 @@ --muted: #8fa6a3; --accent: #49c1b5; --border: #243033; + --border-strong: #3b4a4f; --user: #15333a; - --assistant: #171f21; + --assistant: #223034; --tool: #1a2b27; --system: #1a2123; --error: #3a1f22; --bg-glow: #142529; --banner: #0f1718; --code: #0a1112; + --speaker-agent-bg: #163847; + --speaker-agent-border: rgba(102, 199, 232, 0.42); + --speaker-agent-glow: rgba(102, 199, 232, 0.16); + --speaker-subagent-bg: #18281c; + --speaker-subagent-border: rgba(146, 196, 96, 0.42); + --speaker-subagent-glow: rgba(146, 196, 96, 0.16); + --speaker-user-bg: #3a2918; + 
--speaker-user-border: rgba(211, 154, 82, 0.42); + --speaker-user-glow: rgba(211, 154, 82, 0.16); + --speaker-auto-bg: #172124; } body.theme-noir-blue { --bg: #0b0f14; @@ -23,8 +34,9 @@ --muted: #9aa5b1; --accent: #7fb6ff; --border: #263243; + --border-strong: #3b4a60; --user: #1e2f4d; - --assistant: #1a2230; + --assistant: #243041; --tool: #1b2b2a; --system: #1c2330; --error: #3a1f25; @@ -39,8 +51,9 @@ --muted: #b09b84; --accent: #e0a35c; --border: #332a21; + --border-strong: #4a3c2d; --user: #2c2318; - --assistant: #201b14; + --assistant: #2a241b; --tool: #2a2319; --system: #251f17; --error: #3a1f1a; @@ -55,8 +68,9 @@ --muted: #8fa6a3; --accent: #49c1b5; --border: #243033; + --border-strong: #3b4a4f; --user: #15333a; - --assistant: #171f21; + --assistant: #223034; --tool: #1a2b27; --system: #1a2123; --error: #3a1f22; @@ -71,8 +85,9 @@ --muted: #9eb39a; --accent: #9ad36a; --border: #243228; + --border-strong: #3b4b3c; --user: #1f2f23; - --assistant: #18211b; + --assistant: #223025; --tool: #1d2a1f; --system: #1b231d; --error: #3b1f20; @@ -87,8 +102,9 @@ --muted: #b59bb0; --accent: #e06c9f; --border: #33263a; + --border-strong: #4a3b4f; --user: #2f1c2a; - --assistant: #211820; + --assistant: #2b202a; --tool: #2a1c27; --system: #241a22; --error: #3b1f27; @@ -103,8 +119,9 @@ --muted: #9aaab5; --accent: #5cc7e6; --border: #24313a; + --border-strong: #3a4a56; --user: #1d2e3a; - --assistant: #181f24; + --assistant: #222c33; --tool: #1b2a2f; --system: #1b2226; --error: #3a1f23; @@ -142,6 +159,31 @@ color: var(--accent); text-decoration: none; } +.cwd-link { + color: var(--accent); + text-decoration: none; +} +.nav-btn { + border: 1px solid var(--border); + background: transparent; + color: var(--accent); + padding: 2px 8px; + border-radius: 999px; + font-size: 12px; + cursor: pointer; + text-decoration: none; + display: inline-flex; + align-items: center; + gap: 6px; +} +.nav-btn:hover { + background: var(--border); +} +.nav-btn.disabled { + opacity: 0.45; + cursor: 
default; + pointer-events: none; +} .tabs { margin-top: 10px; display: flex; @@ -189,13 +231,22 @@ font-size: 32px; margin: 0 0 8px; } +.page-title .title-links { + font-size: 14px; + font-weight: normal; + margin-left: 6px; +} +.page-title .title-link { + font-size: 14px; + font-weight: normal; +} .subtitle { color: var(--muted); margin: 0; } .card { background: var(--panel); - border: 1px solid var(--border); + border: 1px solid var(--border-strong); border-radius: 12px; padding: 16px 20px; margin: 16px 0; @@ -262,15 +313,11 @@ margin: 0; } .list li { - padding: 10px 0; border-bottom: 1px dashed var(--border); } .list li:last-child { border-bottom: none; } -.link-list li { - padding: 0; -} .dir-item { padding: 0; margin: 8px 0; @@ -325,8 +372,185 @@ text-decoration: none; transition: background-color 0.15s ease; } +.session-snippet { + margin-top: 10px; +} +.session-snippet-thread { + display: grid; + gap: 10px; +} +.snippet-divider { + text-align: center; + color: var(--muted); + font-size: 14px; + letter-spacing: 0.2em; +} +.snippet-link { + display: block; + color: inherit; + text-decoration: none; +} +.snippet-link:focus-visible { + outline: 2px solid var(--accent); + outline-offset: 2px; +} +.session-snippet-thread .session-item { + margin-bottom: 0; + padding: 10px 12px; +} +.session-snippet-thread .snippet-item-left { + margin-right: 33%; +} +.session-snippet-thread .snippet-item-right { + margin-left: 33%; +} +.session-snippet-thread .session-content { + font-size: 13px; + line-height: 1.5; +} +.snippet-speaker { + margin: 0 0 6px; + font-size: 11px; + font-weight: bold; + letter-spacing: 0.08em; + text-transform: uppercase; + color: var(--muted); +} +.session-list-item { + padding: 10px; + border-radius: 10px; + transition: box-shadow 0.15s ease; +} +.active-list { + display: grid; + gap: 12px; +} +.active-list li { + border-bottom: none; +} +.active-date-divider { + display: flex; + align-items: center; + gap: 12px; + padding: 2px 4px; + color: 
var(--muted); + font-size: 12px; + letter-spacing: 0.08em; + text-transform: uppercase; +} +.active-date-divider::before, +.active-date-divider::after { + content: ""; + flex: 1 1 0; + min-width: 24px; + border-top: 1px solid var(--border-strong); + opacity: 0.85; +} +.active-date-divider-label { + white-space: nowrap; +} +.active-list-item { + border: 1px solid var(--border-strong); + background: rgba(255, 255, 255, 0.02); +} +.list li.session-list-item.thread-status-waiting-user { + border: 5px solid rgba(211, 154, 82, 0.56); + padding: 6px; +} +.active-session-header { + display: flex; + justify-content: space-between; + align-items: flex-start; + gap: 14px; +} +.active-session-title { + min-width: 0; + flex: 1 1 auto; +} +.active-session-actions { + display: flex; + flex-wrap: wrap; + justify-content: flex-end; + gap: 8px; + flex: 0 0 auto; +} +.active-toolbar { + display: flex; + flex-direction: column; + gap: 10px; +} +.notification-preview { + margin: 10px 0 0; +} +.notification-details { + margin-top: 10px; +} +.notification-body { + margin: 8px 0 0; + padding: 12px 14px; + background: var(--code); + border: 1px solid var(--border); + border-radius: 10px; + overflow: auto; + white-space: pre-wrap; + word-break: break-word; + font-size: 13px; + line-height: 1.5; +} +.notification-headers { + margin-top: 8px; +} +.notification-headers p { + margin: 0 0 6px; +} +.active-toolbar-row, +.active-day-nav { + display: flex; + flex-wrap: wrap; + align-items: center; + justify-content: space-between; + gap: 10px; +} +.session-list-item.search-result { + border: 1px solid var(--border-strong); + margin: 12px 0; +} +.session-list-item:hover, +.session-list-item:focus-within { + box-shadow: inset 0 0 0 9999px rgba(255, 255, 255, 0.05); + cursor: pointer; +} +.pagination { + display: flex; + align-items: center; + gap: 10px; + margin: 10px 0 14px; + flex-wrap: wrap; +} +.page-meta { + color: var(--muted); + font-size: 12px; +} +.filter-form { + margin-top: 6px; +} 
+.filter-toggle { + display: inline-flex; + align-items: center; + gap: 8px; + font-size: 12px; + color: var(--muted); +} +.filter-checkbox { + width: 16px; + height: 16px; + accent-color: var(--accent); +} +.filter-label { + user-select: none; +} .link-item-link:hover { - background: rgba(255, 255, 255, 0.05); + background: transparent; } .link-item-link:focus-visible { outline: 2px solid var(--accent); @@ -344,6 +568,9 @@ color: var(--muted); font-size: 14px; } +summary.meta { + cursor: pointer; +} .session-item { padding: 16px; border-radius: 10px; @@ -352,11 +579,43 @@ background: var(--panel); scroll-margin-top: calc(var(--sticky-offset, 180px) + 8px); } -.session-item.role-user { background: var(--user); } -.session-item.role-user .session-content { - white-space: pre-wrap; +.turn-aborted-item { + margin: 8px 0 16px; + margin-left: 33%; + padding: 0; + border: none; + background: transparent; + scroll-margin-top: calc(var(--sticky-offset, 180px) + 8px); +} +.turn-aborted-text { + margin: 0; + color: #ff6b6b; + font-size: 14px; + line-height: 1.6; } +.session-anchor { + display: block; + height: 0; + width: 0; + scroll-margin-top: calc(var(--sticky-offset, 180px) + 8px); +} +.session-item.role-user { background: var(--user); } .session-item.role-assistant { background: var(--assistant); } +.session-item.speaker-user { + background: var(--speaker-user-bg); + border-color: var(--speaker-user-border); + box-shadow: inset 0 0 0 1px var(--speaker-user-glow); +} +.session-item.speaker-agent { + background: var(--speaker-agent-bg); + border-color: var(--speaker-agent-border); + box-shadow: inset 0 0 0 1px var(--speaker-agent-glow); +} +.session-item.speaker-subagent { + background: var(--speaker-subagent-bg); + border-color: var(--speaker-subagent-border); + box-shadow: inset 0 0 0 1px var(--speaker-subagent-glow); +} .session-item.role-tool { background: var(--tool); } .session-item.role-system, .session-item.role-unknown { background: var(--system); } 
.session-item.role-error { background: var(--error); } @@ -364,10 +623,20 @@ border-color: rgba(73, 193, 181, 0.35); box-shadow: inset 0 0 0 1px rgba(73, 193, 181, 0.18); } +.session-item.speaker-subagent.subagent-notification, +.session-item.subagent-notification { + margin-left: 7%; + margin-right: 33%; + background: + linear-gradient(180deg, rgba(25, 47, 29, 0.98), rgba(15, 30, 19, 0.95)); + border-color: rgba(146, 196, 96, 0.34); + box-shadow: inset 0 0 0 1px rgba(146, 196, 96, 0.14); +} .session-item.role-user { margin-left: 33%; } -.session-item.role-assistant { +.session-item.role-assistant, +.session-item.role-tool { margin-right: 33%; } .session-header { @@ -377,6 +646,43 @@ align-items: baseline; margin-bottom: 10px; } +.session-group-details { + margin: 8px 0 10px; +} +.session-group-details[open] { + margin-bottom: 16px; +} +.session-group-header { + display: flex; + flex-wrap: wrap; + gap: 12px; + align-items: baseline; + margin-right: 33%; + padding: 0 4px; + cursor: pointer; + list-style: none; +} +.session-group-header::-webkit-details-marker { + display: none; +} +.session-group-title { + font-size: 16px; + font-weight: 600; + color: var(--ink); +} +.session-group-title::before { + content: "▶"; + display: inline-block; + margin-right: 8px; + font-size: 11px; + transition: transform 0.14s ease; +} +.session-group-details[open] .session-group-title::before { + transform: rotate(90deg); +} +.session-group-body { + margin-top: 10px; +} .copy-btn { border: 1px solid var(--border); background: transparent; @@ -393,6 +699,116 @@ display: inline-block; margin: 0; } +.session-page-toolbar { + display: flex; + flex-direction: column; + gap: 8px; +} +.session-page-toolbar-row { + display: flex; + flex-wrap: wrap; + align-items: center; + justify-content: space-between; + gap: 10px 14px; +} +.session-page-thread-row { + align-items: flex-start; +} +.session-page-thread-fields, +.session-page-thread-actions, +.session-page-jump-actions { + display: flex; + 
flex-wrap: wrap; + align-items: center; + gap: 8px; +} +.session-page-thread-fields { + min-width: 0; + flex: 0 1 auto; + align-items: flex-start; +} +.session-page-thread-actions { + margin-left: auto; + flex: 0 1 auto; + justify-content: flex-end; + row-gap: 10px; +} +.session-page-thread-state-group { + display: inline-flex; + flex-wrap: wrap; + align-items: center; + justify-content: flex-end; + gap: 8px; +} +.session-page-thread-field { + display: inline-flex; + align-self: stretch; + align-items: center; + gap: 8px; + min-width: 0; + padding: 5px 10px; + border-radius: 999px; + border: 1px solid var(--border); + background: rgba(255, 255, 255, 0.02); +} +.session-page-thread-field-link { + color: inherit; + text-decoration: none; +} +.session-page-thread-field-link:hover { + border-color: var(--accent); + background: rgba(255, 255, 255, 0.05); +} +.session-page-thread-field-link:focus-visible { + outline: 2px solid var(--accent); + outline-offset: 2px; +} +.session-page-thread-field span:last-child { + min-width: 0; + overflow-wrap: anywhere; +} +.session-page-branch-icon { + display: inline-flex; + align-items: center; + justify-content: center; + width: 14px; + height: 14px; + color: var(--accent); +} +.session-page-branch-icon-svg { + width: 14px; + height: 14px; + fill: currentColor; +} +.session-page-thread-actions .nav-btn, +.session-page-thread-state-group, +.session-page-toolbar-separator { + white-space: nowrap; +} +.session-page-thread-state-label, +.session-page-jump-label { + text-transform: uppercase; + letter-spacing: 0.08em; + font-size: 11px; +} +.session-page-toolbar-separator { + color: var(--muted); + opacity: 0.8; +} +.session-page-jump-row { + justify-content: flex-start; + align-items: center; +} +@media (max-width: 1280px) { + .session-page-thread-actions { + flex-basis: 100%; + margin-left: 0; + justify-content: flex-end; + } + .session-page-thread-state-group { + justify-content: flex-end; + } +} .share-banner { margin-top: 8px; 
padding: 8px 12px; @@ -457,6 +873,30 @@ .session-content ol { margin: 0 0 0.9em 1.2em; } +.session-content .update-plan-list { + list-style: none; + margin-left: 0; + padding-left: 0; +} +.session-content .update-plan-step { + display: flex; + align-items: flex-start; + gap: 8px; +} +.session-content .update-plan-step + .update-plan-step { + margin-top: 0.3em; +} +.session-content .update-plan-marker { + flex: 0 0 auto; + color: var(--ink); +} +.session-content .update-plan-text { + flex: 1 1 auto; + min-width: 0; +} +.session-content .update-plan-step.is-in-progress .update-plan-text { + color: var(--accent); +} .session-content pre { margin: 0 0 0.9em; padding: 12px 14px; @@ -464,10 +904,13 @@ border: 1px solid var(--border); border-radius: 10px; overflow: auto; + white-space: pre-wrap; + overflow-wrap: anywhere; + word-break: break-word; } .session-content code { background: var(--code); - padding: 2px 6px; + padding: 4px 8px; border-radius: 6px; border: 1px solid var(--border); } @@ -475,6 +918,119 @@ padding: 0; border: none; background: none; + white-space: inherit; + overflow-wrap: inherit; + word-break: inherit; +} +.patch-block { + margin: 0 0 0.9em; + border: 1px solid var(--border); + border-radius: 10px; + overflow: hidden; + background: var(--code); + box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.03); +} +.patch-line { + display: block; + padding: 4px 12px; + white-space: pre-wrap; + overflow-wrap: anywhere; + word-break: break-word; +} +.patch-line + .patch-line { + border-top: 1px solid rgba(255, 255, 255, 0.04); +} +.patch-line-marker { + background: rgba(94, 104, 122, 0.18); + color: #d5deea; + font-weight: 600; +} +.patch-line-file { + background: rgba(102, 199, 232, 0.14); + color: #c9f0ff; +} +.patch-line-hunk { + background: rgba(188, 153, 79, 0.14); + color: #f4dfad; +} +.patch-line-add { + background: rgba(82, 166, 107, 0.16); + color: #d7f5de; +} +.patch-line-del { + background: rgba(179, 88, 88, 0.16); + color: #ffd8d8; +} 
+.patch-line-context { + color: var(--ink); +} +.tool-run-part + .tool-run-part { + margin-top: 14px; +} +.tool-run-part { + padding: 12px 14px; + border-radius: 12px; + border: 1px solid transparent; +} +.tool-run-part-call { + background: rgba(43, 113, 103, 0.14); + border-color: rgba(96, 196, 179, 0.28); +} +.tool-run-part-output { + background: rgba(122, 79, 35, 0.12); + border-color: rgba(211, 154, 82, 0.28); + box-shadow: inset 0 1px 0 rgba(211, 154, 82, 0.18); +} +.tool-run-part-output .session-content pre, +.tool-run-part-output .session-content code { + border-color: rgba(211, 154, 82, 0.22); +} +.tool-output-details { + margin-top: 10px; +} +.tool-output-summary { + cursor: pointer; + user-select: none; + display: inline-flex; + align-items: center; + gap: 8px; + padding: 6px 10px; + border-radius: 999px; + border: 1px solid var(--border); + background: rgba(255, 255, 255, 0.03); +} +.tool-output-summary:hover { + background: rgba(255, 255, 255, 0.06); +} +.tool-output-summary::before { + content: "▶"; + font-size: 11px; + transition: transform 0.14s ease; +} +.tool-output-details[open] > .tool-output-summary::before { + transform: rotate(90deg); +} +.tool-output-details > .session-content { + margin-top: 12px; +} +.tool-run-part-meta { + margin: 0 0 10px; + display: flex; + flex-wrap: wrap; + align-items: center; + gap: 8px; +} +.tool-run-part-actions { + margin-left: auto; + display: inline-flex; + align-items: center; + gap: 6px; +} +.session-item.tool-run-group-member { + margin-bottom: 10px; +} +.session-item.tool-run-group-member.tool-run-group-end { + margin-bottom: 16px; } .session-content blockquote { margin: 0 0 0.9em; @@ -482,6 +1038,18 @@ border-left: 3px solid var(--border); color: var(--muted); } +.session-content table { + border-collapse: collapse; + border-spacing: 0; + margin: 0 0 0.9em; +} +.session-content th, +.session-content td { + border: 1px solid rgba(255, 255, 255, 0.18); + padding: 6px 8px; + text-align: left; + vertical-align: 
top; +} .tag { display: inline-block; padding: 2px 8px; @@ -490,11 +1058,88 @@ background: var(--border); color: var(--ink); } +.tag-role-agent, +.tag-role-subagent, +.tag-role-user, +.tag-role-tool, +.tag-role-system { + text-transform: capitalize; +} +.tag-role-agent { + background: rgba(30, 92, 128, 0.24); + border: 1px solid rgba(102, 199, 232, 0.42); +} +.tag-role-subagent { + background: rgba(54, 92, 39, 0.24); + border: 1px solid rgba(146, 196, 96, 0.42); +} +.tag-role-user { + background: rgba(122, 79, 35, 0.24); + border: 1px solid rgba(211, 154, 82, 0.42); +} +.tag-role-tool { + background: rgba(43, 113, 103, 0.24); + border: 1px solid rgba(96, 196, 179, 0.42); +} +.tag-tool-run-call { + background: rgba(43, 113, 103, 0.26); + border: 1px solid rgba(96, 196, 179, 0.5); +} +.tag-tool-run-output { + background: rgba(122, 79, 35, 0.26); + border: 1px solid rgba(211, 154, 82, 0.5); +} +.tag-role-system { + background: rgba(94, 104, 122, 0.22); + border: 1px solid rgba(174, 186, 207, 0.38); +} .tag-auto { background: rgba(73, 193, 181, 0.2); color: var(--ink); border: 1px solid rgba(73, 193, 181, 0.55); } +.tag-subagent { + background: rgba(54, 92, 39, 0.24); + color: var(--ink); + border: 1px solid rgba(146, 196, 96, 0.42); +} +.tag-subagent-name { + background: rgba(36, 74, 36, 0.28); + color: var(--ink); + border: 1px solid rgba(121, 172, 83, 0.36); +} +.tag-subagent-role { + background: rgba(64, 98, 46, 0.24); + color: var(--ink); + border: 1px solid rgba(146, 196, 96, 0.36); +} +.tag-status-waiting-user { + background: rgb(122, 79, 35); + border: 1px solid rgb(211, 154, 82); +} +.tag-status-waiting-agent { + background: rgba(30, 92, 128, 0.24); + border: 1px solid rgba(102, 199, 232, 0.42); +} +.tag-status-ended { + background: rgba(103, 60, 78, 0.22); + border: 1px solid rgba(224, 108, 159, 0.36); +} +.subagent-id { + font-family: monospace; +} +.subagent-link { + white-space: nowrap; +} +.subagent-request { + margin: 0 0 12px; +} +.subagent-request-body 
{ + margin-top: 8px; + background: rgba(2, 14, 18, 0.52); + border-radius: 8px; + border: 1px solid rgba(255, 255, 255, 0.05); +} @media (max-width: 768px) { header, main { padding: 16px; @@ -502,10 +1147,45 @@ .page-title { font-size: 26px; } + .active-session-header { + flex-direction: column; + } + .active-session-actions { + justify-content: flex-start; + } + .session-page-toolbar-row { + align-items: flex-start; + } + .session-page-thread-actions { + justify-content: flex-end; + flex-basis: 100%; + margin-left: 0; + } + .session-page-thread-state-group { + justify-content: flex-end; + } + .session-snippet-thread .snippet-item-left { + margin-right: 12%; + } + .session-snippet-thread .snippet-item-right { + margin-left: 12%; + } .session-item.role-user { margin-left: 12%; } - .session-item.role-assistant { + .session-item.speaker-subagent.subagent-notification, + .session-item.subagent-notification { + margin-left: 7%; + margin-right: 12%; + } + .session-item.role-assistant, + .session-item.role-tool { + margin-right: 12%; + } + .session-group-details { + margin-right: 0; + } + .session-group-header { margin-right: 12%; } } diff --git a/internal/repooverride/store.go b/internal/repooverride/store.go new file mode 100644 index 0000000..114fcd9 --- /dev/null +++ b/internal/repooverride/store.go @@ -0,0 +1,136 @@ +package repooverride + +import ( + "encoding/json" + "errors" + "fmt" + "os" + "path/filepath" + "strings" +) + +const fileVersion = 1 + +type persistedStore struct { + Version int `json:"version"` + Rules []Rule `json:"rules,omitempty"` +} + +// Rule overrides the repository URL used for sessions under one cwd prefix. +type Rule struct { + CwdPrefix string `json:"cwd_prefix"` + RepositoryURL string `json:"repository_url"` +} + +// Store keeps repository URL overrides for specific cwd prefixes. +type Store struct { + path string + rules []Rule +} + +// DefaultPath returns the default path for persisted repository overrides. 
+func DefaultPath(sessionsDir string) string { + sessionsDir = strings.TrimSpace(sessionsDir) + if sessionsDir == "" { + return "" + } + return filepath.Join(filepath.Dir(sessionsDir), "session_repository_overrides.json") +} + +// LoadStore opens or creates an empty override store. +func LoadStore(path string) (*Store, error) { + store := &Store{path: path} + if path == "" { + return store, nil + } + + data, err := os.ReadFile(path) + if err != nil { + if errors.Is(err, os.ErrNotExist) { + return store, nil + } + return nil, err + } + if strings.TrimSpace(string(data)) == "" { + return store, nil + } + + var decoded persistedStore + if err := json.Unmarshal(data, &decoded); err != nil { + return nil, err + } + if decoded.Version == 0 { + decoded.Version = fileVersion + } + if decoded.Version != fileVersion { + return nil, fmt.Errorf("unsupported repository override version %d", decoded.Version) + } + store.rules = normalizeRules(decoded.Rules) + return store, nil +} + +// Path returns the backing file path. +func (s *Store) Path() string { + if s == nil { + return "" + } + return s.path +} + +// ResolveRepositoryURL returns the best override for cwd using longest-prefix match. 
+func (s *Store) ResolveRepositoryURL(cwd string) string { + if s == nil { + return "" + } + cwd = normalizeCwd(cwd) + if cwd == "" { + return "" + } + + bestURL := "" + bestPrefixLen := -1 + for _, rule := range s.rules { + if !hasPathPrefix(cwd, rule.CwdPrefix) { + continue + } + if length := len(rule.CwdPrefix); length > bestPrefixLen { + bestPrefixLen = length + bestURL = rule.RepositoryURL + } + } + return bestURL +} + +func normalizeRules(rules []Rule) []Rule { + out := make([]Rule, 0, len(rules)) + for _, rule := range rules { + cwdPrefix := normalizeCwd(rule.CwdPrefix) + repositoryURL := strings.TrimSpace(rule.RepositoryURL) + if cwdPrefix == "" || repositoryURL == "" { + continue + } + out = append(out, Rule{ + CwdPrefix: cwdPrefix, + RepositoryURL: repositoryURL, + }) + } + return out +} + +func normalizeCwd(value string) string { + value = strings.TrimSpace(value) + if value == "" { + return "" + } + return filepath.Clean(value) +} + +func hasPathPrefix(path string, prefix string) bool { + if path == prefix { + return true + } + if prefix == string(filepath.Separator) { + return strings.HasPrefix(path, prefix) + } + return strings.HasPrefix(path, prefix+string(filepath.Separator)) +} diff --git a/internal/repooverride/store_test.go b/internal/repooverride/store_test.go new file mode 100644 index 0000000..b444d92 --- /dev/null +++ b/internal/repooverride/store_test.go @@ -0,0 +1,61 @@ +package repooverride + +import ( + "os" + "path/filepath" + "testing" +) + +func TestLoadStoreResolveRepositoryURLPrefersLongestPrefix(t *testing.T) { + path := filepath.Join(t.TempDir(), "session_repository_overrides.json") + data := `{ + "version": 1, + "rules": [ + { + "cwd_prefix": "/home/makoto", + "repository_url": "https://github.com/example/root.git" + }, + { + "cwd_prefix": "/home/makoto/codex-manager", + "repository_url": "https://github.com/makoto-soracom/codex-manager.git" + }, + { + "cwd_prefix": "/home/makoto/codex-manager/docs", + "repository_url": 
"https://github.com/makoto-soracom/codex-manager-docs.git" + } + ] +} +` + if err := os.WriteFile(path, []byte(data), 0o600); err != nil { + t.Fatalf("write overrides: %v", err) + } + + store, err := LoadStore(path) + if err != nil { + t.Fatalf("LoadStore: %v", err) + } + + if got := store.ResolveRepositoryURL("/home/makoto/codex-manager"); got != "https://github.com/makoto-soracom/codex-manager.git" { + t.Fatalf("expected codex-manager override, got %q", got) + } + if got := store.ResolveRepositoryURL("/home/makoto/codex-manager/internal/web"); got != "https://github.com/makoto-soracom/codex-manager.git" { + t.Fatalf("expected codex-manager longest prefix override, got %q", got) + } + if got := store.ResolveRepositoryURL("/home/makoto/codex-manager/docs/guides"); got != "https://github.com/makoto-soracom/codex-manager-docs.git" { + t.Fatalf("expected docs override, got %q", got) + } + if got := store.ResolveRepositoryURL("/home/makoto/other"); got != "https://github.com/example/root.git" { + t.Fatalf("expected parent override, got %q", got) + } + if got := store.ResolveRepositoryURL("/home/makoto/codex-manager-extra"); got != "https://github.com/example/root.git" { + t.Fatalf("expected prefix boundary handling, got %q", got) + } +} + +func TestDefaultPath(t *testing.T) { + got := DefaultPath("/tmp/codex/sessions") + want := filepath.Join("/tmp/codex", "session_repository_overrides.json") + if got != want { + t.Fatalf("expected %q, got %q", want, got) + } +} diff --git a/internal/search/index.go b/internal/search/index.go index ab80635..98ef0d5 100644 --- a/internal/search/index.go +++ b/internal/search/index.go @@ -5,6 +5,7 @@ import ( "strings" "sync" "time" + "unicode/utf8" "codex-manager/internal/sessions" ) @@ -14,39 +15,63 @@ const ( maxLimit = 200 snippetRadius = 60 snippetMax = 180 + contextMax = 140 ) // Result describes a single search match. 
type Result struct { - Date string `json:"date"` - Timestamp string `json:"timestamp"` - Cwd string `json:"cwd"` - Path string `json:"path"` - File string `json:"file"` - Line int `json:"line"` - Role string `json:"role"` - Preview string `json:"preview"` + Date string `json:"date"` + Timestamp string `json:"timestamp"` + Cwd string `json:"cwd"` + Path string `json:"path"` + File string `json:"file"` + DisplayFile string `json:"displayFile"` + Line int `json:"line"` + Role string `json:"role"` + Preview string `json:"preview"` + PrevUser string `json:"prevUser"` + NextAssistant string `json:"nextAssistant"` + PrevUserLine int `json:"prevUserLine"` + NextAssistantLine int `json:"nextAssistantLine"` sortTime time.Time } type entry struct { - date string - timestamp string - sortTime time.Time - cwd string - path string - file string - line int - role string - content string - lower string + date string + timestamp string + sortTime time.Time + cwd string + path string + file string + displayFile string + line int + role string + content string + lower string + prevUser string + nextAsst string + prevLine int + nextLine int +} + +type threadPairKey struct { + path string + file string + userLine int + assistantLine int +} + +type threadPairState struct { + hasUserHit bool + hasAssistantHit bool } type fileIndex struct { - size int64 - modTime time.Time - entries []entry + size int64 + modTime time.Time + threadName string + entries []entry } // Index stores a searchable snapshot of sessions. 
@@ -77,7 +102,7 @@ func (idx *Index) RefreshFrom(sessionsIdx *sessions.Index) error { toParse := make([]sessions.SessionFile, 0) for _, file := range files { key := file.Path - if meta, ok := existing[key]; ok && meta.size == file.Size && meta.modTime.Equal(file.ModTime) { + if meta, ok := existing[key]; ok && meta.size == file.Size && meta.modTime.Equal(file.ModTime) && meta.threadName == file.ThreadName { next[key] = meta continue } @@ -96,7 +121,7 @@ func (idx *Index) RefreshFrom(sessionsIdx *sessions.Index) error { } continue } - next[file.Path] = fileIndex{size: file.Size, modTime: file.ModTime, entries: entries} + next[file.Path] = fileIndex{size: file.Size, modTime: file.ModTime, threadName: file.ThreadName, entries: entries} } ordered := make([]entry, 0) @@ -118,6 +143,12 @@ func (idx *Index) RefreshFrom(sessionsIdx *sessions.Index) error { // Search returns the first N matches for the query. func (idx *Index) Search(query string, limit int) []Result { + return idx.SearchWithCwd(query, limit, "") +} + +// SearchWithCwd returns the first N matches for the query filtered by cwd. +// If cwdFilter is empty, it behaves like Search. 
+func (idx *Index) SearchWithCwd(query string, limit int, cwdFilter string) []Result { q := strings.TrimSpace(query) if q == "" { return nil @@ -128,40 +159,143 @@ func (idx *Index) Search(query string, limit int) []Result { if limit > maxLimit { limit = maxLimit } + cwdFilter = normalizeCwdFilter(cwdFilter) lower := strings.ToLower(q) idx.mu.RLock() defer idx.mu.RUnlock() - results := make([]Result, 0, limit) + matches := make([]entry, 0, limit) + pairStates := make(map[threadPairKey]threadPairState) for _, item := range idx.ordered { - matchIndex := strings.Index(item.lower, lower) - if matchIndex == -1 { + if !matchesCwdFilter(item.cwd, cwdFilter) { continue } - preview := makePreview(item.content, matchIndex, len(q)) - results = append(results, Result{ - Date: item.date, - Timestamp: item.timestamp, - Cwd: item.cwd, - Path: item.path, - File: item.file, - Line: item.line, - Role: item.role, - Preview: preview, - sortTime: item.sortTime, - }) + if strings.Index(item.lower, lower) == -1 { + continue + } + matches = append(matches, item) + if key, ok := pairKeyForEntry(item); ok { + state := pairStates[key] + switch item.role { + case "user": + state.hasUserHit = true + case "assistant": + state.hasAssistantHit = true + } + pairStates[key] = state + } } - sort.SliceStable(results, func(i, j int) bool { - return results[i].sortTime.After(results[j].sortTime) + sort.SliceStable(matches, func(i, j int) bool { + return matches[i].sortTime.After(matches[j].sortTime) }) - if len(results) > limit { - results = results[:limit] + + results := make([]Result, 0, limit) + for _, item := range matches { + if shouldSkipAssistantDuplicate(item, pairStates) { + continue + } + preview := makePreview(item.content, q) + results = append(results, Result{ + Date: item.date, + Timestamp: item.timestamp, + Cwd: item.cwd, + Path: item.path, + File: item.file, + DisplayFile: item.displayFile, + Line: item.line, + Role: item.role, + Preview: preview, + PrevUser: item.prevUser, + 
NextAssistant: item.nextAsst, + PrevUserLine: item.prevLine, + NextAssistantLine: item.nextLine, + sortTime: item.sortTime, + }) + if len(results) >= limit { + break + } } + return results } +func pairKeyForEntry(item entry) (threadPairKey, bool) { + switch item.role { + case "user": + if item.nextLine <= 0 { + return threadPairKey{}, false + } + return threadPairKey{ + path: item.path, + file: item.file, + userLine: item.line, + assistantLine: item.nextLine, + }, true + case "assistant": + if item.prevLine <= 0 { + return threadPairKey{}, false + } + return threadPairKey{ + path: item.path, + file: item.file, + userLine: item.prevLine, + assistantLine: item.line, + }, true + default: + return threadPairKey{}, false + } +} + +func shouldSkipAssistantDuplicate(item entry, pairStates map[threadPairKey]threadPairState) bool { + if item.role != "assistant" { + return false + } + key, ok := pairKeyForEntry(item) + if !ok { + return false + } + state, ok := pairStates[key] + if !ok { + return false + } + return state.hasUserHit && state.hasAssistantHit +} + +func normalizeCwdFilter(value string) string { + value = strings.TrimSpace(value) + if value == "" { + return "" + } + if value != "/" && strings.HasSuffix(value, "/") { + value = strings.TrimRight(value, "/") + } + if value != "\\" && strings.HasSuffix(value, "\\") { + value = strings.TrimRight(value, "\\") + } + return value +} + +func matchesCwdFilter(itemCwd string, cwdFilter string) bool { + if cwdFilter == "" { + return true + } + if itemCwd == "" { + return false + } + if itemCwd == cwdFilter { + return true + } + if cwdFilter == "/" { + return strings.HasPrefix(itemCwd, "/") + } + if cwdFilter == "\\" { + return strings.HasPrefix(itemCwd, "\\") + } + return strings.HasPrefix(itemCwd, cwdFilter+"/") || strings.HasPrefix(itemCwd, cwdFilter+"\\") +} + func buildEntries(file sessions.SessionFile) ([]entry, error) { session, err := sessions.ParseSession(file.Path) if err != nil { @@ -171,6 +305,7 @@ func 
buildEntries(file sessions.SessionFile) ([]entry, error) { entries := make([]entry, 0, len(session.Items)) dateLabel := file.Date.String() datePath := file.Date.Path() + displayFile := file.DisplayName() cwd := "" if session.Meta != nil && session.Meta.Cwd != "" { cwd = session.Meta.Cwd @@ -178,64 +313,143 @@ func buildEntries(file sessions.SessionFile) ([]entry, error) { cwd = file.Meta.Cwd } cwd = sessions.NormalizeCwd(cwd) - for _, item := range session.Items { + + prevUser := make([]string, len(session.Items)) + prevUserLine := make([]int, len(session.Items)) + lastUser := "" + lastUserLine := 0 + for i, item := range session.Items { + prevUser[i] = lastUser + prevUserLine[i] = lastUserLine + content := strings.TrimSpace(item.Content) + if content == "" { + continue + } + if item.Role == "user" && !sessions.IsAutoContextUserMessage(item.Content) { + lastUser = makeContextSnippet(content) + lastUserLine = item.Line + } + } + + nextAssistant := make([]string, len(session.Items)) + nextAssistantLine := make([]int, len(session.Items)) + nextAsst := "" + nextAsstLine := 0 + for i := len(session.Items) - 1; i >= 0; i-- { + nextAssistant[i] = nextAsst + nextAssistantLine[i] = nextAsstLine + content := strings.TrimSpace(session.Items[i].Content) + if content == "" { + continue + } + if session.Items[i].Role == "assistant" { + nextAsst = makeContextSnippet(content) + nextAsstLine = session.Items[i].Line + } + } + + for i, item := range session.Items { content := strings.TrimSpace(item.Content) if content == "" { continue } timestamp := parseTimestamp(item.Timestamp, file.ModTime) entries = append(entries, entry{ - date: dateLabel, - timestamp: formatTimestamp(timestamp), - sortTime: timestamp, - cwd: cwd, - path: datePath, - file: file.Name, - line: item.Line, - role: item.Role, - content: content, - lower: strings.ToLower(content), + date: dateLabel, + timestamp: formatTimestamp(timestamp), + sortTime: timestamp, + cwd: cwd, + path: datePath, + file: file.Name, + 
displayFile: displayFile, + line: item.Line, + role: item.Role, + content: content, + lower: strings.ToLower(content), + prevUser: prevUser[i], + nextAsst: nextAssistant[i], + prevLine: prevUserLine[i], + nextLine: nextAssistantLine[i], }) } return entries, nil } -func makePreview(content string, matchIndex int, queryLen int) string { +func makePreview(content string, query string) string { cleaned := strings.ReplaceAll(content, "\r", " ") cleaned = strings.ReplaceAll(cleaned, "\n", " ") cleaned = strings.TrimSpace(cleaned) if cleaned == "" { return "" } - if matchIndex < 0 || matchIndex >= len(cleaned) || queryLen <= 0 { - return truncate(cleaned, snippetMax) + query = strings.TrimSpace(query) + if query == "" { + return truncateRunes(cleaned, snippetMax) + } + + lowerCleaned := strings.ToLower(cleaned) + lowerQuery := strings.ToLower(query) + matchIndex := strings.Index(lowerCleaned, lowerQuery) + if matchIndex == -1 { + return truncateRunes(cleaned, snippetMax) + } + + matchRuneIndex := runeOffsetForByteIndex(lowerCleaned, matchIndex) + queryRuneLen := utf8.RuneCountInString(lowerQuery) + if queryRuneLen <= 0 { + return truncateRunes(cleaned, snippetMax) } - start := matchIndex - snippetRadius + + runes := []rune(cleaned) + start := matchRuneIndex - snippetRadius if start < 0 { start = 0 } - end := matchIndex + queryLen + snippetRadius - if end > len(cleaned) { - end = len(cleaned) + end := matchRuneIndex + queryRuneLen + snippetRadius + if end > len(runes) { + end = len(runes) } - snippet := strings.TrimSpace(cleaned[start:end]) + snippet := strings.TrimSpace(string(runes[start:end])) if start > 0 { snippet = "..." + snippet } - if end < len(cleaned) { + if end < len(runes) { snippet = snippet + "..." 
} return snippet } -func truncate(value string, max int) string { - if len(value) <= max { +func makeContextSnippet(value string) string { + value = strings.TrimSpace(value) + if value == "" { + return "" + } + value = strings.Join(strings.Fields(value), " ") + return truncateRunes(value, contextMax) +} + +func truncateRunes(value string, max int) string { + if max <= 0 { + return value + } + runes := []rune(value) + if len(runes) <= max { return value } - if max <= 3 { - return value[:max] + if max > 3 { + return string(runes[:max-3]) + "..." + } + return string(runes[:max]) +} + +func runeOffsetForByteIndex(value string, byteIndex int) int { + if byteIndex <= 0 { + return 0 + } + if byteIndex >= len(value) { + return utf8.RuneCountInString(value) } - return value[:max-3] + "..." + return utf8.RuneCountInString(value[:byteIndex]) } func parseTimestamp(value string, fallback time.Time) time.Time { diff --git a/internal/search/index_test.go b/internal/search/index_test.go index 0b699eb..0c538b2 100644 --- a/internal/search/index_test.go +++ b/internal/search/index_test.go @@ -5,6 +5,7 @@ import ( "path/filepath" "strings" "testing" + "unicode/utf8" "codex-manager/internal/sessions" ) @@ -63,6 +64,137 @@ func TestIndexSearch(t *testing.T) { } } +func TestSearchDeduplicatesConsecutiveUserAssistantHits(t *testing.T) { + baseDir := t.TempDir() + writeSessionFile(t, baseDir, "2024/01/02/consecutive.jsonl", []string{ + `{"timestamp":"2024-01-02T00:00:01Z","type":"response_item","payload":{"type":"message","role":"user","content":[{"type":"text","text":"キーワード を含む質問"}]}}`, + `{"timestamp":"2024-01-02T00:00:02Z","type":"response_item","payload":{"type":"message","role":"assistant","content":[{"type":"text","text":"キーワード を含む回答"}]}}`, + }) + + idx := sessions.NewIndex(baseDir) + if err := idx.Refresh(); err != nil { + t.Fatalf("refresh: %v", err) + } + + searchIdx := NewIndex() + if err := searchIdx.RefreshFrom(idx); err != nil { + t.Fatalf("search refresh: %v", err) + } + + 
results := searchIdx.Search("キーワード", 10) + if len(results) != 1 { + t.Fatalf("expected 1 deduplicated result, got %d", len(results)) + } + if results[0].Role != "user" { + t.Fatalf("expected user result to be kept, got %q", results[0].Role) + } + if results[0].Line != 1 { + t.Fatalf("expected user line 1, got %d", results[0].Line) + } + if results[0].NextAssistantLine != 2 { + t.Fatalf("expected next assistant line 2, got %d", results[0].NextAssistantLine) + } +} + +func TestSearchKeepsAssistantOnlyHit(t *testing.T) { + baseDir := t.TempDir() + writeSessionFile(t, baseDir, "2024/01/02/assistant-only.jsonl", []string{ + `{"timestamp":"2024-01-02T00:00:01Z","type":"response_item","payload":{"type":"message","role":"user","content":[{"type":"text","text":"質問だけ"}]}}`, + `{"timestamp":"2024-01-02T00:00:02Z","type":"response_item","payload":{"type":"message","role":"assistant","content":[{"type":"text","text":"キーワード を含む回答"}]}}`, + }) + + idx := sessions.NewIndex(baseDir) + if err := idx.Refresh(); err != nil { + t.Fatalf("refresh: %v", err) + } + + searchIdx := NewIndex() + if err := searchIdx.RefreshFrom(idx); err != nil { + t.Fatalf("search refresh: %v", err) + } + + results := searchIdx.Search("キーワード", 10) + if len(results) != 1 { + t.Fatalf("expected 1 assistant-only result, got %d", len(results)) + } + if results[0].Role != "assistant" { + t.Fatalf("expected assistant result, got %q", results[0].Role) + } + if results[0].Line != 2 { + t.Fatalf("expected assistant line 2, got %d", results[0].Line) + } +} + +func TestSearchIncludesDisplayFile(t *testing.T) { + root := t.TempDir() + sessionsDir := filepath.Join(root, "sessions") + indexPath := filepath.Join(root, "session_index.jsonl") + if err := os.WriteFile(indexPath, []byte("{\"id\":\"session-1\",\"thread_name\":\"pr461 #3\",\"updated_at\":\"2026-03-13T06:09:42Z\"}\n"), 0o600); err != nil { + t.Fatalf("write session index: %v", err) + } + + writeSessionFile(t, sessionsDir, 
"2026/03/13/rollout-2026-03-13T13-36-02-session-1.jsonl", []string{ + `{"timestamp":"2026-03-13T00:00:00Z","type":"session_meta","payload":{"id":"session-1","timestamp":"2026-03-13T00:00:00Z","cwd":"/tmp","originator":"cli","cli_version":"0.1","source":"cli"}}`, + `{"timestamp":"2026-03-13T00:00:01Z","type":"response_item","payload":{"type":"message","role":"user","content":[{"type":"text","text":"Please check the display name"}]}}`, + }) + + idx := sessions.NewIndex(sessionsDir) + if err := idx.Refresh(); err != nil { + t.Fatalf("refresh: %v", err) + } + + searchIdx := NewIndex() + if err := searchIdx.RefreshFrom(idx); err != nil { + t.Fatalf("search refresh: %v", err) + } + + results := searchIdx.Search("display name", 10) + if len(results) != 1 { + t.Fatalf("expected 1 result, got %d", len(results)) + } + want := "pr461 #3 (rollout-2026-03-13T13-36-02-session-1.jsonl)" + if results[0].DisplayFile != want { + t.Fatalf("expected display file %q, got %q", want, results[0].DisplayFile) + } + + if err := os.WriteFile(indexPath, []byte("{\"id\":\"session-1\",\"thread_name\":\"pr461 #4\",\"updated_at\":\"2026-03-13T07:09:42Z\"}\n"), 0o600); err != nil { + t.Fatalf("rewrite session index: %v", err) + } + if err := idx.Refresh(); err != nil { + t.Fatalf("refresh after session index update: %v", err) + } + if err := searchIdx.RefreshFrom(idx); err != nil { + t.Fatalf("search refresh after session index update: %v", err) + } + + results = searchIdx.Search("display name", 10) + if len(results) != 1 { + t.Fatalf("expected 1 result after update, got %d", len(results)) + } + want = "pr461 #4 (rollout-2026-03-13T13-36-02-session-1.jsonl)" + if results[0].DisplayFile != want { + t.Fatalf("expected updated display file %q, got %q", want, results[0].DisplayFile) + } +} + +func TestMakePreviewMultibyteBoundarySafe(t *testing.T) { + content := strings.Repeat("あ", 120) + "キーワード" + strings.Repeat("い", 120) + preview := makePreview(content, "キーワード") + + if preview == "" { + 
t.Fatalf("expected preview, got empty") + } + if !utf8.ValidString(preview) { + t.Fatalf("expected valid UTF-8 preview, got %q", preview) + } + if strings.ContainsRune(preview, '\uFFFD') { + t.Fatalf("expected preview without replacement rune, got %q", preview) + } + if !strings.Contains(preview, "キーワード") { + t.Fatalf("expected preview to contain query, got %q", preview) + } +} + func writeSessionFile(t *testing.T, baseDir, relPath string, lines []string) { t.Helper() fullPath := filepath.Join(baseDir, filepath.FromSlash(relPath)) diff --git a/internal/sessions/display.go b/internal/sessions/display.go new file mode 100644 index 0000000..53e9def --- /dev/null +++ b/internal/sessions/display.go @@ -0,0 +1,17 @@ +package sessions + +import "strings" + +// SessionDisplayName returns the user-facing label for a session file. +func SessionDisplayName(filename, threadName string) string { + threadName = strings.TrimSpace(threadName) + if threadName == "" { + return filename + } + return threadName + " (" + filename + ")" +} + +// DisplayName returns the user-facing label for a session file. +func (f SessionFile) DisplayName() string { + return SessionDisplayName(f.Name, f.ThreadName) +} diff --git a/internal/sessions/index.go b/internal/sessions/index.go index ac36191..d8a4eec 100644 --- a/internal/sessions/index.go +++ b/internal/sessions/index.go @@ -1,6 +1,8 @@ package sessions import ( + "bufio" + "encoding/json" "errors" "io/fs" "os" @@ -30,31 +32,36 @@ func (d DateKey) Path() string { // SessionFile represents a jsonl file on disk. type SessionFile struct { - Date DateKey - Name string - Path string - Size int64 - ModTime time.Time - Meta *SessionMeta + Date DateKey + Name string + Path string + Size int64 + ModTime time.Time + Meta *SessionMeta + ThreadName string } // Index stores a snapshot of sessions on disk. 
type Index struct { - baseDir string - mu sync.RWMutex - byDate map[DateKey][]SessionFile - byName map[string]SessionFile - byCwd map[string][]SessionFile - updated time.Time + baseDir string + mu sync.RWMutex + byDate map[DateKey][]SessionFile + byName map[string]SessionFile + byID map[string]SessionFile + byCwd map[string][]SessionFile + threadNames map[string]string + updated time.Time } // NewIndex creates an empty index. func NewIndex(baseDir string) *Index { return &Index{ - baseDir: baseDir, - byDate: map[DateKey][]SessionFile{}, - byName: map[string]SessionFile{}, - byCwd: map[string][]SessionFile{}, + baseDir: baseDir, + byDate: map[DateKey][]SessionFile{}, + byName: map[string]SessionFile{}, + byID: map[string]SessionFile{}, + byCwd: map[string][]SessionFile{}, + threadNames: map[string]string{}, } } @@ -81,7 +88,9 @@ func (idx *Index) Refresh() error { byDate := map[DateKey][]SessionFile{} byName := map[string]SessionFile{} + byID := map[string]SessionFile{} byCwd := map[string][]SessionFile{} + threadNames := loadThreadNames(sessionIndexPath(idx.baseDir)) walkErr := filepath.WalkDir(idx.baseDir, func(fullPath string, d fs.DirEntry, err error) error { if err != nil { @@ -125,9 +134,15 @@ func (idx *Index) Refresh() error { ModTime: info.ModTime(), Meta: meta, } + if meta != nil && meta.ID != "" { + file.ThreadName = threadNames[meta.ID] + } byDate[date] = append(byDate[date], file) byName[path.Join(date.Path(), file.Name)] = file + if file.Meta != nil && file.Meta.ID != "" { + byID[file.Meta.ID] = file + } cwd := CwdForFile(file) byCwd[cwd] = append(byCwd[cwd], file) return nil @@ -150,7 +165,9 @@ func (idx *Index) Refresh() error { idx.mu.Lock() idx.byDate = byDate idx.byName = byName + idx.byID = byID idx.byCwd = byCwd + idx.threadNames = threadNames idx.updated = time.Now() idx.mu.Unlock() return nil @@ -232,6 +249,21 @@ func (idx *Index) Lookup(date DateKey, filename string) (SessionFile, bool) { return file, ok } +// LookupByID returns the file for 
a session ID. +func (idx *Index) LookupByID(id string) (SessionFile, bool) { + idx.mu.RLock() + defer idx.mu.RUnlock() + file, ok := idx.byID[id] + return file, ok +} + +// ThreadName returns the latest known thread name for a session ID. +func (idx *Index) ThreadName(id string) string { + idx.mu.RLock() + defer idx.mu.RUnlock() + return idx.threadNames[id] +} + func ParseDate(year, month, day string) (DateKey, bool) { if len(year) != 4 || len(month) != 2 || len(day) != 2 { return DateKey{}, false @@ -269,3 +301,102 @@ func dateGreater(a, b DateKey) bool { } return a.String() > b.String() } + +type sessionIndexEntry struct { + ID string `json:"id"` + ThreadName string `json:"thread_name"` + UpdatedAt string `json:"updated_at"` +} + +type sessionThreadName struct { + threadName string + updatedAt time.Time + hasUpdated bool + line int +} + +func sessionIndexPath(baseDir string) string { + cleanBaseDir := filepath.Clean(baseDir) + return filepath.Join(filepath.Dir(cleanBaseDir), "session_index.jsonl") +} + +func loadThreadNames(indexPath string) map[string]string { + file, err := os.Open(indexPath) + if err != nil { + return map[string]string{} + } + defer file.Close() + + scanner := bufio.NewScanner(file) + scanner.Buffer(make([]byte, 0, 64*1024), 1024*1024) + + entries := map[string]sessionThreadName{} + lineNum := 0 + for scanner.Scan() { + lineNum++ + lineText := strings.TrimSpace(scanner.Text()) + if lineText == "" { + continue + } + + var entry sessionIndexEntry + if err := json.Unmarshal([]byte(lineText), &entry); err != nil { + continue + } + + id := strings.TrimSpace(entry.ID) + threadName := strings.TrimSpace(entry.ThreadName) + if id == "" || threadName == "" { + continue + } + + updatedAt, hasUpdated := parseSessionIndexUpdatedAt(entry.UpdatedAt) + current, ok := entries[id] + if ok && !shouldReplaceThreadName(current, updatedAt, hasUpdated, lineNum) { + continue + } + entries[id] = sessionThreadName{ + threadName: threadName, + updatedAt: updatedAt, + 
hasUpdated: hasUpdated, + line: lineNum, + } + } + + out := make(map[string]string, len(entries)) + for id, entry := range entries { + out[id] = entry.threadName + } + return out +} + +func parseSessionIndexUpdatedAt(value string) (time.Time, bool) { + value = strings.TrimSpace(value) + if value == "" { + return time.Time{}, false + } + parsed, err := time.Parse(time.RFC3339Nano, value) + if err != nil { + return time.Time{}, false + } + return parsed, true +} + +func shouldReplaceThreadName(current sessionThreadName, updatedAt time.Time, hasUpdated bool, lineNum int) bool { + if hasUpdated { + if !current.hasUpdated { + return true + } + if updatedAt.After(current.updatedAt) { + return true + } + if updatedAt.Equal(current.updatedAt) { + return lineNum > current.line + } + return false + } + if current.hasUpdated { + return false + } + return lineNum > current.line +} diff --git a/internal/sessions/index_test.go b/internal/sessions/index_test.go index 88b0dff..c5769c9 100644 --- a/internal/sessions/index_test.go +++ b/internal/sessions/index_test.go @@ -51,3 +51,114 @@ func TestIndexRefreshAndLookup(t *testing.T) { t.Fatalf("unexpected path: %s", lookup.Path) } } + +func TestIndexLookupByID(t *testing.T) { + base := t.TempDir() + pathA := filepath.Join(base, "2026", "03", "13") + if err := os.MkdirAll(pathA, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + filePath := filepath.Join(pathA, "session-a.jsonl") + data := "" + + "{\"timestamp\":\"2026-03-13T00:25:44Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"agent-1\",\"timestamp\":\"2026-03-13T00:25:44Z\",\"cwd\":\"/tmp\",\"originator\":\"cli\",\"cli_version\":\"0.1\",\"source\":\"cli\"}}\n" + + "{\"timestamp\":\"2026-03-13T00:25:45Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"assistant\",\"content\":[{\"type\":\"output_text\",\"text\":\"hello\"}]}}\n" + if err := os.WriteFile(filePath, []byte(data), 0o644); err != nil { + t.Fatalf("write: %v", err) + } + + idx := 
NewIndex(base) + if err := idx.Refresh(); err != nil { + t.Fatalf("refresh: %v", err) + } + + lookup, ok := idx.LookupByID("agent-1") + if !ok { + t.Fatalf("expected lookup by id to succeed") + } + if lookup.Path != filePath { + t.Fatalf("unexpected path: %s", lookup.Path) + } +} + +func TestIndexRefreshCapturesGitBranchFromSessionMeta(t *testing.T) { + base := t.TempDir() + pathA := filepath.Join(base, "2026", "03", "19") + if err := os.MkdirAll(pathA, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + filePath := filepath.Join(pathA, "session-a.jsonl") + data := "" + + "{\"timestamp\":\"2026-03-19T00:25:44Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"agent-1\",\"timestamp\":\"2026-03-19T00:25:44Z\",\"cwd\":\"/tmp\",\"git\":{\"branch\":\"feature/session-branch\",\"commit_hash\":\"abc123\"},\"originator\":\"cli\",\"cli_version\":\"0.1\",\"source\":\"cli\"}}\n" + + "{\"timestamp\":\"2026-03-19T00:25:45Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"assistant\",\"content\":[{\"type\":\"output_text\",\"text\":\"hello\"}]}}\n" + if err := os.WriteFile(filePath, []byte(data), 0o644); err != nil { + t.Fatalf("write: %v", err) + } + + idx := NewIndex(base) + if err := idx.Refresh(); err != nil { + t.Fatalf("refresh: %v", err) + } + + date, ok := ParseDate("2026", "03", "19") + if !ok { + t.Fatal("expected valid date") + } + lookup, ok := idx.Lookup(date, "session-a.jsonl") + if !ok { + t.Fatalf("expected lookup to succeed") + } + if lookup.Meta == nil { + t.Fatal("expected meta") + } + if lookup.Meta.GitBranch() != "feature/session-branch" { + t.Fatalf("expected git branch, got %q", lookup.Meta.GitBranch()) + } +} + +func TestIndexRefreshUsesLatestThreadName(t *testing.T) { + root := t.TempDir() + sessionsDir := filepath.Join(root, "sessions") + datePath := filepath.Join(sessionsDir, "2026", "03", "13") + if err := os.MkdirAll(datePath, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + indexPath := filepath.Join(root, 
"session_index.jsonl") + indexData := "" + + "{\"id\":\"session-1\",\"thread_name\":\"old name\",\"updated_at\":\"2026-03-12T00:00:00Z\"}\n" + + "{\"id\":\"session-1\",\"thread_name\":\"new name\",\"updated_at\":\"2026-03-13T00:00:00Z\"}\n" + if err := os.WriteFile(indexPath, []byte(indexData), 0o644); err != nil { + t.Fatalf("write session index: %v", err) + } + + filePath := filepath.Join(datePath, "rollout-2026-03-13T13-36-02-session-1.jsonl") + data := "" + + "{\"timestamp\":\"2026-03-13T00:25:44Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-1\",\"timestamp\":\"2026-03-13T00:25:44Z\",\"cwd\":\"/tmp\",\"originator\":\"cli\",\"cli_version\":\"0.1\",\"source\":\"cli\"}}\n" + + "{\"timestamp\":\"2026-03-13T00:25:45Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"assistant\",\"content\":[{\"type\":\"output_text\",\"text\":\"hello\"}]}}\n" + if err := os.WriteFile(filePath, []byte(data), 0o644); err != nil { + t.Fatalf("write session: %v", err) + } + + idx := NewIndex(sessionsDir) + if err := idx.Refresh(); err != nil { + t.Fatalf("refresh: %v", err) + } + + date, ok := ParseDate("2026", "03", "13") + if !ok { + t.Fatal("expected valid date") + } + file, ok := idx.Lookup(date, "rollout-2026-03-13T13-36-02-session-1.jsonl") + if !ok { + t.Fatal("expected lookup to succeed") + } + if file.ThreadName != "new name" { + t.Fatalf("expected latest thread name, got %q", file.ThreadName) + } + if file.DisplayName() != "new name (rollout-2026-03-13T13-36-02-session-1.jsonl)" { + t.Fatalf("unexpected display name: %q", file.DisplayName()) + } + if got := idx.ThreadName("session-1"); got != "new name" { + t.Fatalf("expected thread name lookup, got %q", got) + } +} diff --git a/internal/sessions/meta.go b/internal/sessions/meta.go index 9391bdb..52bb6b6 100644 --- a/internal/sessions/meta.go +++ b/internal/sessions/meta.go @@ -84,18 +84,25 @@ func metaFromLine(lineText string) *SessionMeta { if err := json.Unmarshal([]byte(lineText), 
&metaLine); err != nil { return nil } - if metaLine.ID == "" && metaLine.Timestamp == "" && metaLine.Cwd == "" && metaLine.Originator == "" && metaLine.CliVersion == "" && metaLine.Instructions == nil { + if metaLine.ID == "" && metaLine.ForkedFromID == "" && metaLine.Timestamp == "" && metaLine.Cwd == "" && metaLine.Git == nil && metaLine.Originator == "" && metaLine.CliVersion == "" && metaLine.AgentNickname == "" && metaLine.AgentRole == "" && metaLine.Source == nil && metaLine.Instructions == nil && metaLine.BaseInstructions == nil { return nil } out := SessionMeta{ - ID: metaLine.ID, - Timestamp: metaLine.Timestamp, - Cwd: metaLine.Cwd, - Originator: metaLine.Originator, - CliVersion: metaLine.CliVersion, + ID: metaLine.ID, + ForkedFromID: metaLine.ForkedFromID, + Timestamp: metaLine.Timestamp, + Cwd: metaLine.Cwd, + Git: metaLine.Git, + Originator: metaLine.Originator, + CliVersion: metaLine.CliVersion, + AgentNickname: metaLine.AgentNickname, + AgentRole: metaLine.AgentRole, + Source: metaLine.Source, } if metaLine.Instructions != nil { out.Instructions = *metaLine.Instructions + } else if metaLine.BaseInstructions != nil { + out.Instructions = metaLine.BaseInstructions.Text } return &out } diff --git a/internal/sessions/parser.go b/internal/sessions/parser.go index e87bb0d..393ff40 100644 --- a/internal/sessions/parser.go +++ b/internal/sessions/parser.go @@ -4,6 +4,8 @@ import ( "bufio" "bytes" "encoding/json" + "fmt" + "html" "io" "os" "strings" @@ -11,32 +13,88 @@ import ( // Session represents a parsed conversation file. type Session struct { - Path string - Meta *SessionMeta - Items []RenderItem + Path string + Meta *SessionMeta + Items []RenderItem + subagentRequests map[string]string + subagentNicknames map[string]string + spawnRequestByCall map[string]string } // SessionMeta holds metadata from session_meta entries. 
type SessionMeta struct { - ID string `json:"id"` - Timestamp string `json:"timestamp"` - Cwd string `json:"cwd"` - Originator string `json:"originator"` - CliVersion string `json:"cli_version"` - Instructions string `json:"instructions"` + ID string `json:"id"` + ForkedFromID string `json:"forked_from_id,omitempty"` + Timestamp string `json:"timestamp"` + Cwd string `json:"cwd"` + Git *sessionMetaGit `json:"git,omitempty"` + Originator string `json:"originator"` + CliVersion string `json:"cli_version"` + Instructions string `json:"instructions"` + AgentNickname string `json:"agent_nickname,omitempty"` + AgentRole string `json:"agent_role,omitempty"` + Source *sessionMetaSource `json:"source,omitempty"` + BaseInstructions *instructionText `json:"base_instructions,omitempty"` +} + +type sessionMetaSource struct { + Subagent *sessionMetaSubagentSource `json:"subagent,omitempty"` +} + +type sessionMetaSubagentSource struct { + ThreadSpawn *sessionMetaThreadSpawn `json:"thread_spawn,omitempty"` +} + +type sessionMetaThreadSpawn struct { + ParentThreadID string `json:"parent_thread_id,omitempty"` + Depth int `json:"depth,omitempty"` + AgentNickname string `json:"agent_nickname,omitempty"` + AgentRole string `json:"agent_role,omitempty"` +} + +type sessionMetaGit struct { + CommitHash string `json:"commit_hash,omitempty"` + Branch string `json:"branch,omitempty"` + RepositoryURL string `json:"repository_url,omitempty"` +} + +func (s *sessionMetaSource) UnmarshalJSON(data []byte) error { + trimmed := bytes.TrimSpace(data) + if len(trimmed) == 0 || bytes.Equal(trimmed, []byte("null")) { + return nil + } + if trimmed[0] == '"' { + *s = sessionMetaSource{} + return nil + } + type alias sessionMetaSource + var decoded alias + if err := json.Unmarshal(trimmed, &decoded); err != nil { + return err + } + *s = sessionMetaSource(decoded) + return nil } // RenderItem is a display-ready entry for the HTML view. 
type RenderItem struct { - Line int - Timestamp string - Type string - Subtype string - Role string - Title string - Content string - Raw string - Class string + Line int + Timestamp string + Type string + Subtype string + CallID string + ToolName string + ToolStatus string + ToolInput string + Role string + Title string + Content string + Raw string + Class string + SubagentID string + SubagentNickname string + SubagentStatusType string + SubagentRequest string } type envelope struct { @@ -58,6 +116,9 @@ type responseItemPayload struct { Arguments string `json:"arguments"` CallID string `json:"call_id"` Output string `json:"output"` + Status string `json:"status"` + Input string `json:"input"` + Action json.RawMessage `json:"action"` } type eventMsgPayload struct { @@ -65,6 +126,10 @@ type eventMsgPayload struct { Message string `json:"message"` } +type instructionText struct { + Text string `json:"text"` +} + type directMessagePayload struct { Type string `json:"type"` Role string `json:"role"` @@ -72,12 +137,77 @@ type directMessagePayload struct { } type metaLinePayload struct { - ID string `json:"id"` - Timestamp string `json:"timestamp"` - Cwd string `json:"cwd"` - Originator string `json:"originator"` - CliVersion string `json:"cli_version"` - Instructions *string `json:"instructions"` + ID string `json:"id"` + ForkedFromID string `json:"forked_from_id,omitempty"` + Timestamp string `json:"timestamp"` + Cwd string `json:"cwd"` + Git *sessionMetaGit `json:"git,omitempty"` + Originator string `json:"originator"` + CliVersion string `json:"cli_version"` + AgentNickname string `json:"agent_nickname,omitempty"` + AgentRole string `json:"agent_role,omitempty"` + Source *sessionMetaSource `json:"source,omitempty"` + Instructions *string `json:"instructions"` + BaseInstructions *instructionText `json:"base_instructions"` +} + +func (m *SessionMeta) ParentThreadID() string { + if m == nil { + return "" + } + if m.Source != nil && m.Source.Subagent != nil && 
m.Source.Subagent.ThreadSpawn != nil { + if id := strings.TrimSpace(m.Source.Subagent.ThreadSpawn.ParentThreadID); id != "" { + return id + } + } + return strings.TrimSpace(m.ForkedFromID) +} + +func (m *SessionMeta) SubagentNicknameValue() string { + if m == nil { + return "" + } + if nickname := strings.TrimSpace(m.AgentNickname); nickname != "" { + return nickname + } + if m.Source != nil && m.Source.Subagent != nil && m.Source.Subagent.ThreadSpawn != nil { + return strings.TrimSpace(m.Source.Subagent.ThreadSpawn.AgentNickname) + } + return "" +} + +func (m *SessionMeta) SubagentRoleValue() string { + if m == nil { + return "" + } + if role := strings.TrimSpace(m.AgentRole); role != "" { + return role + } + if m.Source != nil && m.Source.Subagent != nil && m.Source.Subagent.ThreadSpawn != nil { + return strings.TrimSpace(m.Source.Subagent.ThreadSpawn.AgentRole) + } + return "" +} + +func (m *SessionMeta) IsSubagentThread() bool { + if m == nil { + return false + } + return m.ParentThreadID() != "" || m.SubagentNicknameValue() != "" || m.SubagentRoleValue() != "" +} + +func (m *SessionMeta) GitBranch() string { + if m == nil || m.Git == nil { + return "" + } + return strings.TrimSpace(m.Git.Branch) +} + +func (m *SessionMeta) GitRepositoryURL() string { + if m == nil || m.Git == nil { + return "" + } + return strings.TrimSpace(m.Git.RepositoryURL) } // ParseSession reads a jsonl file and returns a parsed Session. 
@@ -88,7 +218,12 @@ func ParseSession(path string) (*Session, error) { } defer file.Close() - session := &Session{Path: path} + session := &Session{ + Path: path, + subagentRequests: map[string]string{}, + subagentNicknames: map[string]string{}, + spawnRequestByCall: map[string]string{}, + } reader := bufio.NewReader(file) lineNum := 0 @@ -156,12 +291,47 @@ func parseResponseItem(env envelope, lineText string, lineNum int, session *Sess Timestamp: env.Timestamp, Type: env.Type, Subtype: payload.Type, + CallID: payload.CallID, Role: payload.Role, Title: titleForType(env.Type, payload.Type), Raw: lineText, } switch payload.Type { + case "function_call": + if payload.Name == "spawn_agent" && session != nil && payload.CallID != "" { + if request := extractSpawnAgentRequest(payload.Arguments); request != "" { + session.spawnRequestByCall[payload.CallID] = request + } + } + item.ToolName = payload.Name + item.ToolInput = payload.Arguments + item.Role = "tool" + item.Class = roleClass("tool") + item.Content = renderFunctionCallContent(payload.Name, payload.CallID, payload.Arguments) + case "function_call_output": + if session != nil && payload.CallID != "" { + if agentID, nickname, ok := extractSpawnedAgent(payload.Output); ok { + if request := session.spawnRequestByCall[payload.CallID]; request != "" { + session.subagentRequests[agentID] = request + } + if nickname != "" { + session.subagentNicknames[agentID] = nickname + } + item.Role = "subagent" + item.Title = "Subagent" + item.Content = renderSubagentSpawnOutput(payload.Output) + item.SubagentID = agentID + item.SubagentNickname = nickname + item.SubagentStatusType = "spawned" + item.SubagentRequest = session.subagentRequests[agentID] + item.Class = roleClass("subagent") + return &item + } + } + item.Role = "tool" + item.Class = roleClass("tool") + item.Content = renderFunctionCallOutputContent(payload.CallID, payload.Output) case "message": if payload.Role != "user" && payload.Role != "assistant" { return nil @@ 
-176,21 +346,62 @@ func parseResponseItem(env envelope, lineText string, lineNum int, session *Sess item.Content = trimUserRequest(item.Content) maybeUpdateMetaCwd(session, item.Content) } + if payload.Role == "user" { + if notification, ok := ExtractSubagentNotification(item.Content); ok { + item.Role = "subagent" + item.Title = "Subagent" + item.Content = notification.StatusText + item.SubagentID = notification.AgentID + item.SubagentNickname = session.subagentNicknames[notification.AgentID] + item.SubagentStatusType = notification.StatusType + item.SubagentRequest = session.subagentRequests[notification.AgentID] + } + } if item.Content == "" { item.Content = prettyJSON(string(env.Payload)) } - item.Class = roleClass(payload.Role) + item.Class = roleClass(item.Role) + if payload.Role == "user" && isToolWarningUserMessage(item.Content) { + item.Role = "assistant" + item.Title = "Agent" + item.Class = roleClass("assistant") + } case "reasoning": item.Role = "assistant" item.Class = roleClass("assistant") item.Content = extractReasoningSummary(env.Payload) - if item.Content == "" { - item.Content = prettyJSON(string(env.Payload)) + if strings.TrimSpace(item.Content) == "" { + return nil } + case "web_search_call": + item.Role = "tool" + item.Class = roleClass("tool") + item.Content = renderWebSearchCallContent(env.Payload) + if strings.TrimSpace(item.Content) == "" { + return nil + } + case "custom_tool_call": + item.ToolName = payload.Name + item.ToolStatus = payload.Status + item.ToolInput = payload.Input + item.Role = "tool" + item.Class = roleClass("tool") + item.Content = renderCustomToolCallContent(payload.Name, payload.Status, payload.CallID, payload.Input) + case "custom_tool_call_output": + item.Role = "tool" + item.Class = roleClass("tool") + item.Content = renderCustomToolCallOutputContent(payload.CallID, payload.Output) + case "ghost_snapshot": + item.Role = "system" + item.Class = roleClass("system") + item.Content = 
renderGhostSnapshotContent(env.Payload) default: return nil } + if strings.TrimSpace(item.Content) == "" { + item.Content = fencedJSONBlock(string(env.Payload)) + } if strings.TrimSpace(item.Content) == "" { item.Content = "(empty)" } @@ -217,18 +428,34 @@ func parseDirectMessage(lineText string, lineNum int, session *Session) *RenderI if payload.Role == "user" { item.Content = trimUserRequest(item.Content) maybeUpdateMetaCwd(session, item.Content) + if notification, ok := ExtractSubagentNotification(item.Content); ok { + item.Role = "subagent" + item.Title = "Subagent" + item.Content = notification.StatusText + item.SubagentID = notification.AgentID + item.SubagentStatusType = notification.StatusType + if session != nil { + item.SubagentNickname = session.subagentNicknames[notification.AgentID] + item.SubagentRequest = session.subagentRequests[notification.AgentID] + } + } } if item.Content == "" { item.Content = prettyJSON(lineText) } - item.Class = roleClass(payload.Role) + item.Class = roleClass(item.Role) + if payload.Role == "user" && isToolWarningUserMessage(item.Content) { + item.Role = "assistant" + item.Title = "Agent" + item.Class = roleClass("assistant") + } return &item } func parseDirectReasoning(lineText string, lineNum int) *RenderItem { content := extractReasoningSummary(json.RawMessage(lineText)) - if content == "" { - content = prettyJSON(lineText) + if strings.TrimSpace(content) == "" { + return nil } return &RenderItem{ Line: lineNum, @@ -309,6 +536,454 @@ func extractReasoningSummary(raw json.RawMessage) string { return strings.TrimSpace(strings.Join(parts, "\n")) } +func renderFunctionCallContent(name, callID, arguments string) string { + if strings.TrimSpace(name) == "exec_command" { + if content := renderExecCommandCallContent(callID, arguments); content != "" { + return content + } + } + if strings.TrimSpace(name) == "update_plan" { + if content := renderUpdatePlanCallContent(callID, arguments); content != "" { + return content + } + } + 
sections := make([]string, 0, 3) + if value := strings.TrimSpace(name); value != "" { + sections = append(sections, "**Tool:** "+value) + } + if value := strings.TrimSpace(callID); value != "" { + sections = append(sections, "**Call ID:** "+value) + } + if block := renderLabeledCodeBlock("Arguments", arguments); block != "" { + sections = append(sections, block) + } + return joinMarkdownSections(sections...) +} + +type updatePlanArgs struct { + Explanation string `json:"explanation"` + Plan []updatePlanStep `json:"plan"` +} + +type updatePlanStep struct { + Status string `json:"status"` + Step string `json:"step"` +} + +func renderUpdatePlanCallContent(callID, arguments string) string { + var payload updatePlanArgs + if err := json.Unmarshal([]byte(arguments), &payload); err != nil { + return "" + } + + sections := make([]string, 0, 4) + sections = append(sections, "**Tool:** update_plan") + if value := strings.TrimSpace(callID); value != "" { + sections = append(sections, "**Call ID:** "+value) + } + if value := strings.TrimSpace(payload.Explanation); value != "" { + sections = append(sections, renderLabeledPlainText("Explanation", value)) + } + if plan := renderUpdatePlanMarkdown(payload.Plan); plan != "" { + sections = append(sections, plan) + } + return joinMarkdownSections(sections...) 
+} + +func renderUpdatePlanMarkdown(steps []updatePlanStep) string { + items := make([]string, 0, len(steps)) + for _, step := range steps { + text := strings.TrimSpace(step.Step) + if text == "" { + continue + } + items = append(items, fmt.Sprintf("- %s %s", updatePlanStatusMarker(step.Status), text)) + } + if len(items) == 0 { + return "" + } + return fmt.Sprintf("**Plan**\n%s", strings.Join(items, "\n")) +} + +func updatePlanStatusMarker(status string) string { + switch strings.ToLower(strings.TrimSpace(status)) { + case "completed": + return "✅" + case "in_progress": + return "□" + case "pending": + return "□" + default: + value := strings.TrimSpace(status) + if value == "" { + return "□" + } + return "[" + value + "]" + } +} + +func renderExecCommandCallContent(callID, arguments string) string { + type execCommandArgs struct { + Cmd string `json:"cmd"` + Workdir string `json:"workdir"` + Justification string `json:"justification"` + SandboxPermissions string `json:"sandbox_permissions"` + } + + var payload execCommandArgs + if err := json.Unmarshal([]byte(arguments), &payload); err != nil { + return "" + } + + sections := make([]string, 0, 5) + sections = append(sections, "**Tool:** exec_command") + if value := strings.TrimSpace(callID); value != "" { + sections = append(sections, "**Call ID:** "+value) + } + if block := renderLabeledShellBlock("Command", payload.Cmd); block != "" { + sections = append(sections, block) + } + if value := strings.TrimSpace(payload.Workdir); value != "" { + sections = append(sections, "**Workdir:** `"+value+"`") + } + if value := strings.TrimSpace(payload.Justification); value != "" { + sections = append(sections, "**Justification:** "+value) + } + if value := strings.TrimSpace(payload.SandboxPermissions); value != "" && value != "use_default" { + sections = append(sections, "**Sandbox:** "+value) + } + return joinMarkdownSections(sections...) 
+} + +func renderFunctionCallOutputContent(callID, output string) string { + sections := make([]string, 0, 2) + if value := strings.TrimSpace(callID); value != "" { + sections = append(sections, "**Call ID:** "+value) + } + if text, ok := extractFunctionCallOutputText(output); ok { + sections = append(sections, renderLabeledPlainText("Output", text)) + } else if block := renderLabeledCodeBlock("Output", output); block != "" { + sections = append(sections, block) + } + return joinMarkdownSections(sections...) +} + +func renderCustomToolCallContent(name, status, callID, input string) string { + sections := make([]string, 0, 4) + if value := strings.TrimSpace(name); value != "" { + sections = append(sections, "**Custom tool:** "+value) + } + if value := strings.TrimSpace(status); value != "" { + sections = append(sections, "**Status:** "+value) + } + if value := strings.TrimSpace(callID); value != "" { + sections = append(sections, "**Call ID:** "+value) + } + if block := renderCustomToolCallInput(name, input); block != "" { + sections = append(sections, block) + } + return joinMarkdownSections(sections...) +} + +func renderCustomToolCallInput(name, input string) string { + if strings.TrimSpace(name) == "apply_patch" { + return renderLabeledDiffBlock("Patch", input) + } + return renderLabeledCodeBlock("Input", input) +} + +func renderCustomToolCallOutputContent(callID, output string) string { + sections := make([]string, 0, 2) + if value := strings.TrimSpace(callID); value != "" { + sections = append(sections, "**Call ID:** "+value) + } + if text, ok := extractCustomToolOutputText(output); ok { + sections = append(sections, renderLabeledPlainText("Output", text)) + } else if block := renderLabeledCodeBlock("Output", output); block != "" { + sections = append(sections, block) + } + return joinMarkdownSections(sections...) 
+} + +func renderWebSearchCallContent(raw json.RawMessage) string { + var payload struct { + Status string `json:"status"` + Action struct { + Type string `json:"type"` + Query string `json:"query"` + Queries []string `json:"queries"` + } `json:"action"` + } + if err := json.Unmarshal(raw, &payload); err != nil { + return "" + } + if strings.TrimSpace(payload.Action.Query) == "" && countNonEmptyStrings(payload.Action.Queries) == 0 { + return "" + } + + sections := make([]string, 0, 4) + if value := strings.TrimSpace(payload.Status); value != "" { + sections = append(sections, "**Status:** "+value) + } + if value := strings.TrimSpace(payload.Action.Type); value != "" { + sections = append(sections, "**Action:** "+value) + } + if block := renderLabeledCodeBlock("Query", payload.Action.Query); block != "" { + sections = append(sections, block) + } + if list := renderMarkdownList("Expanded queries", payload.Action.Queries); list != "" { + sections = append(sections, list) + } + return joinMarkdownSections(sections...) 
+} + +func countNonEmptyStrings(values []string) int { + count := 0 + for _, value := range values { + if strings.TrimSpace(value) != "" { + count++ + } + } + return count +} + +func renderGhostSnapshotContent(raw json.RawMessage) string { + var payload struct { + GhostCommit struct { + ID string `json:"id"` + Parent string `json:"parent"` + PreexistingUntrackedDirs []string `json:"preexisting_untracked_dirs"` + PreexistingUntrackedFiles []string `json:"preexisting_untracked_files"` + } `json:"ghost_commit"` + } + if err := json.Unmarshal(raw, &payload); err != nil { + return "" + } + + sections := make([]string, 0, 4) + if value := strings.TrimSpace(payload.GhostCommit.ID); value != "" { + sections = append(sections, "**Commit:** "+value) + } + if value := strings.TrimSpace(payload.GhostCommit.Parent); value != "" { + sections = append(sections, "**Parent:** "+value) + } + if list := renderMarkdownList("Preexisting untracked files", payload.GhostCommit.PreexistingUntrackedFiles); list != "" { + sections = append(sections, list) + } else { + sections = append(sections, "**Preexisting untracked files:** 0") + } + if list := renderMarkdownList("Preexisting untracked directories", payload.GhostCommit.PreexistingUntrackedDirs); list != "" { + sections = append(sections, list) + } else { + sections = append(sections, "**Preexisting untracked directories:** 0") + } + return joinMarkdownSections(sections...) 
+} + +func renderLabeledCodeBlock(label, value string) string { + trimmed := strings.TrimSpace(value) + if trimmed == "" { + return "" + } + if pretty, ok := prettyJSONString(trimmed); ok { + return fmt.Sprintf("**%s**\n%s", label, fencedCodeBlock("json", pretty)) + } + return fmt.Sprintf("**%s**\n%s", label, fencedCodeBlock("", trimmed)) +} + +func renderLabeledShellBlock(label, value string) string { + trimmed := strings.TrimSpace(value) + if trimmed == "" { + return "" + } + return fmt.Sprintf("**%s**\n%s", label, fencedCodeBlock("sh", trimmed)) +} + +func renderLabeledDiffBlock(label, value string) string { + trimmed := strings.TrimSpace(value) + if trimmed == "" { + return "" + } + return fmt.Sprintf("**%s**\n%s", label, fencedCodeBlock("diff", trimmed)) +} + +func renderLabeledPlainText(label, value string) string { + trimmed := strings.TrimSpace(value) + if trimmed == "" { + return "" + } + return fmt.Sprintf("**%s**\n%s", label, preserveMarkdownLineBreaks(html.EscapeString(trimmed))) +} + +func renderMarkdownList(label string, values []string) string { + items := make([]string, 0, len(values)) + for _, value := range values { + value = strings.TrimSpace(value) + if value == "" { + continue + } + items = append(items, "- "+value) + } + if len(items) == 0 { + return "" + } + return fmt.Sprintf("**%s**\n%s", label, strings.Join(items, "\n")) +} + +func joinMarkdownSections(sections ...string) string { + filtered := make([]string, 0, len(sections)) + for _, section := range sections { + section = strings.TrimSpace(section) + if section == "" { + continue + } + filtered = append(filtered, section) + } + return strings.Join(filtered, "\n\n") +} + +func preserveMarkdownLineBreaks(text string) string { + lines := strings.Split(text, "\n") + for i, line := range lines { + lines[i] = strings.TrimRight(line, "\r") + } + return strings.Join(lines, " \n") +} + +func prettyJSONString(raw string) (string, bool) { + var decoded any + if err := json.Unmarshal([]byte(raw), 
&decoded); err != nil { + return "", false + } + formatted, err := json.MarshalIndent(decoded, "", " ") + if err != nil { + return "", false + } + return string(formatted), true +} + +func fencedJSONBlock(raw string) string { + trimmed := strings.TrimSpace(raw) + if trimmed == "" { + return "" + } + return fencedCodeBlock("json", prettyJSON(trimmed)) +} + +func fencedCodeBlock(language, value string) string { + fence := markdownFence(value) + if language != "" { + return fence + language + "\n" + value + "\n" + fence + } + return fence + "\n" + value + "\n" + fence +} + +func markdownFence(value string) string { + maxRun := 0 + currentRun := 0 + for _, r := range value { + if r == '`' { + currentRun++ + if currentRun > maxRun { + maxRun = currentRun + } + continue + } + currentRun = 0 + } + if maxRun < 3 { + maxRun = 3 + } else { + maxRun++ + } + return strings.Repeat("`", maxRun) +} + +func extractCustomToolOutputText(output string) (string, bool) { + trimmed := strings.TrimSpace(output) + if trimmed == "" { + return "", false + } + var payload struct { + Output string `json:"output"` + } + if err := json.Unmarshal([]byte(trimmed), &payload); err != nil { + sanitized := strings.NewReplacer("\r\n", "\\n", "\n", "\\n", "\r", "\\r").Replace(trimmed) + if err := json.Unmarshal([]byte(sanitized), &payload); err != nil { + return "", false + } + } + if strings.TrimSpace(payload.Output) == "" { + return "", false + } + return payload.Output, true +} + +func extractFunctionCallOutputText(output string) (string, bool) { + trimmed := strings.TrimSpace(output) + if trimmed == "" { + return "", false + } + + if texts, ok := extractTextPayloads(trimmed); ok { + return strings.Join(texts, "\n\n"), true + } + return "", false +} + +type toolOutputTextPayload struct { + Type string `json:"type"` + Text string `json:"text"` + Content []toolOutputTextPayload `json:"content"` +} + +func extractTextPayloads(raw string) ([]string, bool) { + var list []toolOutputTextPayload + if err := 
json.Unmarshal([]byte(raw), &list); err == nil { + if texts := collectTextPayloads(list); len(texts) > 0 { + return texts, true + } + } + + var single toolOutputTextPayload + if err := json.Unmarshal([]byte(raw), &single); err == nil { + if texts := collectTextPayloads(single.Content); len(texts) > 0 { + return texts, true + } + if strings.TrimSpace(single.Text) != "" && isTextPayloadType(single.Type) { + return []string{single.Text}, true + } + } + + return nil, false +} + +func collectTextPayloads(items []toolOutputTextPayload) []string { + texts := make([]string, 0, len(items)) + for _, item := range items { + if nested := collectTextPayloads(item.Content); len(nested) > 0 { + texts = append(texts, nested...) + } + if strings.TrimSpace(item.Text) == "" { + continue + } + if !isTextPayloadType(item.Type) { + continue + } + texts = append(texts, item.Text) + } + return texts +} + +func isTextPayloadType(payloadType string) bool { + switch strings.TrimSpace(payloadType) { + case "", "text", "input_text", "output_text", "summary_text": + return true + default: + return false + } +} + func applyMeta(session *Session, meta SessionMeta) { if session == nil { return @@ -326,20 +1001,91 @@ func mergeMeta(target *SessionMeta, meta SessionMeta) { if target.ID == "" { target.ID = meta.ID } + if target.ForkedFromID == "" { + target.ForkedFromID = meta.ForkedFromID + } if target.Timestamp == "" { target.Timestamp = meta.Timestamp } if target.Cwd == "" { target.Cwd = meta.Cwd } + if target.Git == nil { + target.Git = meta.Git + } else { + mergeSessionMetaGit(target.Git, meta.Git) + } if target.Originator == "" { target.Originator = meta.Originator } if target.CliVersion == "" { target.CliVersion = meta.CliVersion } + if target.AgentNickname == "" { + target.AgentNickname = meta.AgentNickname + } + if target.AgentRole == "" { + target.AgentRole = meta.AgentRole + } + if target.Source == nil { + target.Source = meta.Source + } else { + mergeSessionMetaSource(target.Source, 
meta.Source) + } if target.Instructions == "" { - target.Instructions = meta.Instructions + switch { + case meta.Instructions != "": + target.Instructions = meta.Instructions + case meta.BaseInstructions != nil: + target.Instructions = meta.BaseInstructions.Text + } + } +} + +func mergeSessionMetaSource(target, src *sessionMetaSource) { + if target == nil || src == nil { + return + } + if target.Subagent == nil { + target.Subagent = src.Subagent + return + } + if src.Subagent == nil { + return + } + if target.Subagent.ThreadSpawn == nil { + target.Subagent.ThreadSpawn = src.Subagent.ThreadSpawn + return + } + if src.Subagent.ThreadSpawn == nil { + return + } + if target.Subagent.ThreadSpawn.ParentThreadID == "" { + target.Subagent.ThreadSpawn.ParentThreadID = src.Subagent.ThreadSpawn.ParentThreadID + } + if target.Subagent.ThreadSpawn.Depth == 0 { + target.Subagent.ThreadSpawn.Depth = src.Subagent.ThreadSpawn.Depth + } + if target.Subagent.ThreadSpawn.AgentNickname == "" { + target.Subagent.ThreadSpawn.AgentNickname = src.Subagent.ThreadSpawn.AgentNickname + } + if target.Subagent.ThreadSpawn.AgentRole == "" { + target.Subagent.ThreadSpawn.AgentRole = src.Subagent.ThreadSpawn.AgentRole + } +} + +func mergeSessionMetaGit(target, src *sessionMetaGit) { + if target == nil || src == nil { + return + } + if target.CommitHash == "" { + target.CommitHash = src.CommitHash + } + if target.Branch == "" { + target.Branch = src.Branch + } + if target.RepositoryURL == "" { + target.RepositoryURL = src.RepositoryURL } } @@ -348,21 +1094,28 @@ func applyMetaLine(session *Session, lineText string) bool { if err := json.Unmarshal([]byte(lineText), &meta); err != nil { return false } - if meta.ID == "" && meta.Timestamp == "" && meta.Cwd == "" && meta.Originator == "" && meta.CliVersion == "" && meta.Instructions == nil { + if meta.ID == "" && meta.ForkedFromID == "" && meta.Timestamp == "" && meta.Cwd == "" && meta.Git == nil && meta.Originator == "" && meta.CliVersion == "" && 
meta.AgentNickname == "" && meta.AgentRole == "" && meta.Source == nil && meta.Instructions == nil && meta.BaseInstructions == nil { return false } merged := SessionMeta{ - ID: meta.ID, - Timestamp: meta.Timestamp, - Cwd: meta.Cwd, - Originator: meta.Originator, - CliVersion: meta.CliVersion, + ID: meta.ID, + ForkedFromID: meta.ForkedFromID, + Timestamp: meta.Timestamp, + Cwd: meta.Cwd, + Git: meta.Git, + Originator: meta.Originator, + CliVersion: meta.CliVersion, + AgentNickname: meta.AgentNickname, + AgentRole: meta.AgentRole, + Source: meta.Source, } if meta.Instructions != nil { merged.Instructions = *meta.Instructions + } else if meta.BaseInstructions != nil { + merged.Instructions = meta.BaseInstructions.Text } applyMeta(session, merged) - return meta.ID != "" || meta.Cwd != "" || meta.Timestamp != "" + return meta.ID != "" || meta.ForkedFromID != "" || meta.Cwd != "" || meta.Timestamp != "" || meta.Git != nil } func prettyJSON(raw string) string { @@ -382,6 +1135,14 @@ func titleForType(eventType, subType string) string { return "Tool call" case "function_call_output": return "Tool output" + case "custom_tool_call": + return "Custom tool call" + case "custom_tool_call_output": + return "Custom tool output" + case "web_search_call": + return "Web search" + case "ghost_snapshot": + return "Ghost snapshot" case "reasoning": return "Reasoning" default: @@ -403,6 +1164,8 @@ func titleForRole(role string) string { return "User" case "assistant": return "Agent" + case "subagent": + return "Subagent" default: return "Message" } @@ -414,6 +1177,8 @@ func roleClass(role string) string { return "role-user" case "assistant": return "role-assistant" + case "subagent": + return "role-subagent" case "system": return "role-system" case "tool": @@ -433,8 +1198,13 @@ func mergeConsecutive(items []RenderItem) []RenderItem { current := items[0] for i := 1; i < len(items); i++ { item := items[i] - if current.Type == item.Type && current.Subtype == item.Subtype && current.Role == 
item.Role { + if current.Type == item.Type && current.Subtype == item.Subtype && current.Role == item.Role && shouldMergeConsecutive(current, item) { if isUserMessage(item) { + if IsAutoContextUserMessage(current.Content) || IsAutoContextUserMessage(item.Content) { + out = append(out, current) + current = item + continue + } current = item continue } @@ -454,6 +1224,18 @@ func mergeConsecutive(items []RenderItem) []RenderItem { return out } +func shouldMergeConsecutive(current, item RenderItem) bool { + if current.SubagentID != "" || item.SubagentID != "" { + return false + } + switch current.Subtype { + case "message", "reasoning": + return true + default: + return false + } +} + func trimUserRequest(content string) string { if !trimUserRequestEnabled { return content @@ -476,10 +1258,7 @@ func IsAutoContextUserMessage(content string) bool { if trimmed == "" { return false } - if isAgentsInstructionsOnly(trimmed) { - return true - } - if isTaggedBlocksOnly(trimmed) { + if isAutoContextBlocksOnly(trimmed) { return true } if isLegacyCwdOnly(trimmed) { @@ -525,19 +1304,31 @@ func isUserMessage(item RenderItem) bool { } func isAgentsInstructionsOnly(text string) bool { - if !strings.HasPrefix(text, "# AGENTS.md instructions") { - return false - } - openIdx := strings.Index(text, "") - if openIdx == -1 { - return false - } - closeIdx := strings.Index(text, "") - if closeIdx == -1 { + remaining, ok := consumeAgentsInstructionsBlock(text) + return ok && strings.TrimSpace(remaining) == "" +} + +func isAutoContextBlocksOnly(text string) bool { + remaining := text + consumed := false + for { + remaining = strings.TrimSpace(remaining) + if remaining == "" { + return consumed + } + var ok bool + remaining, ok = consumeAgentsInstructionsBlock(remaining) + if ok { + consumed = true + continue + } + remaining, ok = consumeAutoContextTaggedBlock(remaining) + if ok { + consumed = true + continue + } return false } - after := strings.TrimSpace(text[closeIdx+len(""):]) - return 
after == "" } func isTaggedBlocksOnly(text string) bool { @@ -548,11 +1339,7 @@ func isTaggedBlocksOnly(text string) bool { return true } var ok bool - remaining, ok = consumeTaggedBlock(remaining, "", "") - if ok { - continue - } - remaining, ok = consumeTaggedBlock(remaining, "", "") + remaining, ok = consumeAutoContextTaggedBlock(remaining) if ok { continue } @@ -560,6 +1347,22 @@ func isTaggedBlocksOnly(text string) bool { } } +func consumeAgentsInstructionsBlock(text string) (string, bool) { + if !strings.HasPrefix(text, "# AGENTS.md instructions") { + return text, false + } + openIdx := strings.Index(text, "") + if openIdx == -1 { + return text, false + } + closeIdx := strings.Index(text, "") + if closeIdx == -1 { + return text, false + } + rest := text[closeIdx+len(""):] + return rest, true +} + func consumeTaggedBlock(text, openTag, closeTag string) (string, bool) { if !strings.HasPrefix(text, openTag) { return text, false @@ -572,6 +1375,18 @@ func consumeTaggedBlock(text, openTag, closeTag string) (string, bool) { return rest, true } +func consumeAutoContextTaggedBlock(text string) (string, bool) { + for _, pair := range [][2]string{ + {"", ""}, + {"", ""}, + } { + if rest, ok := consumeTaggedBlock(text, pair[0], pair[1]); ok { + return rest, true + } + } + return text, false +} + func isLegacyCwdOnly(text string) bool { lines := strings.Split(text, "\n") for _, line := range lines { @@ -589,3 +1404,32 @@ func isLegacyCwdOnly(text string) bool { } return true } + +func isToolWarningUserMessage(content string) bool { + trimmed := strings.TrimSpace(content) + if trimmed == "" { + return false + } + lines := strings.Split(trimmed, "\n") + nonEmpty := 0 + lastNonEmpty := "" + for _, line := range lines { + line = strings.TrimSpace(line) + if line == "" { + continue + } + nonEmpty++ + lastNonEmpty = line + if nonEmpty > 1 { + return false + } + } + trimmed = lastNonEmpty + if !strings.HasPrefix(trimmed, "Warning: apply_patch was requested via ") { + return 
false + } + if !strings.HasSuffix(trimmed, "Use the apply_patch tool instead of exec_command.") { + return false + } + return true +} diff --git a/internal/sessions/parser_test.go b/internal/sessions/parser_test.go index 28c2fd7..d6f3a17 100644 --- a/internal/sessions/parser_test.go +++ b/internal/sessions/parser_test.go @@ -3,6 +3,7 @@ package sessions import ( "os" "path/filepath" + "strings" "testing" ) @@ -10,7 +11,7 @@ func TestParseSession(t *testing.T) { base := t.TempDir() filePath := filepath.Join(base, "session.jsonl") data := "" + - "{\"timestamp\":\"2026-01-09T01:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"abc\",\"timestamp\":\"2026-01-09T01:00:00Z\",\"cwd\":\"/tmp\",\"originator\":\"cli\",\"cli_version\":\"0.1\",\"instructions\":\"hello\"}}\n" + + "{\"timestamp\":\"2026-01-09T01:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"abc\",\"timestamp\":\"2026-01-09T01:00:00Z\",\"cwd\":\"/tmp\",\"git\":{\"branch\":\"feature/test-branch\",\"commit_hash\":\"abc123\",\"repository_url\":\"https://github.com/cinkster/codex-manager.git\"},\"originator\":\"cli\",\"cli_version\":\"0.1\",\"instructions\":\"hello\"}}\n" + "{\"timestamp\":\"2026-01-09T01:00:01Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"Context that should be dropped\"}]}}\n" + "{\"timestamp\":\"2026-01-09T01:00:01Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"Hello\\n\\n## My request for Codex:\\nOnly this\"}]}}\n" + "{\"timestamp\":\"2026-01-09T01:00:02Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"function_call\",\"name\":\"shell_command\",\"arguments\":\"{}\",\"call_id\":\"call_1\"}}\n" + @@ -32,17 +33,29 @@ func TestParseSession(t *testing.T) { if session.Meta == nil || session.Meta.ID != "abc" { t.Fatalf("expected session meta") } - if len(session.Items) != 3 { - t.Fatalf("expected 3 items, got %d", 
len(session.Items)) + if session.Meta.GitBranch() != "feature/test-branch" { + t.Fatalf("expected git branch, got %q", session.Meta.GitBranch()) + } + if session.Meta.GitRepositoryURL() != "https://github.com/cinkster/codex-manager.git" { + t.Fatalf("expected git repository url, got %q", session.Meta.GitRepositoryURL()) + } + if len(session.Items) != 5 { + t.Fatalf("expected 5 items, got %d", len(session.Items)) } if session.Items[0].Content != "Only this" { t.Fatalf("unexpected message content: %q", session.Items[0].Content) } - if session.Items[1].Content != "Reason" { - t.Fatalf("expected reasoning summary, got %q", session.Items[1].Content) + if session.Items[1].Subtype != "function_call" || !strings.Contains(session.Items[1].Content, "**Tool:** shell_command") { + t.Fatalf("expected visible tool call, got %#v", session.Items[1]) } - if session.Items[2].Content != "Later" { - t.Fatalf("expected last user message, got %q", session.Items[2].Content) + if session.Items[2].Subtype != "function_call_output" || !strings.Contains(session.Items[2].Content, "**Output**") { + t.Fatalf("expected visible tool output, got %#v", session.Items[2]) + } + if session.Items[3].Content != "Reason" { + t.Fatalf("expected reasoning summary, got %q", session.Items[3].Content) + } + if session.Items[4].Content != "Later" { + t.Fatalf("expected last user message, got %q", session.Items[4].Content) } } @@ -80,3 +93,413 @@ func TestParseSessionDirectFormat(t *testing.T) { t.Fatalf("unexpected reasoning content: %q", session.Items[2].Content) } } + +func TestParseSessionCapturesSubagentThreadMeta(t *testing.T) { + base := t.TempDir() + filePath := filepath.Join(base, "session.jsonl") + data := "" + + 
"{\"timestamp\":\"2026-03-13T00:23:02Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"agent-1\",\"forked_from_id\":\"parent-1\",\"timestamp\":\"2026-03-13T00:23:02Z\",\"cwd\":\"/tmp\",\"originator\":\"codex_cli_rs\",\"cli_version\":\"0.114.0\",\"source\":{\"subagent\":{\"thread_spawn\":{\"parent_thread_id\":\"parent-1\",\"depth\":1,\"agent_nickname\":\"Anscombe\",\"agent_role\":\"explorer\"}}},\"agent_nickname\":\"Anscombe\",\"agent_role\":\"explorer\"}}\n" + + "{\"timestamp\":\"2026-03-13T00:23:03Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"assistant\",\"content\":[{\"type\":\"output_text\",\"text\":\"hello\"}]}}\n" + + if err := os.WriteFile(filePath, []byte(data), 0o644); err != nil { + t.Fatalf("write: %v", err) + } + + session, err := ParseSession(filePath) + if err != nil { + t.Fatalf("parse: %v", err) + } + if session.Meta == nil { + t.Fatalf("expected session meta") + } + if !session.Meta.IsSubagentThread() { + t.Fatalf("expected subagent thread meta, got %#v", session.Meta) + } + if session.Meta.ParentThreadID() != "parent-1" { + t.Fatalf("expected parent thread id, got %q", session.Meta.ParentThreadID()) + } + if session.Meta.SubagentNicknameValue() != "Anscombe" { + t.Fatalf("expected subagent nickname, got %q", session.Meta.SubagentNicknameValue()) + } + if session.Meta.SubagentRoleValue() != "explorer" { + t.Fatalf("expected subagent role, got %q", session.Meta.SubagentRoleValue()) + } +} + +func TestParseSessionPreservesAutoContextBeforeUserRequest(t *testing.T) { + base := t.TempDir() + filePath := filepath.Join(base, "session.jsonl") + data := "" + + "{\"timestamp\":\"2026-03-09T01:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"abc\",\"timestamp\":\"2026-03-09T01:00:00Z\",\"cwd\":\"/tmp\",\"originator\":\"cli\",\"cli_version\":\"0.1\",\"base_instructions\":{\"text\":\"base\"}}}\n" + + 
"{\"timestamp\":\"2026-03-09T01:00:01Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"developer\",\"content\":[{\"type\":\"input_text\",\"text\":\"ignored\"}]}}\n" + + "{\"timestamp\":\"2026-03-09T01:00:02Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"# AGENTS.md instructions for /tmp\\n\\n\\nhello\\n\"},{\"type\":\"input_text\",\"text\":\"\\nCurrent working directory: /tmp\\n\"}]}}\n" + + "{\"timestamp\":\"2026-03-09T01:00:03Z\",\"type\":\"event_msg\",\"payload\":{\"type\":\"task_started\"}}\n" + + "{\"timestamp\":\"2026-03-09T01:00:04Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"developer\",\"content\":[{\"type\":\"input_text\",\"text\":\"ignored again\"}]}}\n" + + "{\"timestamp\":\"2026-03-09T01:00:05Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## My request for Codex:\\nFix it\"}]}}\n" + + if err := os.WriteFile(filePath, []byte(data), 0o644); err != nil { + t.Fatalf("write: %v", err) + } + + session, err := ParseSession(filePath) + if err != nil { + t.Fatalf("parse: %v", err) + } + if session.Meta == nil || session.Meta.Instructions != "base" { + t.Fatalf("expected base instructions to populate session meta, got %#v", session.Meta) + } + if len(session.Items) != 2 { + t.Fatalf("expected 2 items, got %d", len(session.Items)) + } + if !IsAutoContextUserMessage(session.Items[0].Content) { + t.Fatalf("expected first item to be detected as auto context: %q", session.Items[0].Content) + } + if session.Items[1].Content != "Fix it" { + t.Fatalf("expected trimmed user request, got %q", session.Items[1].Content) + } +} + +func TestParseSessionExtractsSubagentNotification(t *testing.T) { + base := t.TempDir() + filePath := filepath.Join(base, "session.jsonl") + data := "" + + 
"{\"timestamp\":\"2026-03-13T00:25:44Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"abc\",\"timestamp\":\"2026-03-13T00:25:44Z\",\"cwd\":\"/tmp\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-13T00:25:44Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"function_call\",\"name\":\"spawn_agent\",\"arguments\":\"{\\\"message\\\":\\\"review BillingStatusService\\\"}\",\"call_id\":\"call_1\"}}\n" + + "{\"timestamp\":\"2026-03-13T00:25:44Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"function_call_output\",\"call_id\":\"call_1\",\"output\":\"{\\\"agent_id\\\":\\\"agent-1\\\",\\\"nickname\\\":\\\"Anscombe\\\"}\"}}\n" + + "{\"timestamp\":\"2026-03-13T00:25:45Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"\\n{\\\"agent_id\\\":\\\"agent-1\\\",\\\"status\\\":{\\\"completed\\\":\\\"done\\\"}}\\n\"}]}}\n" + + "{\"timestamp\":\"2026-03-13T00:25:46Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## My request for Codex:\\nContinue\"}]}}\n" + + if err := os.WriteFile(filePath, []byte(data), 0o644); err != nil { + t.Fatalf("write: %v", err) + } + + session, err := ParseSession(filePath) + if err != nil { + t.Fatalf("parse: %v", err) + } + if len(session.Items) != 4 { + t.Fatalf("expected 4 items, got %d", len(session.Items)) + } + call := session.Items[0] + if call.Subtype != "function_call" || call.Role != "tool" { + t.Fatalf("expected visible spawn_agent tool call, got %#v", call) + } + if !strings.Contains(call.Content, "review BillingStatusService") { + t.Fatalf("expected spawn_agent request to stay visible, got %q", call.Content) + } + + spawned := session.Items[1] + if spawned.SubagentID != "agent-1" { + t.Fatalf("expected spawned subagent id, got %q", spawned.SubagentID) + } + if spawned.SubagentStatusType != "spawned" { + t.Fatalf("expected spawned status 
type, got %q", spawned.SubagentStatusType) + } + if !strings.Contains(spawned.Content, "\"nickname\": \"Anscombe\"") { + t.Fatalf("expected spawned content to include nickname json, got %q", spawned.Content) + } + + item := session.Items[2] + if item.SubagentID != "agent-1" { + t.Fatalf("expected subagent id, got %q", item.SubagentID) + } + if item.SubagentNickname != "Anscombe" { + t.Fatalf("expected subagent nickname, got %q", item.SubagentNickname) + } + if item.SubagentStatusType != "completed" { + t.Fatalf("expected subagent status type, got %q", item.SubagentStatusType) + } + if item.SubagentRequest != "review BillingStatusService" { + t.Fatalf("expected subagent request, got %q", item.SubagentRequest) + } + if item.Title != "Subagent" { + t.Fatalf("expected Subagent title, got %q", item.Title) + } + if item.Content != "done" { + t.Fatalf("expected extracted notification content, got %q", item.Content) + } + if IsAutoContextUserMessage(item.Content) { + t.Fatalf("expected subagent notification to stay visible") + } + if session.Items[3].Content != "Continue" { + t.Fatalf("expected trimmed user request, got %q", session.Items[3].Content) + } +} + +func TestRenderFunctionCallOutputContentUsesLongerFenceWhenOutputContainsBackticks(t *testing.T) { + output := "Chunk ID: a5b125\nOutput:\n\n```bash\n./gradlew clean build\n```\n" + + rendered := renderFunctionCallOutputContent("call_1", output) + + marker := "**Output**\n" + index := strings.Index(rendered, marker) + if index < 0 { + t.Fatalf("expected output marker, got %q", rendered) + } + if !strings.HasPrefix(rendered[index+len(marker):], "````\n") { + t.Fatalf("expected outer fence to expand beyond triple backticks, got %q", rendered[index+len(marker):]) + } + if !strings.Contains(rendered, "\n````") { + t.Fatalf("expected expanded closing fence, got %q", rendered) + } +} + +func TestRenderFunctionCallOutputContentExtractsTextPayload(t *testing.T) { + output := `[ + { + "text": "Current user is a member of 2 
teams:\n\n\n{\n \"teams\": [\n {\n \"id\": \"team-1\"\n }\n ]\n}\n", + "type": "text" + } +]` + + rendered := renderFunctionCallOutputContent("call_1", output) + + if !strings.Contains(rendered, "Current user is a member of 2 teams:") { + t.Fatalf("expected extracted text payload, got %q", rendered) + } + if strings.Contains(rendered, "\"type\": \"text\"") || strings.Contains(rendered, "```") { + t.Fatalf("expected raw json payload to stay hidden, got %q", rendered) + } + if !strings.Contains(rendered, "<json>") || !strings.Contains(rendered, "</json>") { + t.Fatalf("expected angle-bracket markers to stay visible as text, got %q", rendered) + } +} + +func TestRenderCustomToolCallContentFormatsApplyPatchAsDiff(t *testing.T) { + input := "*** Begin Patch\n*** Update File: /tmp/file.txt\n@@\n-old\n+new\n*** End Patch" + + rendered := renderCustomToolCallContent("apply_patch", "completed", "call_patch", input) + + if !strings.Contains(rendered, "**Patch**") { + t.Fatalf("expected patch label, got %q", rendered) + } + if !strings.Contains(rendered, "```diff") { + t.Fatalf("expected diff fence, got %q", rendered) + } + if !strings.Contains(rendered, "*** Update File: /tmp/file.txt") || !strings.Contains(rendered, "+new") { + t.Fatalf("expected patch body, got %q", rendered) + } +} + +func TestParseSessionKeepsMultipleSpawnedSubagentsSeparate(t *testing.T) { + base := t.TempDir() + filePath := filepath.Join(base, "session.jsonl") + data := "" + + "{\"timestamp\":\"2026-03-13T00:25:44Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"abc\",\"timestamp\":\"2026-03-13T00:25:44Z\",\"cwd\":\"/tmp\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-13T00:25:44Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"function_call\",\"name\":\"spawn_agent\",\"arguments\":\"{\\\"message\\\":\\\"review A\\\"}\",\"call_id\":\"call_1\"}}\n" + + 
"{\"timestamp\":\"2026-03-13T00:25:44Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"function_call\",\"name\":\"spawn_agent\",\"arguments\":\"{\\\"message\\\":\\\"review B\\\"}\",\"call_id\":\"call_2\"}}\n" + + "{\"timestamp\":\"2026-03-13T00:25:44Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"function_call_output\",\"call_id\":\"call_1\",\"output\":\"{\\\"agent_id\\\":\\\"agent-1\\\",\\\"nickname\\\":\\\"Anscombe\\\"}\"}}\n" + + "{\"timestamp\":\"2026-03-13T00:25:45Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"function_call_output\",\"call_id\":\"call_2\",\"output\":\"{\\\"agent_id\\\":\\\"agent-2\\\",\\\"nickname\\\":\\\"Boyle\\\"}\"}}\n" + + if err := os.WriteFile(filePath, []byte(data), 0o644); err != nil { + t.Fatalf("write: %v", err) + } + + session, err := ParseSession(filePath) + if err != nil { + t.Fatalf("parse: %v", err) + } + if len(session.Items) != 4 { + t.Fatalf("expected 4 items, got %d", len(session.Items)) + } + if session.Items[0].Subtype != "function_call" || session.Items[1].Subtype != "function_call" { + t.Fatalf("expected visible function calls to stay separate, got %#v", session.Items) + } + if session.Items[2].SubagentID != "agent-1" || session.Items[3].SubagentID != "agent-2" { + t.Fatalf("expected spawned subagents to stay separate, got %#v", session.Items) + } +} + +func TestParseSessionShowsSelectedResponseItemsAndSkipsEncryptedOnlyReasoning(t *testing.T) { + base := t.TempDir() + filePath := filepath.Join(base, "session.jsonl") + data := "" + + "{\"timestamp\":\"2026-03-18T01:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"abc\",\"timestamp\":\"2026-03-18T01:00:00Z\",\"cwd\":\"/tmp\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-18T01:00:01Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"web_search_call\",\"status\":\"completed\",\"action\":{\"type\":\"search\",\"query\":\"Codex CLI notify hook\",\"queries\":[\"Codex CLI notify hook\",\"OpenAI Codex 
notifications\"]}}}\n" + + "{\"timestamp\":\"2026-03-18T01:00:02Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"custom_tool_call\",\"status\":\"completed\",\"call_id\":\"call_patch\",\"name\":\"apply_patch\",\"input\":\"*** Begin Patch\\n*** End Patch\"}}\n" + + "{\"timestamp\":\"2026-03-18T01:00:03Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"custom_tool_call_output\",\"call_id\":\"call_patch\",\"output\":\"{\\\"output\\\":\\\"Success\\\",\\\"metadata\\\":{\\\"exit_code\\\":0}}\"}}\n" + + "{\"timestamp\":\"2026-03-18T01:00:04Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"ghost_snapshot\",\"ghost_commit\":{\"id\":\"abc123\",\"parent\":\"def456\",\"preexisting_untracked_files\":[\"tmp/note.md\"],\"preexisting_untracked_dirs\":[]}}}\n" + + "{\"timestamp\":\"2026-03-18T01:00:05Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"reasoning\",\"summary\":[],\"content\":null,\"encrypted_content\":\"secret\"}}\n" + + "{\"timestamp\":\"2026-03-18T01:00:06Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"reasoning\",\"summary\":[{\"type\":\"summary_text\",\"text\":\"Keep this\"}]}}\n" + + if err := os.WriteFile(filePath, []byte(data), 0o644); err != nil { + t.Fatalf("write: %v", err) + } + + session, err := ParseSession(filePath) + if err != nil { + t.Fatalf("parse: %v", err) + } + if len(session.Items) != 5 { + t.Fatalf("expected 5 items, got %d", len(session.Items)) + } + if session.Items[0].Subtype != "web_search_call" || !strings.Contains(session.Items[0].Content, "Expanded queries") { + t.Fatalf("expected visible web search call, got %#v", session.Items[0]) + } + if session.Items[1].Subtype != "custom_tool_call" || !strings.Contains(session.Items[1].Content, "apply_patch") { + t.Fatalf("expected visible custom tool call, got %#v", session.Items[1]) + } + if session.Items[2].Subtype != "custom_tool_call_output" || !strings.Contains(session.Items[2].Content, "Success") { + t.Fatalf("expected visible custom tool output, got %#v", 
session.Items[2]) + } + if strings.Contains(session.Items[2].Content, "\"exit_code\": 0") || strings.Contains(session.Items[2].Content, "```") { + t.Fatalf("expected custom tool output to use extracted plain output, got %#v", session.Items[2]) + } + if session.Items[3].Subtype != "ghost_snapshot" || !strings.Contains(session.Items[3].Content, "abc123") { + t.Fatalf("expected visible ghost snapshot, got %#v", session.Items[3]) + } + if session.Items[4].Subtype != "reasoning" || session.Items[4].Content != "Keep this" { + t.Fatalf("expected only non-empty reasoning to remain, got %#v", session.Items[4]) + } +} + +func TestParseSessionSkipsQuerylessWebSearchCall(t *testing.T) { + base := t.TempDir() + filePath := filepath.Join(base, "session.jsonl") + data := "" + + "{\"timestamp\":\"2026-03-18T01:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"abc\",\"timestamp\":\"2026-03-18T01:00:00Z\",\"cwd\":\"/tmp\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-18T01:00:01Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"web_search_call\",\"status\":\"completed\",\"action\":{\"type\":\"open_page\"}}}\n" + + "{\"timestamp\":\"2026-03-18T01:00:02Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"assistant\",\"content\":[{\"type\":\"output_text\",\"text\":\"visible\"}]}}\n" + + if err := os.WriteFile(filePath, []byte(data), 0o644); err != nil { + t.Fatalf("write: %v", err) + } + + session, err := ParseSession(filePath) + if err != nil { + t.Fatalf("parse: %v", err) + } + if len(session.Items) != 1 { + t.Fatalf("expected only visible message item, got %d %#v", len(session.Items), session.Items) + } + if session.Items[0].Subtype != "message" || session.Items[0].Content != "visible" { + t.Fatalf("expected assistant message to remain, got %#v", session.Items[0]) + } +} + +func TestParseSessionFormatsExecCommandToolCallSummary(t *testing.T) { + base := t.TempDir() + filePath := filepath.Join(base, 
"session.jsonl") + data := "" + + "{\"timestamp\":\"2026-03-18T01:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"abc\",\"timestamp\":\"2026-03-18T01:00:00Z\",\"cwd\":\"/tmp\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-18T01:00:01Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"function_call\",\"name\":\"exec_command\",\"arguments\":\"{\\\"cmd\\\":\\\"sed -n '140,170p' /tmp/README.md\\\",\\\"workdir\\\":\\\"/tmp/project\\\",\\\"max_output_tokens\\\":4000}\",\"call_id\":\"call_exec\"}}\n" + + if err := os.WriteFile(filePath, []byte(data), 0o644); err != nil { + t.Fatalf("write: %v", err) + } + + session, err := ParseSession(filePath) + if err != nil { + t.Fatalf("parse: %v", err) + } + if len(session.Items) != 1 { + t.Fatalf("expected 1 item, got %d", len(session.Items)) + } + item := session.Items[0] + if item.Subtype != "function_call" { + t.Fatalf("expected function_call item, got %#v", item) + } + if !strings.Contains(item.Content, "**Command**") || !strings.Contains(item.Content, "sed -n '140,170p' /tmp/README.md") { + t.Fatalf("expected command summary, got %q", item.Content) + } + if !strings.Contains(item.Content, "**Workdir:** `/tmp/project`") { + t.Fatalf("expected workdir summary, got %q", item.Content) + } + if strings.Contains(item.Content, "max_output_tokens") || strings.Contains(item.Content, "**Arguments**") { + t.Fatalf("expected raw arguments to be hidden, got %q", item.Content) + } +} + +func TestParseSessionFormatsUpdatePlanToolCallSummary(t *testing.T) { + base := t.TempDir() + filePath := filepath.Join(base, "session.jsonl") + data := "" + + "{\"timestamp\":\"2026-03-18T01:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"abc\",\"timestamp\":\"2026-03-18T01:00:00Z\",\"cwd\":\"/tmp\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + 
"{\"timestamp\":\"2026-03-18T01:00:01Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"function_call\",\"name\":\"update_plan\",\"arguments\":\"{\\\"explanation\\\":\\\"Keep the session page in sync with /active.\\\",\\\"plan\\\":[{\\\"status\\\":\\\"completed\\\",\\\"step\\\":\\\"Add active state fields\\\"},{\\\"status\\\":\\\"in_progress\\\",\\\"step\\\":\\\"Render detailed page controls\\\"},{\\\"status\\\":\\\"pending\\\",\\\"step\\\":\\\"Run tests and update memo\\\"}]}\",\"call_id\":\"call_plan\"}}\n" + + if err := os.WriteFile(filePath, []byte(data), 0o644); err != nil { + t.Fatalf("write: %v", err) + } + + session, err := ParseSession(filePath) + if err != nil { + t.Fatalf("parse: %v", err) + } + if len(session.Items) != 1 { + t.Fatalf("expected 1 item, got %d", len(session.Items)) + } + item := session.Items[0] + if item.Subtype != "function_call" { + t.Fatalf("expected function_call item, got %#v", item) + } + if !strings.Contains(item.Content, "**Plan**") || !strings.Contains(item.Content, "□ Render detailed page controls") { + t.Fatalf("expected formatted plan summary, got %q", item.Content) + } + if !strings.Contains(item.Content, "✅ Add active state fields") || !strings.Contains(item.Content, "□ Run tests and update memo") { + t.Fatalf("expected icon-based plan markers, got %q", item.Content) + } + if !strings.Contains(item.Content, "**Explanation**") || !strings.Contains(item.Content, "Keep the session page in sync with /active.") { + t.Fatalf("expected explanation summary, got %q", item.Content) + } + if strings.Contains(item.Content, "\"status\"") || strings.Contains(item.Content, "**Arguments**") || strings.Contains(item.Content, "\"step\"") { + t.Fatalf("expected raw arguments to stay hidden, got %q", item.Content) + } +} + +func TestReclassifyToolWarning(t *testing.T) { + base := t.TempDir() + filePath := filepath.Join(base, "session.jsonl") + data := "" + + 
"{\"timestamp\":\"2026-01-09T01:00:01Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"Warning: apply_patch was requested via exec_command. Use the apply_patch tool instead of exec_command.\"}]}}\n" + + if err := os.WriteFile(filePath, []byte(data), 0o644); err != nil { + t.Fatalf("write: %v", err) + } + + session, err := ParseSession(filePath) + if err != nil { + t.Fatalf("parse: %v", err) + } + if len(session.Items) != 1 { + t.Fatalf("expected 1 item, got %d", len(session.Items)) + } + item := session.Items[0] + if item.Role != "assistant" { + t.Fatalf("expected assistant role, got %q", item.Role) + } + if item.Class != "role-assistant" { + t.Fatalf("expected role-assistant class, got %q", item.Class) + } + if item.Title != "Agent" { + t.Fatalf("expected Agent title, got %q", item.Title) + } +} + +func TestToolWarningWithExtraTextStaysUser(t *testing.T) { + base := t.TempDir() + filePath := filepath.Join(base, "session.jsonl") + data := "" + + "{\"timestamp\":\"2026-01-09T01:00:01Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"Warning: apply_patch was requested via exec_command. 
Use the apply_patch tool instead of exec_command.\\n\\nExtra note\"}]}}\n" + + if err := os.WriteFile(filePath, []byte(data), 0o644); err != nil { + t.Fatalf("write: %v", err) + } + + session, err := ParseSession(filePath) + if err != nil { + t.Fatalf("parse: %v", err) + } + if len(session.Items) != 1 { + t.Fatalf("expected 1 item, got %d", len(session.Items)) + } + item := session.Items[0] + if item.Role != "user" { + t.Fatalf("expected user role, got %q", item.Role) + } + if item.Class != "role-user" { + t.Fatalf("expected role-user class, got %q", item.Class) + } + if item.Title != "User" { + t.Fatalf("expected User title, got %q", item.Title) + } +} diff --git a/internal/sessions/subagent_notification.go b/internal/sessions/subagent_notification.go new file mode 100644 index 0000000..ce08a67 --- /dev/null +++ b/internal/sessions/subagent_notification.go @@ -0,0 +1,162 @@ +package sessions + +import ( + "encoding/json" + "sort" + "strings" +) + +const ( + subagentNotificationOpenTag = "<subagent_notification>" + subagentNotificationCloseTag = "</subagent_notification>" +) + +// SubagentNotification represents a parsed subagent notification block. +type SubagentNotification struct { + AgentID string + StatusType string + StatusText string +} + +// ExtractSubagentNotification returns the parsed notification when the content +// is a single <subagent_notification> block.
+func ExtractSubagentNotification(content string) (SubagentNotification, bool) { + trimmed := strings.TrimSpace(content) + if trimmed == "" || !strings.HasPrefix(trimmed, subagentNotificationOpenTag) { + return SubagentNotification{}, false + } + + closeIdx := strings.Index(trimmed, subagentNotificationCloseTag) + if closeIdx == -1 { + return SubagentNotification{}, false + } + if rest := strings.TrimSpace(trimmed[closeIdx+len(subagentNotificationCloseTag):]); rest != "" { + return SubagentNotification{}, false + } + + body := strings.TrimSpace(trimmed[len(subagentNotificationOpenTag):closeIdx]) + if body == "" { + return SubagentNotification{}, false + } + + var payload struct { + AgentID string `json:"agent_id"` + Status map[string]json.RawMessage `json:"status"` + } + if err := json.Unmarshal([]byte(body), &payload); err != nil { + return SubagentNotification{}, false + } + if strings.TrimSpace(payload.AgentID) == "" { + return SubagentNotification{}, false + } + + statusType, statusText := extractSubagentStatus(payload.Status) + return SubagentNotification{ + AgentID: strings.TrimSpace(payload.AgentID), + StatusType: statusType, + StatusText: statusText, + }, true +} + +func extractSubagentStatus(status map[string]json.RawMessage) (string, string) { + if len(status) == 0 { + return "", "" + } + + preferred := []string{"completed", "failed", "running", "started", "queued"} + for _, key := range preferred { + raw, ok := status[key] + if !ok { + continue + } + return key, decodeSubagentStatus(raw) + } + + keys := make([]string, 0, len(status)) + for key := range status { + keys = append(keys, key) + } + sort.Strings(keys) + key := keys[0] + return key, decodeSubagentStatus(status[key]) +} + +func decodeSubagentStatus(raw json.RawMessage) string { + var text string + if err := json.Unmarshal(raw, &text); err == nil { + return strings.TrimSpace(text) + } + + var payload struct { + Text string `json:"text"` + Message string `json:"message"` + } + if err := 
json.Unmarshal(raw, &payload); err == nil { + switch { + case strings.TrimSpace(payload.Text) != "": + return strings.TrimSpace(payload.Text) + case strings.TrimSpace(payload.Message) != "": + return strings.TrimSpace(payload.Message) + } + } + + return strings.TrimSpace(prettyJSON(string(raw))) +} + +func extractSpawnAgentRequest(arguments string) string { + if strings.TrimSpace(arguments) == "" { + return "" + } + + var payload struct { + Message string `json:"message"` + Items []struct { + Type string `json:"type"` + Text string `json:"text"` + Name string `json:"name"` + } `json:"items"` + } + if err := json.Unmarshal([]byte(arguments), &payload); err != nil { + return "" + } + if strings.TrimSpace(payload.Message) != "" { + return strings.TrimSpace(payload.Message) + } + + parts := make([]string, 0, len(payload.Items)) + for _, item := range payload.Items { + switch { + case strings.TrimSpace(item.Text) != "": + parts = append(parts, strings.TrimSpace(item.Text)) + case item.Type == "skill" && strings.TrimSpace(item.Name) != "": + parts = append(parts, strings.TrimSpace(item.Name)) + } + } + return strings.TrimSpace(strings.Join(parts, "\n\n")) +} + +func extractSpawnedAgent(output string) (agentID, nickname string, ok bool) { + if strings.TrimSpace(output) == "" { + return "", "", false + } + + var payload struct { + AgentID string `json:"agent_id"` + Nickname string `json:"nickname"` + } + if err := json.Unmarshal([]byte(output), &payload); err != nil { + return "", "", false + } + if strings.TrimSpace(payload.AgentID) == "" { + return "", "", false + } + return strings.TrimSpace(payload.AgentID), strings.TrimSpace(payload.Nickname), true +} + +func renderSubagentSpawnOutput(output string) string { + trimmed := strings.TrimSpace(output) + if trimmed == "" { + return "(empty)" + } + return "```json\n" + prettyJSON(trimmed) + "\n```" +} diff --git a/internal/sessions/turn_aborted.go b/internal/sessions/turn_aborted.go new file mode 100644 index 
0000000..f2316f3 --- /dev/null +++ b/internal/sessions/turn_aborted.go @@ -0,0 +1,30 @@ +package sessions + +import "strings" + +const ( + turnAbortedOpenTag = "<turn_aborted>" + turnAbortedCloseTag = "</turn_aborted>" +) + +// ExtractTurnAbortedMessage returns the message inside a <turn_aborted> block. +// It only matches when the content is a single turn_aborted block. +func ExtractTurnAbortedMessage(content string) (string, bool) { + trimmed := strings.TrimSpace(content) + if trimmed == "" { + return "", false + } + if !strings.HasPrefix(trimmed, turnAbortedOpenTag) { + return "", false + } + closeIdx := strings.Index(trimmed, turnAbortedCloseTag) + if closeIdx == -1 { + return "", false + } + rest := strings.TrimSpace(trimmed[closeIdx+len(turnAbortedCloseTag):]) + if rest != "" { + return "", false + } + body := strings.TrimSpace(trimmed[len(turnAbortedOpenTag):closeIdx]) + return body, true +} diff --git a/internal/web/server.go index b720a3c..c6ab030 100644 --- a/internal/web/server.go +++ b/internal/web/server.go @@ -10,6 +10,8 @@ import ( "fmt" "html" "html/template" + "io" + "log" "math" "net" "net/http" @@ -19,9 +21,13 @@ import ( "sort" "strconv" "strings" + "sync" "time" + "codex-manager/internal/active" + "codex-manager/internal/notifications" "codex-manager/internal/render" + "codex-manager/internal/repooverride" "codex-manager/internal/search" "codex-manager/internal/sessions" @@ -33,30 +39,40 @@ type htmlBucketUploader interface { Upload(ctx context.Context, html string) (string, error) } +const activeTimeZoneCookie = "codex_tz" +const hookMaxBodyBytes = 1 << 20 + // Server serves the HTML views.
type Server struct { - idx *sessions.Index - search *search.Index - renderer *render.Renderer - sessionsDir string - shareDir string - shareAddr string - themeClass string - useTailscale bool - tailscaleHost string - htmlBucket htmlBucketUploader + idx *sessions.Index + search *search.Index + active *active.Index + activeState *active.StateStore + notifications *notifications.Store + repoOverrides *repooverride.Store + renderer *render.Renderer + sessionsDir string + shareDir string + shareAddr string + themeClass string + useTailscale bool + tailscaleHost string + htmlBucket htmlBucketUploader + activeRefreshMaxAge time.Duration + activeRefreshMu sync.Mutex } // NewServer wires up the HTTP server. func NewServer(idx *sessions.Index, searchIdx *search.Index, renderer *render.Renderer, sessionsDir, shareDir, shareAddr string, theme int) *Server { return &Server{ - idx: idx, - search: searchIdx, - renderer: renderer, - sessionsDir: sessionsDir, - shareDir: shareDir, - shareAddr: shareAddr, - themeClass: themeClass(theme), + idx: idx, + search: searchIdx, + renderer: renderer, + sessionsDir: sessionsDir, + shareDir: shareDir, + shareAddr: shareAddr, + themeClass: themeClass(theme), + activeRefreshMaxAge: 15 * time.Second, } } @@ -71,6 +87,25 @@ func (s *Server) EnableHTMLBucket(client htmlBucketUploader) { s.htmlBucket = client } +// EnableActive configures active-thread summaries and persisted state. +func (s *Server) EnableActive(activeIdx *active.Index, state *active.StateStore, refreshMaxAge time.Duration) { + s.active = activeIdx + s.activeState = state + if refreshMaxAge > 0 { + s.activeRefreshMaxAge = refreshMaxAge + } +} + +// EnableNotifications configures the webhook notification log. +func (s *Server) EnableNotifications(store *notifications.Store) { + s.notifications = store +} + +// EnableRepoOverrides configures repository URL overrides keyed by cwd prefix. 
+func (s *Server) EnableRepoOverrides(store *repooverride.Store) { + s.repoOverrides = store +} + func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) { pathValue := strings.Trim(r.URL.Path, "/") if pathValue == "" { @@ -81,10 +116,30 @@ func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) { s.handleDir(w, r) return } + if pathValue == "active" { + s.handleActive(w, r) + return + } + if pathValue == "active/state" { + s.handleActiveState(w, r) + return + } + if pathValue == "hook" { + s.handleHook(w, r) + return + } + if pathValue == "notifications" { + s.handleNotifications(w, r) + return + } if pathValue == "search" { s.handleSearch(w, r) return } + if strings.HasPrefix(pathValue, "markdown/") { + s.handleSessionMarkdown(w, r, strings.TrimPrefix(pathValue, "markdown/")) + return + } if strings.HasPrefix(pathValue, "raw/") { s.handleRaw(w, r, strings.TrimPrefix(pathValue, "raw/")) return @@ -122,11 +177,28 @@ type dirView struct { } type sessionView struct { - Name string - Size string - ModTime string - ResumeCommand string - Cwd string + Name string + DisplayName string + Size string + ModTime string + ModTimeOnly string + ResumeCommand string + Cwd string + Branch string + BranchURL string + DateLabel string + DatePath string + ThreadStateKey string + ThreadStatusLabel string + ThreadStatusClass string + ThreadAction string + ThreadActionLabel string + LastUserSnippet string + LastUserSnippetTitle string + LastUserSnippetClass string + LastAssistantSnippet string + LastAssistantSnippetTitle string + LastAssistantSnippetClass string } type indexView struct { @@ -145,6 +217,17 @@ type dayView struct { Dirs []dirView SelectedCwd string SelectedCwdLabel string + ActiveTabs []activeTabView + FallbackDate *dateView + FallbackSessions []sessionView + FallbackDirs []dirView + Page int + TotalPages int + HasPrev bool + HasNext bool + PrevPage int + NextPage int + ShowAll bool View string ThemeClass string } @@ -152,33 +235,162 @@ type dayView 
struct { type dirPageView struct { Dir dirView Dates []dateView + Sessions []sessionView + Page int + TotalPages int + HasPrev bool + HasNext bool + PrevPage int + NextPage int + ShowAll bool ThemeClass string } type sessionPageView struct { - Date dateView - File sessionView - Meta *sessions.SessionMeta - Items []itemView - AllMarkdown string - ResumeCommand string - ThemeClass string - IsJSONL bool - LastUserLine int + Date dateView + File sessionView + Meta *sessions.SessionMeta + IsSubagentThread bool + SubagentDisplayName string + SubagentDisplayRole string + ParentThreadID string + ParentSessionPath string + ParentSessionTitle string + UserNavLabel string + Items []itemView + ResumeCommand string + ThreadStateKey string + ThreadStatusLabel string + ThreadStatusClass string + ThreadAction string + ThreadActionLabel string + ThemeClass string + IsJSONL bool + LastUserLine int + LastAgentLine int + LastItemLine int +} + +type activeTabView struct { + Label string + Path string + Active bool +} + +type activeSessionView struct { + Key string + DisplayName string + DetailPath string + DateLabel string + ShowDateDivider bool + Cwd string + Branch string + LastActivity string + ResumeCommand string + HasResumeCommand bool + StatusLabel string + StatusClass string + Action string + ActionLabel string + LastUserSnippet string + LastUserSnippetTitle string + LastUserSnippetClass string + LastAssistantSnippet string + LastAssistantSnippetTitle string + LastAssistantSnippetClass string +} + +type activePageView struct { + Heading string + Scope string + Tabs []activeTabView + Threads []activeSessionView + EmptyMessage string + ThreadCount int + ShowDayNav bool + SelectedDate string + SelectedCwd string + SelectedCwdLabel string + PrevDayPath string + NextDayPath string + LastScan string + TimeZone string + ThemeClass string +} + +type notificationHeaderView struct { + Key string + Value string +} + +type notificationEntryView struct { + ID string + ReceivedAt string + 
Method string + Path string + ContentType string + UserAgent string + RemoteAddr string + SizeLabel string + Preview string + Body string + RawBody string + IsJSON bool + Headers []notificationHeaderView +} + +type notificationsPageView struct { + Entries []notificationEntryView + LastScan string + ThemeClass string + EmptyMessage string } type itemView struct { - Line int - Timestamp string - Type string - Subtype string - Role string - Title string - Content string - Class string - AutoCtx bool - Markdown string - HTML template.HTML + Line int + Timestamp string + Type string + Subtype string + Role string + RoleLabel string + SpeakerClass string + Title string + Content string + Class string + AutoCtx bool + IsTurnAborted bool + TurnAbortedMessage string + SpeakerName string + SpeakerRole string + SubagentID string + SubagentNickname string + SubagentStatusType string + SubagentRequest string + SubagentSessionPath string + SubagentSessionTitle string + Markdown string + SubagentRequestHTML template.HTML + HTML template.HTML + ToolRunCallTitle string + ToolRunOutputLine int + ToolRunOutputTitle string + ToolRunOutputHTML template.HTML + ToolRunOutputTime string + ToolRunGroupTitle string + ToolRunGroupCount int + ToolRunGroupLastLine int + ToolRunGroupEnd bool + ToolRunHideHeader bool +} + +type updatePlanHTMLArgs struct { + Explanation string `json:"explanation"` + Plan []updatePlanHTMLStep `json:"plan"` +} + +type updatePlanHTMLStep struct { + Status string `json:"status"` + Step string `json:"step"` } func (s *Server) handleIndex(w http.ResponseWriter, r *http.Request) { @@ -211,6 +423,20 @@ func (s *Server) handleDir(w http.ResponseWriter, r *http.Request) { for _, file := range files { counts[file.Date]++ } + sort.Slice(files, func(i, j int) bool { + if files[i].ModTime.Equal(files[j].ModTime) { + dateI := files[i].Date.String() + dateJ := files[j].Date.String() + if dateI != dateJ { + return dateI > dateJ + } + return files[i].Name > files[j].Name + } + 
return files[i].ModTime.After(files[j].ModTime) + }) + page := parsePageParam(r) + showAll := parseBoolParam(r, "all") + sessionsView, pager := s.buildSessionViewsPage(files, page, 10, showAll) dates := s.idx.Dates() dateViews := make([]dateView, 0, len(counts)) @@ -233,6 +459,14 @@ func (s *Server) handleDir(w http.ResponseWriter, r *http.Request) { view := dirPageView{ Dir: dir, Dates: dateViews, + Sessions: sessionsView, + Page: pager.Page, + TotalPages: pager.TotalPages, + HasPrev: pager.HasPrev, + HasNext: pager.HasNext, + PrevPage: pager.PrevPage, + NextPage: pager.NextPage, + ShowAll: showAll, ThemeClass: s.themeClass, } @@ -240,62 +474,206 @@ func (s *Server) handleDir(w http.ResponseWriter, r *http.Request) { _ = s.renderer.Execute(w, "dir", view) } +func (s *Server) handleActive(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodGet { + http.NotFound(w, r) + return + } + if s.active == nil { + http.Error(w, "active index not available", http.StatusServiceUnavailable) + return + } + if err := s.refreshActiveIfStale(); err != nil { + log.Printf("active refresh failed: %v", err) + } + + view := s.buildActivePageView(r) + templateName := "active" + if parseBoolParam(r, "partial") { + templateName = "active_content" + } + + w.Header().Set("Content-Type", "text/html; charset=utf-8") + _ = s.renderer.Execute(w, templateName, view) +} + +func (s *Server) handleActiveState(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + http.NotFound(w, r) + return + } + if s.active == nil || s.activeState == nil { + http.Error(w, "active state not available", http.StatusServiceUnavailable) + return + } + if err := s.refreshActiveIfStale(); err != nil { + log.Printf("active refresh failed before state change: %v", err) + } + if err := r.ParseForm(); err != nil { + writeJSONError(w, http.StatusBadRequest, "invalid form payload") + return + } + + action := strings.TrimSpace(r.Form.Get("action")) + key := 
strings.TrimSpace(r.Form.Get("key")) + if key == "" { + writeJSONError(w, http.StatusBadRequest, "missing thread key") + return + } + + var err error + switch action { + case "end": + summary, ok := s.active.Lookup(key) + if !ok { + writeJSONError(w, http.StatusNotFound, "thread not found") + return + } + err = s.activeState.MarkEnded(key, summary.ActivityToken) + case "reopen": + err = s.activeState.Reopen(key) + default: + writeJSONError(w, http.StatusBadRequest, "invalid action") + return + } + if err != nil { + writeJSONError(w, http.StatusInternalServerError, err.Error()) + return + } + + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(map[string]any{"ok": true}) +} + +func (s *Server) handleHook(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + http.NotFound(w, r) + return + } + if s.notifications == nil { + http.Error(w, "notification store not available", http.StatusServiceUnavailable) + return + } + + r.Body = http.MaxBytesReader(w, r.Body, hookMaxBodyBytes) + body, err := io.ReadAll(r.Body) + if err != nil { + http.Error(w, "failed to read request body", http.StatusBadRequest) + return + } + + headers := make(map[string][]string, len(r.Header)) + for key, values := range r.Header { + headers[key] = append([]string(nil), values...) 
+ } + if _, err := s.notifications.AppendRequest(r.Method, r.URL.Path, r.Header.Get("Content-Type"), r.UserAgent(), r.RemoteAddr, headers, body); err != nil { + http.Error(w, "failed to store notification", http.StatusInternalServerError) + return + } + + w.WriteHeader(http.StatusNoContent) +} + +func (s *Server) handleNotifications(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodGet { + http.NotFound(w, r) + return + } + if s.notifications == nil { + http.Error(w, "notification store not available", http.StatusServiceUnavailable) + return + } + + view := s.buildNotificationsPageView(r) + templateName := "notifications" + if parseBoolParam(r, "partial") { + templateName = "notifications_content" + } + + w.Header().Set("Content-Type", "text/html; charset=utf-8") + _ = s.renderer.Execute(w, templateName, view) +} + func (s *Server) handleDay(w http.ResponseWriter, r *http.Request, parts []string) { date, ok := sessions.ParseDate(parts[0], parts[1], parts[2]) if !ok { http.NotFound(w, r) return } + if s.active != nil { + if err := s.refreshActiveIfStale(); err != nil { + log.Printf("active refresh failed during day render: %v", err) + } + } selectedCwd := normalizeCwdParam(r.URL.Query().Get("cwd")) viewMode := strings.TrimSpace(r.URL.Query().Get("view")) if viewMode != "dir" { viewMode = "sessions" } - files := s.idx.SessionsByDate(date) - dirViews := buildDirViewsFromFiles(files) - - filtered := files - if selectedCwd != "" { - filtered = make([]sessions.SessionFile, 0, len(files)) - for _, file := range files { - if sessions.CwdForFile(file) == selectedCwd { - filtered = append(filtered, file) + requestedFiles := s.idx.SessionsByDate(date) + filtered := filterSessionFilesByCwd(requestedFiles, selectedCwd) + requestedViews := []sessionView{} + dirViews := buildDirViewsFromFiles(requestedFiles) + page := parsePageParam(r) + showAll := parseBoolParam(r, "all") + pager := paginationInfo{Page: page} + var fallbackDate *dateView + var 
fallbackSessions []sessionView + var fallbackDirs []dirView + if selectedCwd != "" && len(filtered) == 0 { + if prevDate, ok := previousDateKey(date); ok { + prevFiles := s.idx.SessionsByDate(prevDate) + prevFiltered := filterSessionFilesByCwd(prevFiles, selectedCwd) + if len(prevFiltered) > 0 { + fallbackDate = &dateView{ + Label: prevDate.String(), + Path: prevDate.Path(), + Count: len(prevFiles), + } + pageSessions, pageInfo := s.buildSessionViewsPage(prevFiltered, page, 10, showAll) + pager = pageInfo + fallbackSessions = pageSessions + fallbackDirs = buildDirViewsFromFiles(prevFiles) } } } - - views := make([]sessionView, 0, len(filtered)) - for _, file := range filtered { - resumeCommand := buildResumeCommand(file.Meta) - cwd := sessions.CwdForFile(file) - if cwd == sessions.UnknownCwd { - cwd = "" - } - views = append(views, sessionView{ - Name: file.Name, - Size: formatBytes(file.Size), - ModTime: formatTime(file.ModTime), - ResumeCommand: resumeCommand, - Cwd: cwd, - }) + if fallbackDate == nil { + pageSessions, pageInfo := s.buildSessionViewsPage(filtered, page, 10, showAll) + pager = pageInfo + requestedViews = pageSessions } selectedLabel := "" if selectedCwd != "" { selectedLabel = dirLabel(selectedCwd) } + var activeTabs []activeTabView + if selectedCwd != "" && s.active != nil { + activeTabs = buildActiveTabs("day", date.String(), time.Now().Format("2006-01-02"), selectedCwd) + } view := dayView{ Date: dateView{ Label: date.String(), Path: date.Path(), - Count: len(files), + Count: len(requestedFiles), }, - Sessions: views, + Sessions: requestedViews, Dirs: dirViews, SelectedCwd: selectedCwd, SelectedCwdLabel: selectedLabel, + ActiveTabs: activeTabs, + FallbackDate: fallbackDate, + FallbackSessions: fallbackSessions, + FallbackDirs: fallbackDirs, + Page: pager.Page, + TotalPages: pager.TotalPages, + HasPrev: pager.HasPrev, + HasNext: pager.HasNext, + PrevPage: pager.PrevPage, + NextPage: pager.NextPage, + ShowAll: showAll, View: viewMode, ThemeClass: 
s.themeClass, } @@ -305,6 +683,11 @@ func (s *Server) handleDay(w http.ResponseWriter, r *http.Request, parts []strin } func (s *Server) handleSession(w http.ResponseWriter, r *http.Request, parts []string) { + if s.active != nil { + if err := s.refreshActiveIfStale(); err != nil { + log.Printf("active refresh failed during session render: %v", err) + } + } view, err := s.buildSessionView(parts) if err != nil { http.NotFound(w, r) @@ -315,6 +698,50 @@ func (s *Server) handleSession(w http.ResponseWriter, r *http.Request, parts []s _ = s.renderer.Execute(w, "session", view) } +func (s *Server) handleSessionMarkdown(w http.ResponseWriter, r *http.Request, path string) { + if r.Method != http.MethodGet { + http.NotFound(w, r) + return + } + + parts := strings.Split(path, "/") + if len(parts) != 4 { + http.NotFound(w, r) + return + } + + view, err := s.buildSessionView(parts) + if err != nil { + http.NotFound(w, r) + return + } + + line := 0 + if rawLine := strings.TrimSpace(r.URL.Query().Get("line")); rawLine != "" { + parsed, err := strconv.Atoi(rawLine) + if err != nil || parsed <= 0 { + http.Error(w, "invalid line", http.StatusBadRequest) + return + } + line = parsed + } + + var markdown string + if line > 0 { + var ok bool + markdown, ok = sessionItemMarkdown(view.Items, line) + if !ok { + http.NotFound(w, r) + return + } + } else { + markdown = joinItemMarkdown(view.Items) + } + + w.Header().Set("Content-Type", "text/plain; charset=utf-8") + _, _ = io.WriteString(w, markdown) +} + type searchResponse struct { Query string `json:"query"` Results []search.Result `json:"results"` @@ -331,6 +758,7 @@ func (s *Server) handleSearch(w http.ResponseWriter, r *http.Request) { } query := strings.TrimSpace(r.URL.Query().Get("query")) + cwdFilter := normalizeSearchCwdFilter(r.URL.Query().Get("cwd")) limit := 50 if rawLimit := r.URL.Query().Get("limit"); rawLimit != "" { if parsed, err := strconv.Atoi(rawLimit); err == nil && parsed > 0 { @@ -343,7 +771,7 @@ func (s *Server) 
handleSearch(w http.ResponseWriter, r *http.Request) { var results []search.Result if len(query) >= 2 { - results = s.search.Search(query, limit) + results = s.search.SearchWithCwd(query, limit, cwdFilter) } else { results = []search.Result{} } @@ -459,6 +887,13 @@ func formatTime(t time.Time) string { return t.Format("2006-01-02 15:04:05") } +func formatTimeOnly(t time.Time) string { + if t.IsZero() { + return "" + } + return t.Format("15:04:05") +} + func formatScanTime(t time.Time) string { if t.IsZero() { return "never" @@ -466,6 +901,87 @@ func formatScanTime(t time.Time) string { return t.Format(time.RFC3339) } +type paginationInfo struct { + Page int + TotalPages int + HasPrev bool + HasNext bool + PrevPage int + NextPage int +} + +func parsePageParam(r *http.Request) int { + page := 1 + if rawPage := r.URL.Query().Get("page"); rawPage != "" { + if parsed, err := strconv.Atoi(rawPage); err == nil { + page = parsed + } + } + if page < 1 { + page = 1 + } + return page +} + +func parseBoolParam(r *http.Request, key string) bool { + value := strings.TrimSpace(strings.ToLower(r.URL.Query().Get(key))) + switch value { + case "1", "true", "yes", "on": + return true + default: + return false + } +} + +func paginateSessionFiles(files []sessions.SessionFile, page int, perPage int) ([]sessions.SessionFile, paginationInfo) { + info := paginationInfo{Page: page} + total := len(files) + if perPage <= 0 { + info.TotalPages = 1 + info.HasPrev = false + info.HasNext = false + info.PrevPage = 1 + info.NextPage = 1 + return files, info + } + if total == 0 { + info.Page = 1 + info.TotalPages = 0 + info.PrevPage = 1 + info.NextPage = 1 + return nil, info + } + totalPages := (total + perPage - 1) / perPage + if page > totalPages { + page = totalPages + } + if page < 1 { + page = 1 + } + start := (page - 1) * perPage + end := start + perPage + if start < 0 { + start = 0 + } + if end > total { + end = total + } + pageFiles := files[start:end] + info.Page = page + info.TotalPages = 
totalPages + info.HasPrev = page > 1 + info.HasNext = page < totalPages + info.PrevPage = 1 + info.NextPage = totalPages + if info.HasPrev { + info.PrevPage = page - 1 + } + if info.HasNext { + info.NextPage = page + 1 + } + return pageFiles, info +} + func (s *Server) buildIndexView(view string, heatMode string) indexView { heatMode = parseHeatMode(heatMode) dates := s.idx.Dates() @@ -512,88 +1028,686 @@ func (s *Server) buildIndexView(view string, heatMode string) indexView { } } -func buildDirViewsFromFiles(files []sessions.SessionFile) []dirView { - counts := make(map[string]int, len(files)) - for _, file := range files { - cwd := sessions.CwdForFile(file) - counts[cwd]++ +func (s *Server) refreshActiveIfStale() error { + if s.active == nil || s.idx == nil { + return nil + } + maxAge := s.activeRefreshMaxAge + if maxAge <= 0 { + maxAge = 15 * time.Second + } + if time.Since(s.idx.LastUpdated()) < maxAge && time.Since(s.active.LastUpdated()) < maxAge { + return nil } - return buildDirViewsFromCounts(counts, nil, 0, false) -} -func buildDirViewsFromCounts(counts map[string]int, recentCounts map[string]int, recentMax int, withHeat bool) []dirView { - keys := make([]string, 0, len(counts)) - for key := range counts { - keys = append(keys, key) + s.activeRefreshMu.Lock() + defer s.activeRefreshMu.Unlock() + + if time.Since(s.idx.LastUpdated()) < maxAge && time.Since(s.active.LastUpdated()) < maxAge { + return nil } - sort.Slice(keys, func(i, j int) bool { - if keys[i] == sessions.UnknownCwd { - return false + if err := s.idx.Refresh(); err != nil { + return err + } + if err := s.active.RefreshFrom(s.idx); err != nil { + return err + } + if s.activeState != nil { + if err := s.activeState.Reconcile(s.active.Summaries()); err != nil { + return err } - if keys[j] == sessions.UnknownCwd { - return true + } + return nil +} + +func (s *Server) buildActivePageView(r *http.Request) activePageView { + loc, tz := activeLocation(r) + scope := parseActiveScope(r) + selectedDate 
:= parseActiveDate(r, loc) + selectedDateLabel := selectedDate.Format("2006-01-02") + todayLabel := startOfDay(time.Now().In(loc)).Format("2006-01-02") + selectedCwd := normalizeCwdParam(r.URL.Query().Get("cwd")) + selectedCwdLabel := "" + if selectedCwd != "" { + selectedCwdLabel = dirLabel(selectedCwd) + } + + summaries := []active.Summary{} + if s.active != nil { + summaries = s.active.Summaries() + } + if s.activeState != nil { + if err := s.activeState.Reconcile(summaries); err != nil { + log.Printf("active state reconcile failed during render: %v", err) } - return keys[i] < keys[j] - }) + } + endedMarks := map[string]active.EndedMark{} + if s.activeState != nil { + endedMarks = s.activeState.Snapshot() + } - views := make([]dirView, 0, len(keys)) - for _, key := range keys { - view := dirView{ - Label: dirLabel(key), - Value: key, - Count: counts[key], + threads := make([]activeSessionView, 0, len(summaries)) + for _, summary := range summaries { + ended := false + if mark, ok := endedMarks[summary.Key]; ok { + ended = mark.ActivityToken == "" || mark.ActivityToken == summary.ActivityToken } - if withHeat { - view.RecentCount = recentCounts[key] - view.HeatColor = heatColor(view.RecentCount, recentMax) + if !matchesActiveCwd(summary, selectedCwd) { + continue + } + if !matchesActiveScope(summary, scope, selectedDateLabel, loc, ended) { + continue + } + threads = append(threads, buildActiveSessionRow(summary, ended, loc)) + } + if scope != "day" { + lastDateLabel := "" + for i := range threads { + if threads[i].DateLabel != lastDateLabel { + threads[i].ShowDateDivider = true + lastDateLabel = threads[i].DateLabel + } } - views = append(views, view) } - return views -} -func dirLabel(cwd string) string { - if sessions.NormalizeCwd(cwd) == sessions.UnknownCwd { - return "Unknown (no CWD)" + heading, emptyMessage := activeHeading(scope, selectedDateLabel, todayLabel) + prevDayPath := "" + nextDayPath := "" + if scope == "day" { + prevDayPath = 
buildActivePagePath("day", selectedDate.AddDate(0, 0, -1).Format("2006-01-02"), todayLabel, selectedCwd) + nextDayPath = buildActivePagePath("day", selectedDate.AddDate(0, 0, 1).Format("2006-01-02"), todayLabel, selectedCwd) + } + + return activePageView{ + Heading: heading, + Scope: scope, + Tabs: buildActiveTabs(scope, selectedDateLabel, todayLabel, selectedCwd), + Threads: threads, + EmptyMessage: emptyMessage, + ThreadCount: len(threads), + ShowDayNav: scope == "day", + SelectedDate: selectedDateLabel, + SelectedCwd: selectedCwd, + SelectedCwdLabel: selectedCwdLabel, + PrevDayPath: prevDayPath, + NextDayPath: nextDayPath, + LastScan: formatScanTime(maxTime(s.idx.LastUpdated(), s.active.LastUpdated())), + TimeZone: tz, + ThemeClass: s.themeClass, } - return cwd } -func displayCwd(cwd string) string { - if sessions.NormalizeCwd(cwd) == sessions.UnknownCwd { - return "" +func (s *Server) buildNotificationsPageView(r *http.Request) notificationsPageView { + loc, _ := activeLocation(r) + rawEntries := []notifications.Entry{} + if s.notifications != nil { + rawEntries = s.notifications.Entries() + } + entries := make([]notificationEntryView, 0, len(rawEntries)) + for _, entry := range rawEntries { + headers := make([]notificationHeaderView, 0, len(entry.Headers)) + headerKeys := make([]string, 0, len(entry.Headers)) + for key := range entry.Headers { + headerKeys = append(headerKeys, key) + } + sort.Strings(headerKeys) + for _, key := range headerKeys { + headers = append(headers, notificationHeaderView{ + Key: key, + Value: strings.Join(entry.Headers[key], ", "), + }) + } + + body := entry.PrettyBody + if body == "" { + body = entry.Body + } + entries = append(entries, notificationEntryView{ + ID: entry.ID, + ReceivedAt: formatTime(entry.ReceivedAt.In(loc)), + Method: entry.Method, + Path: entry.Path, + ContentType: entry.ContentType, + UserAgent: entry.UserAgent, + RemoteAddr: entry.RemoteAddr, + SizeLabel: formatBytes(int64(entry.Size)), + Preview: entry.Preview, 
+ Body: body, + RawBody: entry.Body, + IsJSON: entry.IsJSON, + Headers: headers, + }) + } + lastScan := time.Now() + return notificationsPageView{ + Entries: entries, + LastScan: formatScanTime(lastScan), + ThemeClass: s.themeClass, + EmptyMessage: "No notifications received yet.", } - return cwd } -func (s *Server) recentCwdCounts(since time.Time) (map[string]int, int) { - counts := map[string]int{} - max := 0 - for _, date := range s.idx.Dates() { - files := s.idx.SessionsByDate(date) - for _, file := range files { - if file.ModTime.Before(since) { - continue +func activeLocation(r *http.Request) (*time.Location, string) { + if r != nil { + if cookie, err := r.Cookie(activeTimeZoneCookie); err == nil { + value := strings.TrimSpace(cookie.Value) + if decoded, err := url.QueryUnescape(value); err == nil { + value = decoded } - cwd := sessions.CwdForFile(file) - counts[cwd]++ - if counts[cwd] > max { - max = counts[cwd] + if value != "" { + if loc, err := time.LoadLocation(value); err == nil { + return loc, value + } } } } - return counts, max + if time.Local != nil { + return time.Local, time.Local.String() + } + return time.UTC, time.UTC.String() } -func (s *Server) recentCwdCountsFromLatestDates(limit int) (map[string]int, int) { - counts := map[string]int{} - max := 0 - if limit <= 0 { - return counts, max +func parseActiveScope(r *http.Request) string { + value := strings.ToLower(strings.TrimSpace(r.URL.Query().Get("scope"))) + switch value { + case "all": + return "all" + case "ended": + return "ended" + default: + return "day" } - dates := s.idx.Dates() - if len(dates) > limit { - dates = dates[:limit] +} + +func parseActiveDate(r *http.Request, loc *time.Location) time.Time { + if loc == nil { + loc = time.UTC + } + value := strings.TrimSpace(r.URL.Query().Get("date")) + if value == "" { + return startOfDay(time.Now().In(loc)) + } + if parsed, err := time.ParseInLocation("2006-01-02", value, loc); err == nil { + return startOfDay(parsed) + } + return 
startOfDay(time.Now().In(loc)) +} + +func startOfDay(t time.Time) time.Time { + if t.IsZero() { + return time.Time{} + } + return time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location()) +} + +func activeDateString(t time.Time, loc *time.Location) string { + if t.IsZero() { + return "" + } + if loc != nil { + t = t.In(loc) + } + return t.Format("2006-01-02") +} + +func matchesActiveScope(summary active.Summary, scope string, selectedDate string, loc *time.Location, ended bool) bool { + switch scope { + case "all": + return !ended + case "ended": + return ended + default: + return !ended && activeDateString(summary.LastActivityAt, loc) == selectedDate + } +} + +func matchesActiveCwd(summary active.Summary, selectedCwd string) bool { + if selectedCwd == "" { + return true + } + return sessions.NormalizeCwd(summary.Cwd) == sessions.NormalizeCwd(selectedCwd) +} + +func buildActiveSessionRow(summary active.Summary, ended bool, loc *time.Location) activeSessionView { + lastActivity := summary.LastActivityAt + if lastActivity.IsZero() { + lastActivity = summary.ModTime + } + if loc != nil && !lastActivity.IsZero() { + lastActivity = lastActivity.In(loc) + } + + statusLabel, statusClass := activeStatus(summary.WaitState, ended) + action := "end" + actionLabel := "⏹️ End" + if ended { + action = "reopen" + actionLabel = "↩️ Reopen" + } + + userSnippet := summary.LastUserSnippet + if summary.HasUserMessage && userSnippet.Text == "" { + userSnippet.Text = "(empty)" + } + assistantSnippet := summary.LastAssistantSnippet + + return activeSessionView{ + Key: summary.Key, + DisplayName: summary.DisplayName, + DetailPath: "/" + summary.Date.Path() + "/" + summary.Name + "#last-item", + DateLabel: summary.Date.String(), + Cwd: displayCwd(summary.Cwd), + Branch: summary.Branch, + LastActivity: formatTime(lastActivity), + ResumeCommand: summary.ResumeCommand, + HasResumeCommand: summary.ResumeCommand != "", + StatusLabel: statusLabel, + StatusClass: statusClass, + Action: 
action, + ActionLabel: actionLabel, + LastUserSnippet: userSnippet.Text, + LastUserSnippetTitle: userSnippet.Title, + LastUserSnippetClass: userSnippet.SpeakerClass, + LastAssistantSnippet: assistantSnippet.Text, + LastAssistantSnippetTitle: assistantSnippet.Title, + LastAssistantSnippetClass: assistantSnippet.SpeakerClass, + } +} + +func activeStatus(waitState active.WaitState, ended bool) (string, string) { + if ended { + return "Ended", "ended" + } + switch waitState { + case active.WaitStateAgent: + return "Waiting for agent", "waiting-agent" + default: + return "Waiting for user", "waiting-user" + } +} + +func buildActiveTabs(scope string, selectedDate string, today string, selectedCwd string) []activeTabView { + return []activeTabView{ + {Label: "By day", Path: buildByDayTabPath(selectedDate, today, selectedCwd), Active: scope == "day"}, + {Label: "All active", Path: buildActivePagePath("all", selectedDate, today, selectedCwd), Active: scope == "all"}, + {Label: "Ended", Path: buildActivePagePath("ended", selectedDate, today, selectedCwd), Active: scope == "ended"}, + } +} + +func buildByDayTabPath(selectedDate string, today string, selectedCwd string) string { + if selectedCwd != "" { + return buildDayPagePath(selectedDate, selectedCwd) + } + return buildActivePagePath("day", selectedDate, today, "") +} + +func buildActivePagePath(scope string, selectedDate string, today string, selectedCwd string) string { + params := url.Values{} + if scope != "" && scope != "day" { + params.Set("scope", scope) + } + if selectedDate != "" && (selectedDate != today || selectedCwd != "") { + params.Set("date", selectedDate) + } + if selectedCwd != "" { + params.Set("cwd", selectedCwd) + } + return urlWithQuery("/active", params) +} + +func buildDayPagePath(selectedDate string, selectedCwd string) string { + base := "/" + parts := strings.Split(selectedDate, "-") + if len(parts) == 3 && parts[0] != "" && parts[1] != "" && parts[2] != "" { + base = "/" + strings.Join(parts, 
"/") + "/" + } + params := url.Values{} + if selectedCwd != "" { + params.Set("cwd", selectedCwd) + } + return urlWithQuery(base, params) +} + +func urlWithQuery(base string, params url.Values) string { + if len(params) == 0 { + return base + } + encoded := params.Encode() + if encoded == "" { + return base + } + return base + "?" + encoded +} + +func activeHeading(scope string, selectedDate string, today string) (string, string) { + switch scope { + case "all": + return "All Active Threads", "No active threads found." + case "ended": + return "Ended Threads", "No ended threads found." + default: + if selectedDate == today { + return "Today's Active Threads", "No active threads for today." + } + return "Active Threads on " + selectedDate, "No active threads on " + selectedDate + "." + } +} + +func maxTime(a time.Time, b time.Time) time.Time { + if a.After(b) { + return a + } + return b +} + +func filterSessionFilesByCwd(files []sessions.SessionFile, cwd string) []sessions.SessionFile { + if cwd == "" { + return files + } + filtered := make([]sessions.SessionFile, 0, len(files)) + for _, file := range files { + if sessions.CwdForFile(file) == cwd { + filtered = append(filtered, file) + } + } + return filtered +} + +func (s *Server) buildSessionViews(files []sessions.SessionFile) []sessionView { + views := make([]sessionView, 0, len(files)) + for _, file := range files { + views = append(views, s.buildSessionListView(file)) + } + return views +} + +func (s *Server) buildSessionListView(file sessions.SessionFile) sessionView { + resumeCommand := buildResumeCommand(file.Meta) + cwd := sessions.CwdForFile(file) + if cwd == sessions.UnknownCwd { + cwd = "" + } + view := sessionView{ + Name: file.Name, + DisplayName: file.DisplayName(), + Size: formatBytes(file.Size), + ModTime: formatTime(file.ModTime), + ResumeCommand: resumeCommand, + Cwd: cwd, + Branch: branchForMeta(file.Meta), + BranchURL: s.branchURLForMeta(file.Meta, cwd), + DateLabel: file.Date.String(), + 
DatePath: file.Date.Path(), + } + threadStateKey, threadStatusLabel, threadStatusClass, threadAction, threadActionLabel, hasThreadState := s.sessionThreadState(file) + if hasThreadState { + view.ThreadStateKey = threadStateKey + view.ThreadStatusLabel = threadStatusLabel + view.ThreadStatusClass = threadStatusClass + view.ThreadAction = threadAction + view.ThreadActionLabel = threadActionLabel + } + return view +} + +func (s *Server) buildSessionViewsWithSnippets(files []sessions.SessionFile) []sessionView { + views := s.buildSessionViews(files) + for i, file := range files { + userSnippet, assistantSnippet, hasUser := extractLastSnippets(file) + if hasUser && userSnippet.Text == "" { + userSnippet.Text = "(empty)" + } + views[i].LastUserSnippet = userSnippet.Text + views[i].LastUserSnippetTitle = userSnippet.Title + views[i].LastUserSnippetClass = userSnippet.SpeakerClass + views[i].LastAssistantSnippet = assistantSnippet.Text + views[i].LastAssistantSnippetTitle = assistantSnippet.Title + views[i].LastAssistantSnippetClass = assistantSnippet.SpeakerClass + } + return views +} + +func (s *Server) buildSessionViewsPage(files []sessions.SessionFile, page int, perPage int, includeAll bool) ([]sessionView, paginationInfo) { + if includeAll { + pageFiles, pager := paginateSessionFiles(files, page, perPage) + return s.buildSessionViewsWithSnippets(pageFiles), pager + } + return s.buildSessionViewsPageFiltered(files, page, perPage) +} + +func (s *Server) buildSessionViewsPageFiltered(files []sessions.SessionFile, page int, perPage int) ([]sessionView, paginationInfo) { + if page < 1 { + page = 1 + } + if perPage <= 0 { + perPage = 10 + } + start := (page - 1) * perPage + end := start + perPage + total := 0 + foundNext := false + scannedAll := true + views := make([]sessionView, 0, perPage) + for _, file := range files { + userSnippet, assistantSnippet, hasUser := extractLastSnippets(file) + if !hasUser { + continue + } + if total >= start && total < end { + view := 
s.buildSessionListView(file) + if userSnippet.Text == "" { + userSnippet.Text = "(empty)" + } + view.LastUserSnippet = userSnippet.Text + view.LastUserSnippetTitle = userSnippet.Title + view.LastUserSnippetClass = userSnippet.SpeakerClass + view.LastAssistantSnippet = assistantSnippet.Text + view.LastAssistantSnippetTitle = assistantSnippet.Title + view.LastAssistantSnippetClass = assistantSnippet.SpeakerClass + views = append(views, view) + } + if total >= end { + foundNext = true + scannedAll = false + break + } + total++ + } + totalPages := 0 + if scannedAll && total > 0 { + totalPages = (total + perPage - 1) / perPage + if page > totalPages { + return s.buildSessionViewsPageFiltered(files, totalPages, perPage) + } + } + info := paginationInfo{ + Page: page, + TotalPages: totalPages, + HasPrev: page > 1 && total > start, + HasNext: foundNext || (totalPages > 0 && page < totalPages), + PrevPage: 1, + NextPage: totalPages, + } + if info.HasPrev { + info.PrevPage = page - 1 + } + if info.HasNext { + info.NextPage = page + 1 + } else if totalPages > 0 { + info.NextPage = totalPages + } + return views, info +} + +type sessionSnippet struct { + Text string + Title string + SpeakerClass string +} + +func extractLastSnippets(file sessions.SessionFile) (sessionSnippet, sessionSnippet, bool) { + session, err := sessions.ParseSession(file.Path) + if err != nil { + return sessionSnippet{}, sessionSnippet{}, false + } + userSnippet := sessionSnippet{ + Title: "User", + SpeakerClass: "user", + } + assistantSnippet := sessionSnippet{ + Title: "Agent", + SpeakerClass: "agent", + } + if session.Meta != nil && session.Meta.IsSubagentThread() { + userSnippet.Title = "Agent" + userSnippet.SpeakerClass = "agent" + assistantSnippet.Title = "Subagent" + assistantSnippet.SpeakerClass = "subagent" + } + hasUser := false + for _, item := range session.Items { + switch item.Role { + case "user": + if sessions.IsAutoContextUserMessage(item.Content) { + continue + } + hasUser = true + 
userSnippet.Text = item.Content + case "assistant": + assistantSnippet.Text = item.Content + } + } + userSnippet.Text = snippetFromContent(userSnippet.Text, 180) + assistantSnippet.Text = snippetFromContent(assistantSnippet.Text, 180) + return userSnippet, assistantSnippet, hasUser +} + +func snippetFromContent(value string, max int) string { + value = strings.TrimSpace(value) + if value == "" { + return "" + } + value = strings.Join(strings.Fields(value), " ") + if max <= 0 { + return value + } + runes := []rune(value) + if len(runes) <= max { + return value + } + if max > 3 { + return string(runes[:max-3]) + "..." + } + return string(runes[:max]) +} + +func buildDirViewsFromFiles(files []sessions.SessionFile) []dirView { + counts := make(map[string]int, len(files)) + for _, file := range files { + cwd := sessions.CwdForFile(file) + counts[cwd]++ + } + return buildDirViewsFromCounts(counts, nil, 0, false) +} + +func buildDirViewsFromCounts(counts map[string]int, recentCounts map[string]int, recentMax int, withHeat bool) []dirView { + keys := make([]string, 0, len(counts)) + for key := range counts { + keys = append(keys, key) + } + sort.Slice(keys, func(i, j int) bool { + if keys[i] == sessions.UnknownCwd { + return false + } + if keys[j] == sessions.UnknownCwd { + return true + } + return keys[i] < keys[j] + }) + + views := make([]dirView, 0, len(keys)) + for _, key := range keys { + view := dirView{ + Label: dirLabel(key), + Value: key, + Count: counts[key], + } + if withHeat { + view.RecentCount = recentCounts[key] + view.HeatColor = heatColor(view.RecentCount, recentMax) + } + views = append(views, view) + } + return views +} + +func previousDateKey(date sessions.DateKey) (sessions.DateKey, bool) { + year, err := strconv.Atoi(date.Year) + if err != nil { + return sessions.DateKey{}, false + } + month, err := strconv.Atoi(date.Month) + if err != nil { + return sessions.DateKey{}, false + } + day, err := strconv.Atoi(date.Day) + if err != nil { + return 
sessions.DateKey{}, false + } + current := time.Date(year, time.Month(month), day, 0, 0, 0, 0, time.Local) + if current.Year() != year || int(current.Month()) != month || current.Day() != day { + return sessions.DateKey{}, false + } + prev := current.AddDate(0, 0, -1) + return sessions.DateKey{ + Year: fmt.Sprintf("%04d", prev.Year()), + Month: fmt.Sprintf("%02d", int(prev.Month())), + Day: fmt.Sprintf("%02d", prev.Day()), + }, true +} + +func dirLabel(cwd string) string { + if sessions.NormalizeCwd(cwd) == sessions.UnknownCwd { + return "Unknown (no CWD)" + } + return cwd +} + +func displayCwd(cwd string) string { + if sessions.NormalizeCwd(cwd) == sessions.UnknownCwd { + return "" + } + return cwd +} + +func (s *Server) recentCwdCounts(since time.Time) (map[string]int, int) { + counts := map[string]int{} + max := 0 + for _, date := range s.idx.Dates() { + files := s.idx.SessionsByDate(date) + for _, file := range files { + if file.ModTime.Before(since) { + continue + } + cwd := sessions.CwdForFile(file) + counts[cwd]++ + if counts[cwd] > max { + max = counts[cwd] + } + } + } + return counts, max +} + +func (s *Server) recentCwdCountsFromLatestDates(limit int) (map[string]int, int) { + counts := map[string]int{} + max := 0 + if limit <= 0 { + return counts, max + } + dates := s.idx.Dates() + if len(dates) > limit { + dates = dates[:limit] } for _, date := range dates { files := s.idx.SessionsByDate(date) @@ -655,14 +1769,33 @@ func normalizeCwdParam(value string) string { return value } +func normalizeSearchCwdFilter(value string) string { + value = normalizeCwdParam(value) + if value == "" { + return "" + } + if value != "/" && strings.HasSuffix(value, "/") { + value = strings.TrimRight(value, "/") + } + if value != "\\" && strings.HasSuffix(value, "\\") { + value = strings.TrimRight(value, "\\") + } + return value +} + func buildResumeCommand(meta *sessions.SessionMeta) string { if meta == nil || meta.ID == "" { return "" } + commands := make([]string, 0, 3) if 
meta.Cwd != "" { - return fmt.Sprintf("cd %s\ncodex resume %s", shellQuote(meta.Cwd), meta.ID) + commands = append(commands, "cd "+shellQuote(meta.Cwd)) } - return fmt.Sprintf("codex resume %s", meta.ID) + if branch := branchForMeta(meta); branch != "" { + commands = append(commands, "git switch "+shellQuote(branch)) + } + commands = append(commands, fmt.Sprintf("codex resume %s", meta.ID)) + return strings.Join(commands, "\n") } func shellQuote(value string) string { @@ -672,6 +1805,123 @@ func shellQuote(value string) string { return "'" + strings.ReplaceAll(value, "'", "'\"'\"'") + "'" } +func branchForMeta(meta *sessions.SessionMeta) string { + if meta == nil { + return "" + } + return meta.GitBranch() +} + +func (s *Server) branchURLForMeta(meta *sessions.SessionMeta, cwd string) string { + repoURL := normalizeRepositoryURL(s.repositoryURLForMeta(meta, cwd)) + if repoURL == "" { + return "" + } + branch := branchForMeta(meta) + if branch == "" { + return repoURL + } + parsed, err := url.Parse(repoURL) + if err != nil { + return repoURL + } + host := strings.ToLower(parsed.Hostname()) + if host != "github.com" && host != "www.github.com" { + return repoURL + } + parsed.Path = strings.TrimSuffix(parsed.Path, "/") + "/tree/" + escapeGitHubPath(branch) + parsed.RawPath = "" + return parsed.String() +} + +func (s *Server) repositoryURLForMeta(meta *sessions.SessionMeta, cwd string) string { + if s != nil && s.repoOverrides != nil { + if overrideURL := s.repoOverrides.ResolveRepositoryURL(cwd); overrideURL != "" { + return overrideURL + } + } + if meta == nil { + return "" + } + return meta.GitRepositoryURL() +} + +func normalizeRepositoryURL(value string) string { + value = strings.TrimSpace(value) + if value == "" { + return "" + } + switch { + case strings.HasPrefix(value, "git@github.com:"): + value = "https://github.com/" + strings.TrimPrefix(value, "git@github.com:") + case strings.HasPrefix(value, "ssh://git@github.com/"): + value = "https://github.com/" + 
strings.TrimPrefix(value, "ssh://git@github.com/") + } + value = strings.TrimSuffix(strings.TrimSuffix(value, "/"), ".git") + parsed, err := url.Parse(value) + if err != nil || parsed.Scheme == "" || parsed.Host == "" { + return "" + } + parsed.Path = strings.TrimSuffix(parsed.Path, "/") + return parsed.String() +} + +func escapeGitHubPath(value string) string { + parts := strings.Split(value, "/") + for i := range parts { + parts[i] = url.PathEscape(parts[i]) + } + return strings.Join(parts, "/") +} + +func semanticRoleLabel(role string) string { + switch strings.ToLower(strings.TrimSpace(role)) { + case "assistant": + return "agent" + case "subagent": + return "subagent" + case "user": + return "user" + default: + return strings.ToLower(strings.TrimSpace(role)) + } +} + +func sessionSummaryKey(file sessions.SessionFile) string { + if file.Meta != nil { + if id := strings.TrimSpace(file.Meta.ID); id != "" { + return "id:" + id + } + } + return "path:" + filepath.ToSlash(filepath.Join(file.Date.Path(), file.Name)) +} + +func (s *Server) sessionThreadState(file sessions.SessionFile) (string, string, string, string, string, bool) { + if s.active == nil || s.activeState == nil { + return "", "", "", "", "", false + } + key := sessionSummaryKey(file) + summary, ok := s.active.Lookup(key) + if !ok { + return "", "", "", "", "", false + } + + ended := false + if mark, ok := s.activeState.Snapshot()[summary.Key]; ok { + ended = mark.ActivityToken == "" || mark.ActivityToken == summary.ActivityToken + } + + statusLabel, statusClass := activeStatus(summary.WaitState, ended) + action := "end" + actionLabel := "⏹️ End" + if ended { + action = "reopen" + actionLabel = "↩️ Reopen" + } + + return summary.Key, statusLabel, statusClass, action, actionLabel, true +} + func (s *Server) buildSessionView(parts []string) (sessionPageView, error) { date, ok := sessions.ParseDate(parts[0], parts[1], parts[2]) if !ok { @@ -692,42 +1942,82 @@ func (s *Server) buildSessionView(parts 
[]string) (sessionPageView, error) { return sessionPageView{}, err } + toolRunOutputs := findToolRunOutputs(session.Items) + groupedOutputIndexes := make(map[int]struct{}, len(toolRunOutputs)) items := make([]itemView, 0, len(session.Items)) lastUserLine := 0 lastAnyUserLine := 0 - for _, item := range session.Items { - autoCtx := item.Role == "user" && sessions.IsAutoContextUserMessage(item.Content) - renderText := item.Content - if autoCtx { - renderText = escapeAutoContextTags(renderText) - } - view := itemView{ - Line: item.Line, - Timestamp: item.Timestamp, - Type: item.Type, - Subtype: item.Subtype, - Role: item.Role, - Title: item.Title, - Content: item.Content, - Class: item.Class, - Markdown: renderItemMarkdown(item), - HTML: markdownToHTML(renderText), - } - if autoCtx { - view.AutoCtx = true - view.Class = strings.TrimSpace(view.Class + " auto-context") + lastAgentLine := 0 + lastItemLine := 0 + isSubagentThread := session.Meta != nil && session.Meta.IsSubagentThread() + subagentDisplayName := "" + subagentDisplayRole := "" + parentThreadID := "" + parentSessionPath := "" + parentSessionTitle := "" + userNavLabel := "user" + displayName := file.DisplayName() + if session.Meta != nil { + if displayName == file.Name { + displayName = sessions.SessionDisplayName(file.Name, s.idx.ThreadName(session.Meta.ID)) + } + subagentDisplayName = session.Meta.SubagentNicknameValue() + subagentDisplayRole = session.Meta.SubagentRoleValue() + parentThreadID = session.Meta.ParentThreadID() + if parentThreadID != "" { + if parentFile, ok := s.idx.LookupByID(parentThreadID); ok { + parentSessionPath = "/" + parentFile.Date.Path() + "/" + parentFile.Name + "#page-top" + parentSessionTitle = formatSessionLinkTitle(parentFile) + } + } + if isSubagentThread { + userNavLabel = "agent" + } + } + for index := 0; index < len(session.Items); index++ { + if _, grouped := groupedOutputIndexes[index]; grouped { + continue } + + item := session.Items[index] + if outputIndex, ok := 
toolRunOutputs[index]; ok { + callView := s.buildSessionItemView(item, isSubagentThread, subagentDisplayName, subagentDisplayRole) + outputItem := session.Items[outputIndex] + outputView := s.buildSessionItemView(outputItem, isSubagentThread, subagentDisplayName, subagentDisplayRole) + grouped := buildToolRunView(item, callView, outputItem, outputView) + groupedOutputIndexes[outputIndex] = struct{}{} + if outputItem.Line > lastItemLine { + lastItemLine = outputItem.Line + } + items = append(items, grouped) + continue + } + + view := s.buildSessionItemView(item, isSubagentThread, subagentDisplayName, subagentDisplayRole) + autoCtx := view.AutoCtx + isSubagentNotification := item.SubagentID != "" if item.Role == "user" { - lastAnyUserLine = item.Line - if !autoCtx { + if !isSubagentNotification { + lastAnyUserLine = item.Line + } + if !autoCtx && !isSubagentNotification { lastUserLine = item.Line } } + if item.Role == "assistant" { + lastAgentLine = item.Line + } + if item.Line > lastItemLine { + lastItemLine = item.Line + } items = append(items, view) } if lastUserLine == 0 { lastUserLine = lastAnyUserLine } + annotateToolRunGroups(items) + + threadStateKey, threadStatusLabel, threadStatusClass, threadAction, threadActionLabel, hasThreadState := s.sessionThreadState(file) view := sessionPageView{ Date: dateView{ @@ -736,22 +2026,427 @@ func (s *Server) buildSessionView(parts []string) (sessionPageView, error) { Count: 0, }, File: sessionView{ - Name: file.Name, - Size: formatBytes(file.Size), - ModTime: formatTime(file.ModTime), - Cwd: displayCwd(sessions.CwdForFile(file)), + Name: file.Name, + DisplayName: displayName, + Size: formatBytes(file.Size), + ModTime: formatTime(file.ModTime), + ModTimeOnly: formatTimeOnly(file.ModTime), + Cwd: displayCwd(sessions.CwdForFile(file)), + Branch: branchForMeta(file.Meta), + BranchURL: s.branchURLForMeta(file.Meta, sessions.CwdForFile(file)), + DateLabel: date.String(), + DatePath: date.Path(), }, - Meta: session.Meta, - 
Items: items, - AllMarkdown: renderSessionMarkdown(session.Items), - ResumeCommand: buildResumeCommand(session.Meta), - ThemeClass: s.themeClass, - IsJSONL: strings.HasSuffix(strings.ToLower(file.Name), ".jsonl"), - LastUserLine: lastUserLine, + Meta: session.Meta, + IsSubagentThread: isSubagentThread, + SubagentDisplayName: subagentDisplayName, + SubagentDisplayRole: subagentDisplayRole, + ParentThreadID: parentThreadID, + ParentSessionPath: parentSessionPath, + ParentSessionTitle: parentSessionTitle, + UserNavLabel: userNavLabel, + Items: items, + ResumeCommand: buildResumeCommand(session.Meta), + ThreadStateKey: threadStateKey, + ThreadStatusLabel: threadStatusLabel, + ThreadStatusClass: threadStatusClass, + ThreadAction: threadAction, + ThreadActionLabel: threadActionLabel, + ThemeClass: s.themeClass, + IsJSONL: strings.HasSuffix(strings.ToLower(file.Name), ".jsonl"), + LastUserLine: lastUserLine, + LastAgentLine: lastAgentLine, + LastItemLine: lastItemLine, + } + if !hasThreadState { + view.ThreadStateKey = "" + view.ThreadStatusLabel = "" + view.ThreadStatusClass = "" + view.ThreadAction = "" + view.ThreadActionLabel = "" } return view, nil } +func findToolRunOutputs(items []sessions.RenderItem) map[int]int { + pendingCalls := make(map[string]int) + matches := make(map[int]int) + for index, item := range items { + switch { + case isToolRunCall(item): + pendingCalls[item.CallID] = index + case isToolRunOutput(item): + callIndex, ok := pendingCalls[item.CallID] + if !ok { + continue + } + callItem := items[callIndex] + if !shouldGroupToolRun(callItem, item) { + continue + } + matches[callIndex] = index + delete(pendingCalls, item.CallID) + } + } + return matches +} + +func isToolRunCall(item sessions.RenderItem) bool { + if item.Role != "tool" || item.CallID == "" { + return false + } + switch item.Subtype { + case "function_call", "custom_tool_call": + return true + default: + return false + } +} + +func isToolRunOutput(item sessions.RenderItem) bool { + if 
item.Role != "tool" || item.CallID == "" { + return false + } + switch item.Subtype { + case "function_call_output", "custom_tool_call_output": + return true + default: + return false + } +} + +func (s *Server) buildSessionItemView(item sessions.RenderItem, isSubagentThread bool, subagentDisplayName, subagentDisplayRole string) itemView { + autoCtx := item.Role == "user" && sessions.IsAutoContextUserMessage(item.Content) + isSubagentNotification := item.SubagentID != "" + turnAbortedMessage, isTurnAborted := "", false + if autoCtx { + if msg, ok := sessions.ExtractTurnAbortedMessage(item.Content); ok { + turnAbortedMessage = msg + isTurnAborted = true + } + } + renderText := item.Content + if autoCtx && !isTurnAborted { + renderText = escapeAutoContextTags(renderText) + } + view := itemView{ + Line: item.Line, + Timestamp: item.Timestamp, + Type: item.Type, + Subtype: item.Subtype, + Role: item.Role, + RoleLabel: semanticRoleLabel(item.Role), + SpeakerClass: semanticRoleLabel(item.Role), + Title: item.Title, + Content: item.Content, + Class: item.Class, + SubagentID: item.SubagentID, + SubagentNickname: item.SubagentNickname, + SubagentStatusType: item.SubagentStatusType, + SubagentRequest: item.SubagentRequest, + Markdown: renderItemMarkdown(item), + HTML: markdownToHTML(renderText), + } + if item.Subtype == "function_call" && item.ToolName == "update_plan" { + if planHTML := renderUpdatePlanHTML(item.CallID, item.ToolInput); planHTML != "" { + view.HTML = planHTML + } + } + if item.Subtype == "custom_tool_call" && item.ToolName == "apply_patch" { + if patchHTML := renderApplyPatchHTML(item.ToolInput); patchHTML != "" { + metaHTML := markdownToHTML(renderCustomToolCallMetaMarkdown(item)) + view.HTML = template.HTML(string(metaHTML) + string(patchHTML)) + } + } + if autoCtx { + view.AutoCtx = true + view.Class = strings.TrimSpace(view.Class + " auto-context") + } + if isSubagentNotification { + view.Class = strings.TrimSpace(view.Class + " subagent-notification") + 
if item.SubagentRequest != "" { + view.SubagentRequestHTML = markdownToHTML(item.SubagentRequest) + } + if subagentFile, ok := s.idx.LookupByID(item.SubagentID); ok { + view.SubagentSessionPath = "/" + subagentFile.Date.Path() + "/" + subagentFile.Name + "#page-top" + view.SubagentSessionTitle = formatSessionLinkTitle(subagentFile) + if subagentFile.Meta != nil { + if view.SubagentNickname == "" { + view.SubagentNickname = subagentFile.Meta.SubagentNicknameValue() + } + view.SpeakerRole = subagentFile.Meta.SubagentRoleValue() + } + } + view.SpeakerName = view.SubagentNickname + } + if isSubagentThread && !isSubagentNotification { + switch item.Role { + case "assistant": + view.RoleLabel = "subagent" + view.SpeakerClass = "subagent" + if item.Subtype == "message" && view.Title == "Agent" { + view.Title = "Subagent" + } + view.SpeakerName = subagentDisplayName + view.SpeakerRole = subagentDisplayRole + case "user": + view.RoleLabel = "agent" + view.SpeakerClass = "agent" + if item.Subtype == "message" && view.Title == "User" { + view.Title = "Agent" + } + } + } + if view.SpeakerClass != "" { + view.Class = strings.TrimSpace(view.Class + " speaker-" + view.SpeakerClass) + } + if isTurnAborted { + view.IsTurnAborted = true + view.TurnAbortedMessage = turnAbortedMessage + } + return view +} + +func shouldGroupToolRun(callItem, outputItem sessions.RenderItem) bool { + if callItem.Role != "tool" || outputItem.Role != "tool" { + return false + } + if callItem.CallID == "" || callItem.CallID != outputItem.CallID { + return false + } + switch { + case callItem.Subtype == "function_call" && outputItem.Subtype == "function_call_output": + return true + case callItem.Subtype == "custom_tool_call" && outputItem.Subtype == "custom_tool_call_output": + return true + default: + return false + } +} + +func buildToolRunView(callItem sessions.RenderItem, callView itemView, outputItem sessions.RenderItem, outputView itemView) itemView { + title := "Tool run" + subtype := "tool_run" + 
if callItem.Subtype == "custom_tool_call" { + title = "Custom tool run" + subtype = "custom_tool_run" + } + return itemView{ + Line: callItem.Line, + Timestamp: callItem.Timestamp, + Type: callItem.Type, + Subtype: subtype, + Role: callItem.Role, + RoleLabel: callView.RoleLabel, + SpeakerClass: callView.SpeakerClass, + Title: title, + Content: strings.TrimSpace(callItem.Content + "\n\n" + outputItem.Content), + Class: strings.TrimSpace(callView.Class + " tool-run"), + Markdown: renderToolRunMarkdown(title, callItem, outputItem), + HTML: callView.HTML, + ToolRunCallTitle: callView.Title, + ToolRunOutputLine: outputItem.Line, + ToolRunOutputTitle: outputView.Title, + ToolRunOutputHTML: outputView.HTML, + ToolRunOutputTime: outputItem.Timestamp, + } +} + +func annotateToolRunGroups(items []itemView) { + for start := 0; start < len(items); start++ { + if !isGroupedToolRunView(items[start]) { + continue + } + + end := start + 1 + for end < len(items) && isGroupedToolRunView(items[end]) { + end++ + } + count := end - start + + lastLine := items[end-1].Line + if outputLine := items[end-1].ToolRunOutputLine; outputLine > lastLine { + lastLine = outputLine + } + items[start].ToolRunGroupTitle = toolRunGroupTitle(items[start:end]) + items[start].ToolRunGroupCount = count + items[start].ToolRunGroupLastLine = lastLine + items[end-1].ToolRunGroupEnd = true + + for index := start; index < end; index++ { + items[index].ToolRunHideHeader = true + items[index].ToolRunGroupCount = count + items[index].Class = strings.TrimSpace(items[index].Class + " tool-run-group-member") + if index == start { + items[index].Class = strings.TrimSpace(items[index].Class + " tool-run-group-start") + } + if index == end-1 { + items[index].Class = strings.TrimSpace(items[index].Class + " tool-run-group-end") + } + } + + start = end - 1 + } +} + +func isGroupedToolRunView(item itemView) bool { + if item.ToolRunOutputLine == 0 { + return false + } + switch item.Subtype { + case "tool_run", 
"custom_tool_run": + return true + default: + return false + } +} + +func toolRunGroupTitle(items []itemView) string { + if len(items) == 0 { + return "" + } + + title := strings.TrimSpace(items[0].Title) + if title == "" { + title = "Tool run" + } + for _, item := range items[1:] { + if strings.TrimSpace(item.Title) != title { + return fmt.Sprintf("Tool run %d", len(items)) + } + } + return fmt.Sprintf("%s %d", title, len(items)) +} + +func renderCustomToolCallMetaMarkdown(item sessions.RenderItem) string { + sections := make([]string, 0, 3) + if value := strings.TrimSpace(item.ToolName); value != "" { + sections = append(sections, "**Custom tool:** "+value) + } + if value := strings.TrimSpace(item.ToolStatus); value != "" { + sections = append(sections, "**Status:** "+value) + } + if value := strings.TrimSpace(item.CallID); value != "" { + sections = append(sections, "**Call ID:** "+value) + } + return strings.TrimSpace(strings.Join(sections, "\n\n")) +} + +func renderApplyPatchHTML(input string) template.HTML { + trimmed := strings.TrimSpace(input) + if trimmed == "" { + return "" + } + + lines := strings.Split(trimmed, "\n") + var buf strings.Builder + buf.WriteString(`
`) + for _, line := range lines { + buf.WriteString(``) + buf.WriteString(html.EscapeString(line)) + buf.WriteString(``) + } + buf.WriteString(`
`) + return template.HTML(buf.String()) +} + +func renderUpdatePlanHTML(callID, input string) template.HTML { + var payload updatePlanHTMLArgs + if err := json.Unmarshal([]byte(input), &payload); err != nil { + return "" + } + + var buf strings.Builder + buf.WriteString("

Tool: update_plan

") + if value := strings.TrimSpace(callID); value != "" { + buf.WriteString("

Call ID: ") + buf.WriteString(html.EscapeString(value)) + buf.WriteString("

") + } + if value := strings.TrimSpace(payload.Explanation); value != "" { + buf.WriteString("

Explanation
") + buf.WriteString(renderPlainTextHTML(value)) + buf.WriteString("

") + } + + hasPlan := false + for _, step := range payload.Plan { + if strings.TrimSpace(step.Step) != "" { + hasPlan = true + break + } + } + if !hasPlan { + return template.HTML(buf.String()) + } + + buf.WriteString("

Plan

") + buf.WriteString(`
    `) + for _, step := range payload.Plan { + text := strings.TrimSpace(step.Step) + if text == "" { + continue + } + className, marker := updatePlanHTMLStatusParts(step.Status) + buf.WriteString(`
  • `) + buf.WriteString(``) + buf.WriteString(html.EscapeString(marker)) + buf.WriteString(``) + buf.WriteString(``) + buf.WriteString(renderPlainTextHTML(text)) + buf.WriteString(`
  • `) + } + buf.WriteString(`
`) + return template.HTML(buf.String()) +} + +func renderPlainTextHTML(text string) string { + escaped := html.EscapeString(strings.TrimSpace(text)) + return strings.ReplaceAll(escaped, "\n", "
\n") +} + +func updatePlanHTMLStatusParts(status string) (className, marker string) { + switch strings.ToLower(strings.TrimSpace(status)) { + case "completed": + return "is-completed", "✅" + case "in_progress": + return "is-in-progress", "□" + case "pending": + return "is-pending", "□" + default: + value := strings.TrimSpace(status) + if value == "" { + return "is-pending", "□" + } + return "is-unknown", "[" + value + "]" + } +} + +func patchLineClass(line string) string { + switch { + case strings.HasPrefix(line, "*** Begin Patch"), strings.HasPrefix(line, "*** End Patch"): + return "patch-line-marker" + case strings.HasPrefix(line, "*** Update File:"), strings.HasPrefix(line, "*** Add File:"), strings.HasPrefix(line, "*** Delete File:"), strings.HasPrefix(line, "*** Move to:"): + return "patch-line-file" + case strings.HasPrefix(line, "@@"): + return "patch-line-hunk" + case strings.HasPrefix(line, "+"): + return "patch-line-add" + case strings.HasPrefix(line, "-"): + return "patch-line-del" + default: + return "patch-line-context" + } +} + +func formatSessionLinkTitle(file sessions.SessionFile) string { + return fmt.Sprintf("%s / %s", file.Date.String(), file.DisplayName()) +} + func themeClass(theme int) string { switch theme { case 1: @@ -822,9 +2517,33 @@ func renderItemMarkdown(item sessions.RenderItem) string { if content == "" { content = "(empty)" } + if item.SubagentID != "" && strings.TrimSpace(item.SubagentRequest) != "" { + return fmt.Sprintf("## %s\n\n### Agent request\n\n%s\n\n### Subagent response\n\n%s\n", title, strings.TrimSpace(item.SubagentRequest), content) + } return fmt.Sprintf("## %s\n\n%s\n", title, content) } +func renderToolRunMarkdown(title string, callItem, outputItem sessions.RenderItem) string { + sections := []string{ + "## " + strings.TrimSpace(title), + "### " + strings.TrimSpace(callItem.Title), + strings.TrimSpace(callItem.Content), + "### " + strings.TrimSpace(outputItem.Title), + strings.TrimSpace(outputItem.Content), + } + 
filtered := make([]string, 0, len(sections)) + for _, section := range sections { + if strings.TrimSpace(section) == "" { + continue + } + filtered = append(filtered, section) + } + if len(filtered) == 0 { + return "" + } + return strings.TrimSpace(strings.Join(filtered, "\n\n")) + "\n" +} + func renderSessionMarkdown(items []sessions.RenderItem) string { if len(items) == 0 { return "" @@ -836,6 +2555,36 @@ func renderSessionMarkdown(items []sessions.RenderItem) string { return strings.TrimSpace(strings.Join(parts, "\n\n")) + "\n" } +func joinItemMarkdown(items []itemView) string { + if len(items) == 0 { + return "" + } + parts := make([]string, 0, len(items)) + for _, item := range items { + if strings.TrimSpace(item.Markdown) == "" { + continue + } + parts = append(parts, item.Markdown) + } + if len(parts) == 0 { + return "" + } + return strings.TrimSpace(strings.Join(parts, "\n\n")) + "\n" +} + +func sessionItemMarkdown(items []itemView, line int) (string, bool) { + for _, item := range items { + if item.Line != line && item.ToolRunOutputLine != line { + continue + } + if strings.TrimSpace(item.Markdown) == "" { + return "", false + } + return item.Markdown, true + } + return "", false +} + func escapeAutoContextTags(text string) string { replacer := strings.NewReplacer( "", "<INSTRUCTIONS>", @@ -844,6 +2593,8 @@ func escapeAutoContextTags(text string) string { "", "</environment_context>", "", "<turn_aborted>", "", "</turn_aborted>", + "", "<subagent_notification>", + "", "</subagent_notification>", ) return replacer.Replace(text) } diff --git a/internal/web/server_active_test.go b/internal/web/server_active_test.go new file mode 100644 index 0000000..b5df139 --- /dev/null +++ b/internal/web/server_active_test.go @@ -0,0 +1,499 @@ +package web + +import ( + "net/http" + "net/http/httptest" + "net/url" + "os" + "path/filepath" + "strings" + "testing" + "time" + + "codex-manager/internal/active" + "codex-manager/internal/render" + 
"codex-manager/internal/sessions" +) + +func TestHandleActiveUsesCookieTimeZoneForDayFilter(t *testing.T) { + root := t.TempDir() + sessionsDir := filepath.Join(root, "sessions") + if err := os.MkdirAll(filepath.Join(sessionsDir, "2026", "03", "18"), 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + if err := os.WriteFile(filepath.Join(root, "session_index.jsonl"), []byte("{\"id\":\"session-1\",\"thread_name\":\"tz thread\",\"updated_at\":\"2026-03-18T00:30:00Z\"}\n"), 0o600); err != nil { + t.Fatalf("write session index: %v", err) + } + + filePath := filepath.Join(sessionsDir, "2026", "03", "18", "cross-day.jsonl") + data := "" + + "{\"timestamp\":\"2026-03-18T00:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-1\",\"timestamp\":\"2026-03-18T00:00:00Z\",\"cwd\":\"/tmp/app\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-18T00:10:00Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## My request for Codex:\\nContinue work\"}]}}\n" + + "{\"timestamp\":\"2026-03-18T00:20:00Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"assistant\",\"content\":[{\"type\":\"output_text\",\"text\":\"Still working\"}]}}\n" + + "{\"timestamp\":\"2026-03-18T00:30:00Z\",\"type\":\"event_msg\",\"payload\":{\"type\":\"task_complete\"}}\n" + if err := os.WriteFile(filePath, []byte(data), 0o600); err != nil { + t.Fatalf("write session: %v", err) + } + + server := newActiveTestServer(t, sessionsDir) + + req := httptest.NewRequest(http.MethodGet, "http://example.com/active?date=2026-03-17", nil) + req.AddCookie(&http.Cookie{Name: activeTimeZoneCookie, Value: "America%2FLos_Angeles"}) + rec := httptest.NewRecorder() + server.ServeHTTP(rec, req) + + if rec.Code != http.StatusOK { + t.Fatalf("status: got %d body %s", rec.Code, rec.Body.String()) + } + if body := rec.Body.String(); !strings.Contains(body, "tz thread (cross-day.jsonl)") 
{ + t.Fatalf("expected thread to appear for Los Angeles day view, body=%s", body) + } else if !strings.Contains(body, "▶️ Copy resume") { + t.Fatalf("expected resume button icon, body=%s", body) + } else if !strings.Contains(body, "⏹️ End") { + t.Fatalf("expected end button icon, body=%s", body) + } else if !strings.Contains(body, "thread-status-waiting-user") { + t.Fatalf("expected waiting-user card class on active page, body=%s", body) + } + + reqJST := httptest.NewRequest(http.MethodGet, "http://example.com/active?date=2026-03-17", nil) + reqJST.AddCookie(&http.Cookie{Name: activeTimeZoneCookie, Value: "Asia%2FTokyo"}) + recJST := httptest.NewRecorder() + server.ServeHTTP(recJST, reqJST) + + if recJST.Code != http.StatusOK { + t.Fatalf("status: got %d body %s", recJST.Code, recJST.Body.String()) + } + if body := recJST.Body.String(); strings.Contains(body, "tz thread (cross-day.jsonl)") { + t.Fatalf("expected thread to be absent for Tokyo day view, body=%s", body) + } +} + +func TestHandleActiveShowsThinkingPlaceholderForUnansweredUserMessage(t *testing.T) { + root := t.TempDir() + sessionsDir := filepath.Join(root, "sessions") + dateDir := filepath.Join(sessionsDir, "2026", "03", "19") + if err := os.MkdirAll(dateDir, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + filePath := filepath.Join(dateDir, "thinking.jsonl") + data := "" + + "{\"timestamp\":\"2026-03-19T02:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-thinking\",\"timestamp\":\"2026-03-19T02:00:00Z\",\"cwd\":\"/tmp/app\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-19T02:00:05Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## My request for Codex:\\nOld request\"}]}}\n" + + "{\"timestamp\":\"2026-03-19T02:00:10Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"assistant\",\"content\":[{\"type\":\"output_text\",\"text\":\"Old 
assistant reply\"}]}}\n" + + "{\"timestamp\":\"2026-03-19T02:00:20Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## My request for Codex:\\nNewest request\"}]}}\n" + + "{\"timestamp\":\"2026-03-19T02:00:30Z\",\"type\":\"event_msg\",\"payload\":{\"type\":\"task_started\"}}\n" + if err := os.WriteFile(filePath, []byte(data), 0o600); err != nil { + t.Fatalf("write session: %v", err) + } + + server := newActiveTestServer(t, sessionsDir) + + req := httptest.NewRequest(http.MethodGet, "http://example.com/active?scope=all", nil) + rec := httptest.NewRecorder() + server.ServeHTTP(rec, req) + + if rec.Code != http.StatusOK { + t.Fatalf("status: got %d body %s", rec.Code, rec.Body.String()) + } + body := rec.Body.String() + if !strings.Contains(body, "Newest request") { + t.Fatalf("expected latest user snippet, body=%s", body) + } + if !strings.Contains(body, "Thinking...") { + t.Fatalf("expected thinking placeholder, body=%s", body) + } + if strings.Contains(body, "Old assistant reply") { + t.Fatalf("expected stale assistant snippet to be hidden, body=%s", body) + } +} + +func TestHandleActiveShowsBranchAndBranchAwareResumeCommand(t *testing.T) { + root := t.TempDir() + sessionsDir := filepath.Join(root, "sessions") + dateDir := filepath.Join(sessionsDir, "2026", "03", "19") + if err := os.MkdirAll(dateDir, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + filePath := filepath.Join(dateDir, "branch.jsonl") + data := "" + + "{\"timestamp\":\"2026-03-19T02:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-branch\",\"timestamp\":\"2026-03-19T02:00:00Z\",\"cwd\":\"/tmp/app\",\"git\":{\"branch\":\"feature/active-branch\",\"commit_hash\":\"abc123\"},\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-19T02:00:05Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## 
My request for Codex:\\nResume me\"}]}}\n" + + "{\"timestamp\":\"2026-03-19T02:00:10Z\",\"type\":\"event_msg\",\"payload\":{\"type\":\"task_complete\"}}\n" + if err := os.WriteFile(filePath, []byte(data), 0o600); err != nil { + t.Fatalf("write session: %v", err) + } + + server := newActiveTestServer(t, sessionsDir) + + req := httptest.NewRequest(http.MethodGet, "http://example.com/active?scope=all", nil) + rec := httptest.NewRecorder() + server.ServeHTTP(rec, req) + + if rec.Code != http.StatusOK { + t.Fatalf("status: got %d body %s", rec.Code, rec.Body.String()) + } + body := rec.Body.String() + if !strings.Contains(body, "Branch: feature/active-branch") { + t.Fatalf("expected branch label on active page, body=%s", body) + } + if !strings.Contains(body, "git switch 'feature/active-branch'") { + t.Fatalf("expected branch-aware resume command on active page, body=%s", body) + } +} + +func TestHandleActiveShowsDateDividersForAllActiveThreads(t *testing.T) { + root := t.TempDir() + sessionsDir := filepath.Join(root, "sessions") + date19Dir := filepath.Join(sessionsDir, "2026", "03", "19") + date18Dir := filepath.Join(sessionsDir, "2026", "03", "18") + if err := os.MkdirAll(date19Dir, 0o755); err != nil { + t.Fatalf("mkdir 19: %v", err) + } + if err := os.MkdirAll(date18Dir, 0o755); err != nil { + t.Fatalf("mkdir 18: %v", err) + } + + newerPath := filepath.Join(date19Dir, "newer.jsonl") + newerData := "" + + "{\"timestamp\":\"2026-03-19T02:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-newer\",\"timestamp\":\"2026-03-19T02:00:00Z\",\"cwd\":\"/tmp/app\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-19T02:00:10Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## My request for Codex:\\nNewer thread\"}]}}\n" + + "{\"timestamp\":\"2026-03-19T02:00:20Z\",\"type\":\"event_msg\",\"payload\":{\"type\":\"task_complete\"}}\n" + if err := 
os.WriteFile(newerPath, []byte(newerData), 0o600); err != nil { + t.Fatalf("write newer: %v", err) + } + + olderPath := filepath.Join(date18Dir, "older.jsonl") + olderData := "" + + "{\"timestamp\":\"2026-03-18T03:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-older\",\"timestamp\":\"2026-03-18T03:00:00Z\",\"cwd\":\"/tmp/app\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-18T03:00:10Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## My request for Codex:\\nOlder thread\"}]}}\n" + + "{\"timestamp\":\"2026-03-18T03:00:20Z\",\"type\":\"event_msg\",\"payload\":{\"type\":\"task_complete\"}}\n" + if err := os.WriteFile(olderPath, []byte(olderData), 0o600); err != nil { + t.Fatalf("write older: %v", err) + } + + server := newActiveTestServer(t, sessionsDir) + + req := httptest.NewRequest(http.MethodGet, "http://example.com/active?scope=all", nil) + rec := httptest.NewRecorder() + server.ServeHTTP(rec, req) + + if rec.Code != http.StatusOK { + t.Fatalf("status: got %d body %s", rec.Code, rec.Body.String()) + } + body := rec.Body.String() + if count := strings.Count(body, `class="active-date-divider-label">2026-03-19`); count != 1 { + t.Fatalf("expected one 2026-03-19 divider, got %d body=%s", count, body) + } + if count := strings.Count(body, `class="active-date-divider-label">2026-03-18`); count != 1 { + t.Fatalf("expected one 2026-03-18 divider, got %d body=%s", count, body) + } + if strings.Index(body, `class="active-date-divider-label">2026-03-19`) > strings.Index(body, "newer.jsonl") { + t.Fatalf("expected 2026-03-19 divider before newer thread, body=%s", body) + } + if strings.Index(body, `class="active-date-divider-label">2026-03-18`) > strings.Index(body, "older.jsonl") { + t.Fatalf("expected 2026-03-18 divider before older thread, body=%s", body) + } +} + +func TestHandleActiveFiltersByCwdAndPreservesDirectoryTabs(t *testing.T) 
{ + root := t.TempDir() + sessionsDir := filepath.Join(root, "sessions") + dateDir := filepath.Join(sessionsDir, "2026", "03", "18") + if err := os.MkdirAll(dateDir, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + appPath := filepath.Join(dateDir, "app.jsonl") + appData := "" + + "{\"timestamp\":\"2026-03-18T02:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-app\",\"timestamp\":\"2026-03-18T02:00:00Z\",\"cwd\":\"/tmp/app\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-18T02:00:10Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## My request for Codex:\\nKeep app thread alive\"}]}}\n" + + "{\"timestamp\":\"2026-03-18T02:00:20Z\",\"type\":\"event_msg\",\"payload\":{\"type\":\"task_complete\"}}\n" + if err := os.WriteFile(appPath, []byte(appData), 0o600); err != nil { + t.Fatalf("write app session: %v", err) + } + + otherPath := filepath.Join(dateDir, "other.jsonl") + otherData := "" + + "{\"timestamp\":\"2026-03-18T03:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-other\",\"timestamp\":\"2026-03-18T03:00:00Z\",\"cwd\":\"/tmp/other\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-18T03:00:10Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## My request for Codex:\\nKeep other thread alive\"}]}}\n" + + "{\"timestamp\":\"2026-03-18T03:00:20Z\",\"type\":\"event_msg\",\"payload\":{\"type\":\"task_complete\"}}\n" + if err := os.WriteFile(otherPath, []byte(otherData), 0o600); err != nil { + t.Fatalf("write other session: %v", err) + } + + server := newActiveTestServer(t, sessionsDir) + + req := httptest.NewRequest(http.MethodGet, "http://example.com/active?scope=all&date=2026-03-18&cwd=/tmp/app", nil) + rec := httptest.NewRecorder() + server.ServeHTTP(rec, req) + + if rec.Code != http.StatusOK { + 
t.Fatalf("status: got %d body %s", rec.Code, rec.Body.String()) + } + body := rec.Body.String() + if !strings.Contains(body, "app.jsonl") { + t.Fatalf("expected filtered app session, body=%s", body) + } + if strings.Contains(body, "other.jsonl") { + t.Fatalf("expected other cwd session to be filtered out, body=%s", body) + } + if !strings.Contains(body, "Directory filter active: /tmp/app") { + t.Fatalf("expected active page cwd notice, body=%s", body) + } + if !strings.Contains(body, `href="/2026/03/18/?cwd=%2Ftmp%2Fapp"`) { + t.Fatalf("expected by-day tab to link back to cwd day view, body=%s", body) + } + if !strings.Contains(body, `href="/active?cwd=%2Ftmp%2Fapp&date=2026-03-18&scope=all"`) { + t.Fatalf("expected all-active tab to preserve cwd/date, body=%s", body) + } + if !strings.Contains(body, `href="/active?cwd=%2Ftmp%2Fapp&date=2026-03-18&scope=ended"`) { + t.Fatalf("expected ended tab to preserve cwd/date, body=%s", body) + } +} + +func TestHandleActiveStateMarksEndedAndReopens(t *testing.T) { + root := t.TempDir() + sessionsDir := filepath.Join(root, "sessions") + if err := os.MkdirAll(filepath.Join(sessionsDir, "2026", "03", "18"), 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + filePath := filepath.Join(sessionsDir, "2026", "03", "18", "ended.jsonl") + data := "" + + "{\"timestamp\":\"2026-03-18T03:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-2\",\"timestamp\":\"2026-03-18T03:00:00Z\",\"cwd\":\"/tmp/app\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-18T03:00:10Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## My request for Codex:\\nReview it\"}]}}\n" + + "{\"timestamp\":\"2026-03-18T03:00:20Z\",\"type\":\"event_msg\",\"payload\":{\"type\":\"task_complete\"}}\n" + if err := os.WriteFile(filePath, []byte(data), 0o600); err != nil { + t.Fatalf("write session: %v", err) + } + + server := 
newActiveTestServer(t, sessionsDir) + summary := server.active.Summaries()[0] + + postReq := httptest.NewRequest(http.MethodPost, "http://example.com/active/state", strings.NewReader("action=end&key="+url.QueryEscape(summary.Key))) + postReq.Header.Set("Content-Type", "application/x-www-form-urlencoded") + postRec := httptest.NewRecorder() + server.ServeHTTP(postRec, postReq) + + if postRec.Code != http.StatusOK { + t.Fatalf("status: got %d body %s", postRec.Code, postRec.Body.String()) + } + if got := len(server.activeState.Snapshot()); got != 1 { + t.Fatalf("expected one ended mark, got %d", got) + } + + endedReq := httptest.NewRequest(http.MethodGet, "http://example.com/active?scope=ended", nil) + endedRec := httptest.NewRecorder() + server.ServeHTTP(endedRec, endedReq) + if body := endedRec.Body.String(); !strings.Contains(body, "↩️ Reopen") || !strings.Contains(body, "ended.jsonl") { + t.Fatalf("expected ended thread to render, body=%s", body) + } + + reopenReq := httptest.NewRequest(http.MethodPost, "http://example.com/active/state", strings.NewReader("action=reopen&key="+url.QueryEscape(summary.Key))) + reopenReq.Header.Set("Content-Type", "application/x-www-form-urlencoded") + reopenRec := httptest.NewRecorder() + server.ServeHTTP(reopenRec, reopenReq) + + if reopenRec.Code != http.StatusOK { + t.Fatalf("status: got %d body %s", reopenRec.Code, reopenRec.Body.String()) + } + if got := len(server.activeState.Snapshot()); got != 0 { + t.Fatalf("expected ended mark to clear, got %d", got) + } +} + +func TestHandleSessionShowsThreadStateActionAndEndedState(t *testing.T) { + root := t.TempDir() + sessionsDir := filepath.Join(root, "sessions") + if err := os.MkdirAll(filepath.Join(sessionsDir, "2026", "03", "18"), 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + filePath := filepath.Join(sessionsDir, "2026", "03", "18", "thread-state.jsonl") + data := "" + + 
"{\"timestamp\":\"2026-03-18T04:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-3\",\"timestamp\":\"2026-03-18T04:00:00Z\",\"cwd\":\"/tmp/app\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-18T04:00:10Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## My request for Codex:\\nHandle it\"}]}}\n" + + "{\"timestamp\":\"2026-03-18T04:00:20Z\",\"type\":\"event_msg\",\"payload\":{\"type\":\"task_complete\"}}\n" + if err := os.WriteFile(filePath, []byte(data), 0o600); err != nil { + t.Fatalf("write session: %v", err) + } + + server := newActiveTestServer(t, sessionsDir) + summary := server.active.Summaries()[0] + + sessionReq := httptest.NewRequest(http.MethodGet, "http://example.com/2026/03/18/thread-state.jsonl", nil) + sessionRec := httptest.NewRecorder() + server.ServeHTTP(sessionRec, sessionReq) + + if sessionRec.Code != http.StatusOK { + t.Fatalf("status: got %d body %s", sessionRec.Code, sessionRec.Body.String()) + } + if body := sessionRec.Body.String(); !strings.Contains(body, "▶️ Copy resume command") { + t.Fatalf("expected resume command action, body=%s", body) + } else if !strings.Contains(body, "⏹️ End") { + t.Fatalf("expected end action on session page, body=%s", body) + } else if !strings.Contains(body, "Waiting for user") { + t.Fatalf("expected waiting-user status on session page, body=%s", body) + } else if !strings.Contains(body, "STATE:") { + t.Fatalf("expected state label on session page, body=%s", body) + } else if !strings.Contains(body, "Navigate this thread:") { + t.Fatalf("expected navigate label on session page, body=%s", body) + } else if !strings.Contains(body, "Previous user message") || !strings.Contains(body, "Next user message") || !strings.Contains(body, "Last user message") { + t.Fatalf("expected user jump controls on session page, body=%s", body) + } else if !strings.Contains(body, 
`data-active-key="`+summary.Key+`"`) { + t.Fatalf("expected active key on session page, body=%s", body) + } + + postReq := httptest.NewRequest(http.MethodPost, "http://example.com/active/state", strings.NewReader("action=end&key="+url.QueryEscape(summary.Key))) + postReq.Header.Set("Content-Type", "application/x-www-form-urlencoded") + postRec := httptest.NewRecorder() + server.ServeHTTP(postRec, postReq) + + if postRec.Code != http.StatusOK { + t.Fatalf("status: got %d body %s", postRec.Code, postRec.Body.String()) + } + + endedReq := httptest.NewRequest(http.MethodGet, "http://example.com/2026/03/18/thread-state.jsonl", nil) + endedRec := httptest.NewRecorder() + server.ServeHTTP(endedRec, endedReq) + + if endedRec.Code != http.StatusOK { + t.Fatalf("status: got %d body %s", endedRec.Code, endedRec.Body.String()) + } + if body := endedRec.Body.String(); !strings.Contains(body, "↩️ Reopen") { + t.Fatalf("expected reopen action on ended session page, body=%s", body) + } else if !strings.Contains(body, "Ended") { + t.Fatalf("expected ended status on session page, body=%s", body) + } +} + +func TestHandleDayShowsThreadStateActionsForDirectorySessions(t *testing.T) { + root := t.TempDir() + sessionsDir := filepath.Join(root, "sessions") + dateDir := filepath.Join(sessionsDir, "2026", "03", "18") + if err := os.MkdirAll(dateDir, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + waitingUserPath := filepath.Join(dateDir, "waiting-user.jsonl") + waitingUserData := "" + + "{\"timestamp\":\"2026-03-18T05:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-user\",\"timestamp\":\"2026-03-18T05:00:00Z\",\"cwd\":\"/tmp/app\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-18T05:00:10Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## My request for Codex:\\nWrap it up\"}]}}\n" + + 
"{\"timestamp\":\"2026-03-18T05:00:20Z\",\"type\":\"event_msg\",\"payload\":{\"type\":\"task_complete\"}}\n" + if err := os.WriteFile(waitingUserPath, []byte(waitingUserData), 0o600); err != nil { + t.Fatalf("write waiting-user: %v", err) + } + + waitingAgentPath := filepath.Join(dateDir, "waiting-agent.jsonl") + waitingAgentData := "" + + "{\"timestamp\":\"2026-03-18T06:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-agent\",\"timestamp\":\"2026-03-18T06:00:00Z\",\"cwd\":\"/tmp/app\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-18T06:00:10Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## My request for Codex:\\nKeep working\"}]}}\n" + + "{\"timestamp\":\"2026-03-18T06:00:20Z\",\"type\":\"event_msg\",\"payload\":{\"type\":\"task_started\"}}\n" + if err := os.WriteFile(waitingAgentPath, []byte(waitingAgentData), 0o600); err != nil { + t.Fatalf("write waiting-agent: %v", err) + } + + server := newActiveTestServer(t, sessionsDir) + + req := httptest.NewRequest(http.MethodGet, "http://example.com/2026/03/18/?cwd=/tmp/app", nil) + rec := httptest.NewRecorder() + server.ServeHTTP(rec, req) + + if rec.Code != http.StatusOK { + t.Fatalf("status: got %d body %s", rec.Code, rec.Body.String()) + } + body := rec.Body.String() + if !strings.Contains(body, "waiting-user.jsonl") { + t.Fatalf("expected waiting-user session, body=%s", body) + } + if !strings.Contains(body, "waiting-agent.jsonl") { + t.Fatalf("expected waiting-agent session, body=%s", body) + } + if !strings.Contains(body, "Waiting for user") { + t.Fatalf("expected waiting-user status, body=%s", body) + } + if !strings.Contains(body, "Waiting for agent") { + t.Fatalf("expected waiting-agent status, body=%s", body) + } + if !strings.Contains(body, "thread-status-waiting-user") { + t.Fatalf("expected waiting-user card class on day page, body=%s", body) + } + if 
!strings.Contains(body, "⏹️ End") { + t.Fatalf("expected end action button, body=%s", body) + } + if !strings.Contains(body, `href="/2026/03/18/?cwd=%2Ftmp%2Fapp"`) { + t.Fatalf("expected by-day tab on day page, body=%s", body) + } + if !strings.Contains(body, `href="/active?cwd=%2Ftmp%2Fapp&date=2026-03-18&scope=all"`) { + t.Fatalf("expected all-active tab on day page, body=%s", body) + } + if !strings.Contains(body, `href="/active?cwd=%2Ftmp%2Fapp&date=2026-03-18&scope=ended"`) { + t.Fatalf("expected ended tab on day page, body=%s", body) + } + + var waitingUserSummary active.Summary + foundWaitingUser := false + for _, summary := range server.active.Summaries() { + if summary.SessionID == "session-user" { + waitingUserSummary = summary + foundWaitingUser = true + break + } + } + if !foundWaitingUser { + t.Fatal("expected waiting-user summary") + } + + postReq := httptest.NewRequest(http.MethodPost, "http://example.com/active/state", strings.NewReader("action=end&key="+url.QueryEscape(waitingUserSummary.Key))) + postReq.Header.Set("Content-Type", "application/x-www-form-urlencoded") + postRec := httptest.NewRecorder() + server.ServeHTTP(postRec, postReq) + if postRec.Code != http.StatusOK { + t.Fatalf("status: got %d body %s", postRec.Code, postRec.Body.String()) + } + + endedReq := httptest.NewRequest(http.MethodGet, "http://example.com/2026/03/18/?cwd=/tmp/app", nil) + endedRec := httptest.NewRecorder() + server.ServeHTTP(endedRec, endedReq) + + if endedRec.Code != http.StatusOK { + t.Fatalf("status: got %d body %s", endedRec.Code, endedRec.Body.String()) + } + endedBody := endedRec.Body.String() + if !strings.Contains(endedBody, "Ended") { + t.Fatalf("expected ended status on day page, body=%s", endedBody) + } + if !strings.Contains(endedBody, "↩️ Reopen") { + t.Fatalf("expected reopen action on day page, body=%s", endedBody) + } +} + +func newActiveTestServer(t *testing.T, sessionsDir string) *Server { + t.Helper() + + idx := sessions.NewIndex(sessionsDir) + 
if err := idx.Refresh(); err != nil { + t.Fatalf("refresh sessions: %v", err) + } + + activeIdx := active.NewIndex() + if err := activeIdx.RefreshFrom(idx); err != nil { + t.Fatalf("refresh active index: %v", err) + } + + state, err := active.LoadStateStore(filepath.Join(filepath.Dir(sessionsDir), "session_state.json")) + if err != nil { + t.Fatalf("load state: %v", err) + } + + renderer, err := render.New() + if err != nil { + t.Fatalf("renderer: %v", err) + } + + server := NewServer(idx, nil, renderer, sessionsDir, "", "", 3) + server.EnableActive(activeIdx, state, time.Hour) + return server +} diff --git a/internal/web/server_notifications_test.go b/internal/web/server_notifications_test.go new file mode 100644 index 0000000..c02ada3 --- /dev/null +++ b/internal/web/server_notifications_test.go @@ -0,0 +1,72 @@ +package web + +import ( + "net/http" + "net/http/httptest" + "os" + "path/filepath" + "strings" + "testing" + + "codex-manager/internal/notifications" + "codex-manager/internal/render" + "codex-manager/internal/sessions" +) + +func TestHandleHookStoresNotificationAndRendersNotificationsPage(t *testing.T) { + root := t.TempDir() + sessionsDir := filepath.Join(root, "sessions") + if err := os.MkdirAll(sessionsDir, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + idx := sessions.NewIndex(sessionsDir) + if err := idx.Refresh(); err != nil { + t.Fatalf("refresh sessions: %v", err) + } + + renderer, err := render.New() + if err != nil { + t.Fatalf("renderer: %v", err) + } + + store, err := notifications.LoadStore(filepath.Join(root, "notifications.jsonl")) + if err != nil { + t.Fatalf("load notifications: %v", err) + } + + server := NewServer(idx, nil, renderer, sessionsDir, "", "", 3) + server.EnableNotifications(store) + + payload := `{"type":"task_complete","message":"done"}` + hookReq := httptest.NewRequest(http.MethodPost, "http://example.com/hook", strings.NewReader(payload)) + hookReq.Header.Set("Content-Type", "application/json") + 
hookReq.Header.Set("X-Codex-Test", "yes") + hookRec := httptest.NewRecorder() + server.ServeHTTP(hookRec, hookReq) + + if hookRec.Code != http.StatusNoContent { + t.Fatalf("status: got %d body %s", hookRec.Code, hookRec.Body.String()) + } + if got := len(store.Entries()); got != 1 { + t.Fatalf("expected 1 notification, got %d", got) + } + + pageReq := httptest.NewRequest(http.MethodGet, "http://example.com/notifications", nil) + pageRec := httptest.NewRecorder() + server.ServeHTTP(pageRec, pageReq) + + if pageRec.Code != http.StatusOK { + t.Fatalf("status: got %d body %s", pageRec.Code, pageRec.Body.String()) + } + body := pageRec.Body.String() + if !strings.Contains(body, "POST /hook") { + t.Fatalf("expected hook entry, body=%s", body) + } + if !strings.Contains(body, "task_complete") { + t.Fatalf("expected JSON payload, body=%s", body) + } + if !strings.Contains(body, "X-Codex-Test") { + t.Fatalf("expected headers to render, body=%s", body) + } +} diff --git a/internal/web/server_test.go b/internal/web/server_test.go new file mode 100644 index 0000000..774fd11 --- /dev/null +++ b/internal/web/server_test.go @@ -0,0 +1,1161 @@ +package web + +import ( + "bytes" + "net/http" + "net/http/httptest" + "os" + "path/filepath" + "strings" + "testing" + + "codex-manager/internal/render" + "codex-manager/internal/repooverride" + "codex-manager/internal/sessions" +) + +func TestBuildSessionViewLinksSubagentNotification(t *testing.T) { + sessionsDir := t.TempDir() + datePath := filepath.Join(sessionsDir, "2026", "03", "13") + if err := os.MkdirAll(datePath, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + parentPath := filepath.Join(datePath, "parent.jsonl") + parentData := "" + + "{\"timestamp\":\"2026-03-13T00:25:44Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"parent\",\"timestamp\":\"2026-03-13T00:25:44Z\",\"cwd\":\"/tmp\",\"originator\":\"cli\",\"cli_version\":\"0.1\",\"source\":\"cli\"}}\n" + + 
"{\"timestamp\":\"2026-03-13T00:25:44Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"function_call\",\"name\":\"spawn_agent\",\"arguments\":\"{\\\"message\\\":\\\"review BillingStatusService\\\"}\",\"call_id\":\"call_1\"}}\n" + + "{\"timestamp\":\"2026-03-13T00:25:44Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"function_call_output\",\"call_id\":\"call_1\",\"output\":\"{\\\"agent_id\\\":\\\"agent-1\\\",\\\"nickname\\\":\\\"Anscombe\\\"}\"}}\n" + + "{\"timestamp\":\"2026-03-13T00:25:45Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"\\n{\\\"agent_id\\\":\\\"agent-1\\\",\\\"status\\\":{\\\"completed\\\":\\\"done\\\"}}\\n\"}]}}\n" + if err := os.WriteFile(parentPath, []byte(parentData), 0o600); err != nil { + t.Fatalf("write parent: %v", err) + } + + subagentPath := filepath.Join(datePath, "rollout-2026-03-13T09-23-02-agent-1.jsonl") + subagentData := "" + + "{\"timestamp\":\"2026-03-13T00:23:02Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"agent-1\",\"timestamp\":\"2026-03-13T00:23:02Z\",\"cwd\":\"/tmp\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-13T00:23:03Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"assistant\",\"content\":[{\"type\":\"output_text\",\"text\":\"hello\"}]}}\n" + if err := os.WriteFile(subagentPath, []byte(subagentData), 0o600); err != nil { + t.Fatalf("write subagent: %v", err) + } + + idx := sessions.NewIndex(sessionsDir) + if err := idx.Refresh(); err != nil { + t.Fatalf("refresh: %v", err) + } + + server := NewServer(idx, nil, nil, sessionsDir, "", "", 3) + view, err := server.buildSessionView([]string{"2026", "03", "13", "parent.jsonl"}) + if err != nil { + t.Fatalf("buildSessionView: %v", err) + } + + if len(view.Items) != 3 { + t.Fatalf("expected 3 items, got %d", len(view.Items)) + } + call := view.Items[0] + if call.Subtype != "function_call" { + t.Fatalf("expected 
visible function call, got %#v", call) + } + if call.RoleLabel != "tool" { + t.Fatalf("expected tool role label, got %q", call.RoleLabel) + } + if !strings.Contains(call.Content, "review BillingStatusService") { + t.Fatalf("expected visible spawn request, got %q", call.Content) + } + + spawned := view.Items[1] + if spawned.AutoCtx { + t.Fatalf("expected spawned subagent item to be visible") + } + if spawned.SubagentID != "agent-1" { + t.Fatalf("expected spawned subagent id, got %q", spawned.SubagentID) + } + if spawned.SubagentStatusType != "spawned" { + t.Fatalf("expected spawned status, got %q", spawned.SubagentStatusType) + } + if spawned.SubagentSessionPath != "/2026/03/13/rollout-2026-03-13T09-23-02-agent-1.jsonl#page-top" { + t.Fatalf("unexpected spawned subagent session path: %q", spawned.SubagentSessionPath) + } + + item := view.Items[2] + if item.AutoCtx { + t.Fatalf("expected subagent notification to be visible") + } + if item.SubagentID != "agent-1" { + t.Fatalf("expected subagent id, got %q", item.SubagentID) + } + if item.SubagentNickname != "Anscombe" { + t.Fatalf("expected subagent nickname, got %q", item.SubagentNickname) + } + if item.SubagentStatusType != "completed" { + t.Fatalf("expected completed status, got %q", item.SubagentStatusType) + } + if item.SubagentRequest != "review BillingStatusService" { + t.Fatalf("expected subagent request, got %q", item.SubagentRequest) + } + if item.SubagentSessionPath != "/2026/03/13/rollout-2026-03-13T09-23-02-agent-1.jsonl#page-top" { + t.Fatalf("unexpected subagent session path: %q", item.SubagentSessionPath) + } +} + +func TestBuildSessionViewShowsSelectedResponseItemsAndSkipsEncryptedOnlyReasoning(t *testing.T) { + sessionsDir := t.TempDir() + datePath := filepath.Join(sessionsDir, "2026", "03", "18") + if err := os.MkdirAll(datePath, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + sessionPath := filepath.Join(datePath, "response-items.jsonl") + sessionData := "" + + 
"{\"timestamp\":\"2026-03-18T00:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-1\",\"timestamp\":\"2026-03-18T00:00:00Z\",\"cwd\":\"/tmp\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-18T00:00:01Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"web_search_call\",\"status\":\"completed\",\"action\":{\"type\":\"search\",\"query\":\"Codex CLI notify hook\",\"queries\":[\"Codex CLI notify hook\",\"OpenAI Codex notifications\"]}}}\n" + + "{\"timestamp\":\"2026-03-18T00:00:02Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"custom_tool_call_output\",\"call_id\":\"call_patch\",\"output\":\"{\\\"output\\\":\\\"Success. Updated the following files:\\nM /tmp/file.txt\\n\\\",\\\"metadata\\\":{\\\"exit_code\\\":0}}\"}}\n" + + "{\"timestamp\":\"2026-03-18T00:00:03Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"ghost_snapshot\",\"ghost_commit\":{\"id\":\"abc123\",\"parent\":\"def456\",\"preexisting_untracked_files\":[],\"preexisting_untracked_dirs\":[]}}}\n" + + "{\"timestamp\":\"2026-03-18T00:00:04Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"reasoning\",\"summary\":[],\"content\":null,\"encrypted_content\":\"secret\"}}\n" + + "{\"timestamp\":\"2026-03-18T00:00:05Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"reasoning\",\"summary\":[{\"type\":\"summary_text\",\"text\":\"Keep this\"}]}}\n" + if err := os.WriteFile(sessionPath, []byte(sessionData), 0o600); err != nil { + t.Fatalf("write session: %v", err) + } + + idx := sessions.NewIndex(sessionsDir) + if err := idx.Refresh(); err != nil { + t.Fatalf("refresh: %v", err) + } + + server := NewServer(idx, nil, nil, sessionsDir, "", "", 3) + view, err := server.buildSessionView([]string{"2026", "03", "18", "response-items.jsonl"}) + if err != nil { + t.Fatalf("buildSessionView: %v", err) + } + + if len(view.Items) != 4 { + t.Fatalf("expected 4 visible items, got %d", len(view.Items)) + } + if view.Items[0].Subtype != "web_search_call" || 
!strings.Contains(view.Items[0].Content, "Expanded queries") { + t.Fatalf("expected visible web search item, got %#v", view.Items[0]) + } + if view.Items[1].Subtype != "custom_tool_call_output" || !strings.Contains(view.Items[1].Content, "Success. Updated the following files:") { + t.Fatalf("expected visible custom tool output item, got %#v", view.Items[1]) + } + if strings.Contains(string(view.Items[1].HTML), "✅`) || !strings.Contains(string(view.Items[0].HTML), `class="update-plan-step is-pending"`) { + t.Fatalf("expected plan list html, got %s", view.Items[0].HTML) + } +} + +func TestBuildSessionViewExtractsFunctionCallOutputTextPayload(t *testing.T) { + sessionsDir := t.TempDir() + datePath := filepath.Join(sessionsDir, "2026", "03", "18") + if err := os.MkdirAll(datePath, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + sessionPath := filepath.Join(datePath, "function-call-output-text.jsonl") + sessionData := "" + + "{\"timestamp\":\"2026-03-18T00:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-1\",\"timestamp\":\"2026-03-18T00:00:00Z\",\"cwd\":\"/tmp\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-18T00:00:01Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"function_call_output\",\"call_id\":\"call_text\",\"output\":\"[{\\\"text\\\":\\\"Current user is a member of 2 teams:\\\\n\\\\n\\\\n{\\\\n \\\\\\\"teams\\\\\\\": [\\\\n {\\\\n \\\\\\\"id\\\\\\\": \\\\\\\"team-1\\\\\\\"\\\\n }\\\\n ]\\\\n}\\\\n\\\",\\\"type\\\":\\\"text\\\"}]\"}}\n" + if err := os.WriteFile(sessionPath, []byte(sessionData), 0o600); err != nil { + t.Fatalf("write session: %v", err) + } + + idx := sessions.NewIndex(sessionsDir) + if err := idx.Refresh(); err != nil { + t.Fatalf("refresh: %v", err) + } + + server := NewServer(idx, nil, nil, sessionsDir, "", "", 3) + view, err := server.buildSessionView([]string{"2026", "03", "18", "function-call-output-text.jsonl"}) + if err != nil { + t.Fatalf("buildSessionView: %v", err) + } 
+ + if len(view.Items) != 1 { + t.Fatalf("expected 1 visible item, got %d", len(view.Items)) + } + if !strings.Contains(view.Items[0].Content, "Current user is a member of 2 teams:") { + t.Fatalf("expected extracted output text, got %q", view.Items[0].Content) + } + if strings.Contains(view.Items[0].Content, "\"type\":\"text\"") || strings.Contains(view.Items[0].Content, "```") { + t.Fatalf("expected raw payload to stay hidden, got %q", view.Items[0].Content) + } + if strings.Contains(string(view.Items[0].HTML), "`); count != 2 { + t.Fatalf("expected 2 collapsed tool output details, got %d html=%s", count, html) + } + if strings.Contains(html, `
`) { + t.Fatalf("expected tool output details to stay collapsed by default, got %s", html) + } + if count := strings.Count(html, `Reveal output`); count != 2 { + t.Fatalf("expected reveal output summaries for grouped and standalone outputs, got %d html=%s", count, html) + } +} + +func TestSessionTemplateFetchesMarkdownOnDemand(t *testing.T) { + sessionsDir := t.TempDir() + datePath := filepath.Join(sessionsDir, "2026", "03", "19") + if err := os.MkdirAll(datePath, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + sessionPath := filepath.Join(datePath, "copy-markdown.jsonl") + sessionData := "" + + "{\"timestamp\":\"2026-03-19T00:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-copy\",\"timestamp\":\"2026-03-19T00:00:00Z\",\"cwd\":\"/tmp/project\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-19T00:00:01Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## My request for Codex:\\nCopy me\"}]}}\n" + if err := os.WriteFile(sessionPath, []byte(sessionData), 0o600); err != nil { + t.Fatalf("write session: %v", err) + } + + idx := sessions.NewIndex(sessionsDir) + if err := idx.Refresh(); err != nil { + t.Fatalf("refresh: %v", err) + } + + renderer, err := render.New() + if err != nil { + t.Fatalf("renderer: %v", err) + } + + server := NewServer(idx, nil, renderer, sessionsDir, "", "", 3) + view, err := server.buildSessionView([]string{"2026", "03", "19", "copy-markdown.jsonl"}) + if err != nil { + t.Fatalf("buildSessionView: %v", err) + } + + var buf bytes.Buffer + if err := renderer.Execute(&buf, "session", view); err != nil { + t.Fatalf("render session: %v", err) + } + + html := buf.String() + if !strings.Contains(html, `data-copy-url="/markdown/2026/03/19/copy-markdown.jsonl"`) { + t.Fatalf("expected thread markdown fetch url, got %s", html) + } + if !strings.Contains(html, 
`data-copy-url="/markdown/2026/03/19/copy-markdown.jsonl?line=2"`) { + t.Fatalf("expected item markdown fetch url, got %s", html) + } + if strings.Contains(html, `id="md-all"`) || strings.Contains(html, `id="md-2"`) { + t.Fatalf("expected markdown textareas to be removed, got %s", html) + } + if !strings.Contains(html, `scrollToElement(userSections[target], "smooth")`) || !strings.Contains(html, `setTimeout(function () { scrollToElement(target); }, 0);`) { + t.Fatalf("expected smooth user navigation but instant initial jump, got %s", html) + } +} + +func TestSessionTemplateGroupsConsecutiveToolRunsUnderOneHeader(t *testing.T) { + sessionsDir := t.TempDir() + datePath := filepath.Join(sessionsDir, "2026", "03", "18") + if err := os.MkdirAll(datePath, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + sessionPath := filepath.Join(datePath, "tool-run-group-header.jsonl") + sessionData := "" + + "{\"timestamp\":\"2026-03-18T00:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-group\",\"timestamp\":\"2026-03-18T00:00:00Z\",\"cwd\":\"/tmp\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-18T00:00:01Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"function_call\",\"name\":\"exec_command\",\"arguments\":\"{\\\"cmd\\\":\\\"pwd\\\",\\\"workdir\\\":\\\"/tmp/project\\\"}\",\"call_id\":\"call_a\"}}\n" + + "{\"timestamp\":\"2026-03-18T00:00:02Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"function_call\",\"name\":\"exec_command\",\"arguments\":\"{\\\"cmd\\\":\\\"ls\\\",\\\"workdir\\\":\\\"/tmp/project\\\"}\",\"call_id\":\"call_b\"}}\n" + + "{\"timestamp\":\"2026-03-18T00:00:03Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"function_call_output\",\"call_id\":\"call_a\",\"output\":\"/tmp/project\"}}\n" + + "{\"timestamp\":\"2026-03-18T00:00:04Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"function_call_output\",\"call_id\":\"call_b\",\"output\":\"file.txt\"}}\n" + if err := 
os.WriteFile(sessionPath, []byte(sessionData), 0o600); err != nil { + t.Fatalf("write session: %v", err) + } + + idx := sessions.NewIndex(sessionsDir) + if err := idx.Refresh(); err != nil { + t.Fatalf("refresh: %v", err) + } + + renderer, err := render.New() + if err != nil { + t.Fatalf("renderer: %v", err) + } + + server := NewServer(idx, nil, renderer, sessionsDir, "", "", 3) + view, err := server.buildSessionView([]string{"2026", "03", "18", "tool-run-group-header.jsonl"}) + if err != nil { + t.Fatalf("buildSessionView: %v", err) + } + + var buf bytes.Buffer + if err := renderer.Execute(&buf, "session", view); err != nil { + t.Fatalf("render session: %v", err) + } + + html := buf.String() + if count := strings.Count(html, `class="session-group-details">`); count != 1 { + t.Fatalf("expected 1 collapsed tool-run group details, got %d html=%s", count, html) + } + if strings.Contains(html, `class="session-group-details" open>`) { + t.Fatalf("expected tool-run group to stay collapsed by default, got %s", html) + } + if !strings.Contains(html, "Tool run 2") { + t.Fatalf("expected grouped tool-run title, got %s", html) + } + if !strings.Contains(html, `window.addEventListener("hashchange", handleInitialJump)`) || !strings.Contains(html, `openAncestorDetails(target);`) { + t.Fatalf("expected hash jump to open collapsed tool-run groups, got %s", html) + } + if !strings.Contains(html, `scrollToElement(summary || lastItem);`) { + t.Fatalf("expected default jump to keep collapsed tool-run groups closed, got %s", html) + } + if strings.Count(html, `class="session-header"`) != 0 { + t.Fatalf("expected grouped tool runs to hide repeated item headers, got %s", html) + } + if count := strings.Count(html, `class="tool-run-part-actions"`); count != 2 { + t.Fatalf("expected inline tool-run actions for each grouped item, got %d html=%s", count, html) + } + if count := strings.Count(html, `data-copy-url="/markdown/2026/03/18/tool-run-group-header.jsonl?line=`); count != 2 { + 
t.Fatalf("expected 2 grouped markdown copy actions, got %d html=%s", count, html) + } +} + +func TestSessionTemplateShowsSingleToolRunGroupHeader(t *testing.T) { + sessionsDir := t.TempDir() + datePath := filepath.Join(sessionsDir, "2026", "03", "18") + if err := os.MkdirAll(datePath, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + sessionPath := filepath.Join(datePath, "tool-run-single-group-header.jsonl") + sessionData := "" + + "{\"timestamp\":\"2026-03-18T00:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-group-1\",\"timestamp\":\"2026-03-18T00:00:00Z\",\"cwd\":\"/tmp\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-18T00:00:01Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"function_call\",\"name\":\"exec_command\",\"arguments\":\"{\\\"cmd\\\":\\\"pwd\\\",\\\"workdir\\\":\\\"/tmp/project\\\"}\",\"call_id\":\"call_a\"}}\n" + + "{\"timestamp\":\"2026-03-18T00:00:02Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"function_call_output\",\"call_id\":\"call_a\",\"output\":\"/tmp/project\"}}\n" + if err := os.WriteFile(sessionPath, []byte(sessionData), 0o600); err != nil { + t.Fatalf("write session: %v", err) + } + + idx := sessions.NewIndex(sessionsDir) + if err := idx.Refresh(); err != nil { + t.Fatalf("refresh: %v", err) + } + + renderer, err := render.New() + if err != nil { + t.Fatalf("renderer: %v", err) + } + + server := NewServer(idx, nil, renderer, sessionsDir, "", "", 3) + view, err := server.buildSessionView([]string{"2026", "03", "18", "tool-run-single-group-header.jsonl"}) + if err != nil { + t.Fatalf("buildSessionView: %v", err) + } + + var buf bytes.Buffer + if err := renderer.Execute(&buf, "session", view); err != nil { + t.Fatalf("render session: %v", err) + } + + html := buf.String() + if count := strings.Count(html, `class="session-group-details">`); count != 1 { + t.Fatalf("expected 1 collapsed single tool-run group details, got %d html=%s", count, html) + } + if 
!strings.Contains(html, "Tool run 1") { + t.Fatalf("expected single grouped tool-run title, got %s", html) + } + if strings.Count(html, `class="session-header"`) != 0 { + t.Fatalf("expected single grouped tool run to hide repeated item header, got %s", html) + } +} + +func TestHandleSessionMarkdownReturnsThreadAndGroupedLineMarkdown(t *testing.T) { + sessionsDir := t.TempDir() + datePath := filepath.Join(sessionsDir, "2026", "03", "18") + if err := os.MkdirAll(datePath, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + sessionPath := filepath.Join(datePath, "markdown.jsonl") + sessionData := "" + + "{\"timestamp\":\"2026-03-18T00:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-md\",\"timestamp\":\"2026-03-18T00:00:00Z\",\"cwd\":\"/tmp\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-18T00:00:01Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"function_call\",\"name\":\"exec_command\",\"arguments\":\"{\\\"cmd\\\":\\\"pwd\\\",\\\"workdir\\\":\\\"/tmp/project\\\"}\",\"call_id\":\"call_exec\"}}\n" + + "{\"timestamp\":\"2026-03-18T00:00:02Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"function_call_output\",\"call_id\":\"call_exec\",\"output\":\"/tmp/project\"}}\n" + + "{\"timestamp\":\"2026-03-18T00:00:03Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"assistant\",\"content\":[{\"type\":\"output_text\",\"text\":\"done\"}]}}\n" + if err := os.WriteFile(sessionPath, []byte(sessionData), 0o600); err != nil { + t.Fatalf("write session: %v", err) + } + + idx := sessions.NewIndex(sessionsDir) + if err := idx.Refresh(); err != nil { + t.Fatalf("refresh: %v", err) + } + + server := NewServer(idx, nil, nil, sessionsDir, "", "", 3) + + req := httptest.NewRequest(http.MethodGet, "/markdown/2026/03/18/markdown.jsonl", nil) + rec := httptest.NewRecorder() + server.ServeHTTP(rec, req) + + if rec.Code != http.StatusOK { + t.Fatalf("expected 200 for full markdown, got %d body=%s", 
rec.Code, rec.Body.String()) + } + body := rec.Body.String() + if !strings.Contains(body, "## Tool run") || !strings.Contains(body, "### Tool call") || !strings.Contains(body, "### Tool output") || !strings.Contains(body, "## Agent") { + t.Fatalf("expected grouped thread markdown, got %q", body) + } + if got := rec.Header().Get("Content-Type"); got != "text/plain; charset=utf-8" { + t.Fatalf("unexpected content type: %q", got) + } + + lineReq := httptest.NewRequest(http.MethodGet, "/markdown/2026/03/18/markdown.jsonl?line=3", nil) + lineRec := httptest.NewRecorder() + server.ServeHTTP(lineRec, lineReq) + + if lineRec.Code != http.StatusOK { + t.Fatalf("expected 200 for grouped line markdown, got %d body=%s", lineRec.Code, lineRec.Body.String()) + } + lineBody := lineRec.Body.String() + if !strings.Contains(lineBody, "## Tool run") || !strings.Contains(lineBody, "### Tool output") || strings.Contains(lineBody, "## Agent") { + t.Fatalf("expected grouped markdown for output line only, got %q", lineBody) + } +} + +func TestBuildSessionViewShowsBranchAndBranchAwareResumeCommand(t *testing.T) { + sessionsDir := t.TempDir() + datePath := filepath.Join(sessionsDir, "2026", "03", "19") + if err := os.MkdirAll(datePath, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + sessionPath := filepath.Join(datePath, "branch.jsonl") + sessionData := "" + + "{\"timestamp\":\"2026-03-19T00:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-branch\",\"timestamp\":\"2026-03-19T00:00:00Z\",\"cwd\":\"/tmp/project\",\"git\":{\"branch\":\"feature/session-branch\",\"commit_hash\":\"abc123\",\"repository_url\":\"https://github.com/cinkster/codex-manager.git\"},\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-19T00:00:01Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## My request for Codex:\\nShow branch\"}]}}\n" + if err := os.WriteFile(sessionPath, 
[]byte(sessionData), 0o600); err != nil { + t.Fatalf("write session: %v", err) + } + + idx := sessions.NewIndex(sessionsDir) + if err := idx.Refresh(); err != nil { + t.Fatalf("refresh: %v", err) + } + + renderer, err := render.New() + if err != nil { + t.Fatalf("renderer: %v", err) + } + + server := NewServer(idx, nil, renderer, sessionsDir, "", "", 3) + view, err := server.buildSessionView([]string{"2026", "03", "19", "branch.jsonl"}) + if err != nil { + t.Fatalf("buildSessionView: %v", err) + } + + if view.File.Branch != "feature/session-branch" { + t.Fatalf("expected branch on session view, got %q", view.File.Branch) + } + if view.File.BranchURL != "https://github.com/cinkster/codex-manager/tree/feature/session-branch" { + t.Fatalf("expected branch url on session view, got %q", view.File.BranchURL) + } + if view.ResumeCommand != "cd '/tmp/project'\ngit switch 'feature/session-branch'\ncodex resume session-branch" { + t.Fatalf("unexpected resume command: %q", view.ResumeCommand) + } + + var buf bytes.Buffer + if err := renderer.Execute(&buf, "session", view); err != nil { + t.Fatalf("render session: %v", err) + } + html := buf.String() + if !strings.Contains(html, "Branch: feature/session-branch") { + t.Fatalf("expected branch label in rendered html, got %s", html) + } + if !strings.Contains(html, `href="https://github.com/cinkster/codex-manager/tree/feature/session-branch"`) { + t.Fatalf("expected github branch link in rendered html, got %s", html) + } + if !strings.Contains(html, `target="_blank"`) { + t.Fatalf("expected branch link to open in new window, got %s", html) + } + if !strings.Contains(html, `class="session-page-branch-icon-svg"`) { + t.Fatalf("expected branch icon svg in rendered html, got %s", html) + } + if !strings.Contains(html, "git switch 'feature/session-branch'") { + t.Fatalf("expected branch-aware resume command in rendered html, got %s", html) + } +} + +func TestBuildSessionViewUsesRepositoryOverrideForBranchURL(t *testing.T) { + 
sessionsDir := t.TempDir() + datePath := filepath.Join(sessionsDir, "2026", "03", "19") + if err := os.MkdirAll(datePath, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + sessionPath := filepath.Join(datePath, "branch-override.jsonl") + sessionData := "" + + "{\"timestamp\":\"2026-03-19T00:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-branch-override\",\"timestamp\":\"2026-03-19T00:00:00Z\",\"cwd\":\"/home/makoto/codex-manager/internal/web\",\"git\":{\"branch\":\"feature/session-branch\",\"commit_hash\":\"abc123\",\"repository_url\":\"https://github.com/cinkster/codex-manager.git\"},\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-19T00:00:01Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## My request for Codex:\\nShow branch\"}]}}\n" + if err := os.WriteFile(sessionPath, []byte(sessionData), 0o600); err != nil { + t.Fatalf("write session: %v", err) + } + + overridePath := filepath.Join(t.TempDir(), "session_repository_overrides.json") + overrideData := `{ + "version": 1, + "rules": [ + { + "cwd_prefix": "/home/makoto/codex-manager", + "repository_url": "https://github.com/makoto-soracom/codex-manager.git" + } + ] +} +` + if err := os.WriteFile(overridePath, []byte(overrideData), 0o600); err != nil { + t.Fatalf("write overrides: %v", err) + } + + overrideStore, err := repooverride.LoadStore(overridePath) + if err != nil { + t.Fatalf("LoadStore: %v", err) + } + + idx := sessions.NewIndex(sessionsDir) + if err := idx.Refresh(); err != nil { + t.Fatalf("refresh: %v", err) + } + + server := NewServer(idx, nil, nil, sessionsDir, "", "", 3) + server.EnableRepoOverrides(overrideStore) + view, err := server.buildSessionView([]string{"2026", "03", "19", "branch-override.jsonl"}) + if err != nil { + t.Fatalf("buildSessionView: %v", err) + } + + if view.File.BranchURL != 
"https://github.com/makoto-soracom/codex-manager/tree/feature/session-branch" { + t.Fatalf("expected override branch url on session view, got %q", view.File.BranchURL) + } +} + +func TestBuildSessionViewOmitsSiblingSessionNavAndKeepsUserJumpControls(t *testing.T) { + sessionsDir := t.TempDir() + datePath := filepath.Join(sessionsDir, "2026", "03", "19") + if err := os.MkdirAll(datePath, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + olderPath := filepath.Join(datePath, "older.jsonl") + olderData := "" + + "{\"timestamp\":\"2026-03-19T00:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-older\",\"timestamp\":\"2026-03-19T00:00:00Z\",\"cwd\":\"/tmp/project\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-19T00:00:01Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## My request for Codex:\\nOlder\"}]}}\n" + if err := os.WriteFile(olderPath, []byte(olderData), 0o600); err != nil { + t.Fatalf("write older session: %v", err) + } + + currentPath := filepath.Join(datePath, "current.jsonl") + currentData := "" + + "{\"timestamp\":\"2026-03-19T00:10:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-current\",\"timestamp\":\"2026-03-19T00:10:00Z\",\"cwd\":\"/tmp/project\",\"git\":{\"branch\":\"feature/session-branch\",\"commit_hash\":\"abc123\"},\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-19T00:10:01Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## My request for Codex:\\nCurrent\"}]}}\n" + if err := os.WriteFile(currentPath, []byte(currentData), 0o600); err != nil { + t.Fatalf("write current session: %v", err) + } + + newerPath := filepath.Join(datePath, "newer.jsonl") + newerData := "" + + 
"{\"timestamp\":\"2026-03-19T00:20:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-newer\",\"timestamp\":\"2026-03-19T00:20:00Z\",\"cwd\":\"/tmp/project\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-19T00:20:01Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"## My request for Codex:\\nNewer\"}]}}\n" + if err := os.WriteFile(newerPath, []byte(newerData), 0o600); err != nil { + t.Fatalf("write newer session: %v", err) + } + + idx := sessions.NewIndex(sessionsDir) + if err := idx.Refresh(); err != nil { + t.Fatalf("refresh: %v", err) + } + + renderer, err := render.New() + if err != nil { + t.Fatalf("renderer: %v", err) + } + + server := NewServer(idx, nil, renderer, sessionsDir, "", "", 3) + view, err := server.buildSessionView([]string{"2026", "03", "19", "current.jsonl"}) + if err != nil { + t.Fatalf("buildSessionView: %v", err) + } + + var buf bytes.Buffer + if err := renderer.Execute(&buf, "session", view); err != nil { + t.Fatalf("render session: %v", err) + } + + html := buf.String() + if !strings.Contains(html, "CWD: /tmp/project") { + t.Fatalf("expected cwd in toolbar, got %s", html) + } + if !strings.Contains(html, "Branch: feature/session-branch") { + t.Fatalf("expected branch in toolbar, got %s", html) + } + if !strings.Contains(html, "Previous user message") || !strings.Contains(html, "Next user message") || !strings.Contains(html, "Last user message") { + t.Fatalf("expected user jump controls in toolbar, got %s", html) + } + if strings.Contains(html, "/2026/03/19/older.jsonl#last-user") || strings.Contains(html, "/2026/03/19/newer.jsonl#last-user") { + t.Fatalf("expected sibling session navigation links to be removed, got %s", html) + } +} + +func TestBuildSessionViewRendersApplyPatchAsPatchBlock(t *testing.T) { + sessionsDir := t.TempDir() + datePath := filepath.Join(sessionsDir, "2026", "03", "18") + if err := 
os.MkdirAll(datePath, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + sessionPath := filepath.Join(datePath, "apply-patch.jsonl") + sessionData := "" + + "{\"timestamp\":\"2026-03-18T00:00:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-1\",\"timestamp\":\"2026-03-18T00:00:00Z\",\"cwd\":\"/tmp\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-18T00:00:01Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"custom_tool_call\",\"status\":\"completed\",\"call_id\":\"call_patch\",\"name\":\"apply_patch\",\"input\":\"*** Begin Patch\\n*** Update File: /tmp/file.txt\\n@@\\n-old\\n+new\\n*** End Patch\"}}\n" + if err := os.WriteFile(sessionPath, []byte(sessionData), 0o600); err != nil { + t.Fatalf("write session: %v", err) + } + + idx := sessions.NewIndex(sessionsDir) + if err := idx.Refresh(); err != nil { + t.Fatalf("refresh: %v", err) + } + + server := NewServer(idx, nil, nil, sessionsDir, "", "", 3) + view, err := server.buildSessionView([]string{"2026", "03", "18", "apply-patch.jsonl"}) + if err != nil { + t.Fatalf("buildSessionView: %v", err) + } + + if len(view.Items) != 1 { + t.Fatalf("expected 1 visible item, got %d", len(view.Items)) + } + html := string(view.Items[0].HTML) + if !strings.Contains(html, `class="patch-block"`) { + t.Fatalf("expected patch block html, got %s", html) + } + if !strings.Contains(html, `patch-line-add">+new`) || !strings.Contains(html, `patch-line-del">-old`) { + t.Fatalf("expected add/delete patch lines, got %s", html) + } + if strings.Contains(html, "\\nCurrent working directory: /tmp\\n\"}]}}\n" + + "{\"timestamp\":\"2026-03-13T00:23:04Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"Please inspect the parser.\"}]}}\n" + if err := os.WriteFile(subagentPath, []byte(subagentData), 0o600); err != nil { + t.Fatalf("write subagent: %v", err) + } + + idx := sessions.NewIndex(sessionsDir) + if err := 
idx.Refresh(); err != nil { + t.Fatalf("refresh: %v", err) + } + + server := NewServer(idx, nil, nil, sessionsDir, "", "", 3) + view, err := server.buildSessionView([]string{"2026", "03", "13", "subagent.jsonl"}) + if err != nil { + t.Fatalf("buildSessionView: %v", err) + } + + if !view.IsSubagentThread { + t.Fatalf("expected subagent thread view") + } + if view.SubagentDisplayName != "Anscombe" { + t.Fatalf("expected subagent display name, got %q", view.SubagentDisplayName) + } + if view.SubagentDisplayRole != "explorer" { + t.Fatalf("expected subagent display role, got %q", view.SubagentDisplayRole) + } + if view.ParentSessionPath != "/2026/03/13/parent.jsonl#page-top" { + t.Fatalf("unexpected parent session path: %q", view.ParentSessionPath) + } + if view.UserNavLabel != "agent" { + t.Fatalf("expected agent nav label, got %q", view.UserNavLabel) + } + if len(view.Items) != 3 { + t.Fatalf("expected 3 items, got %d", len(view.Items)) + } + + subagentItem := view.Items[0] + if subagentItem.Title != "Subagent" { + t.Fatalf("expected subagent title, got %q", subagentItem.Title) + } + if subagentItem.RoleLabel != "subagent" { + t.Fatalf("expected subagent role label, got %q", subagentItem.RoleLabel) + } + if subagentItem.SpeakerClass != "subagent" { + t.Fatalf("expected subagent speaker class, got %q", subagentItem.SpeakerClass) + } + if subagentItem.SpeakerName != "Anscombe" { + t.Fatalf("expected subagent speaker name, got %q", subagentItem.SpeakerName) + } + if subagentItem.SpeakerRole != "explorer" { + t.Fatalf("expected subagent speaker role, got %q", subagentItem.SpeakerRole) + } + + autoContextItem := view.Items[1] + if !autoContextItem.AutoCtx { + t.Fatalf("expected auto context item") + } + if autoContextItem.Title != "Agent" { + t.Fatalf("expected auto context title to be Agent, got %q", autoContextItem.Title) + } + if autoContextItem.RoleLabel != "agent" { + t.Fatalf("expected auto context role label, got %q", autoContextItem.RoleLabel) + } + if 
autoContextItem.SpeakerClass != "agent" { + t.Fatalf("expected auto context speaker class, got %q", autoContextItem.SpeakerClass) + } + + agentItem := view.Items[2] + if agentItem.Title != "Agent" { + t.Fatalf("expected agent title, got %q", agentItem.Title) + } + if agentItem.RoleLabel != "agent" { + t.Fatalf("expected agent role label, got %q", agentItem.RoleLabel) + } + if agentItem.SpeakerClass != "agent" { + t.Fatalf("expected agent speaker class, got %q", agentItem.SpeakerClass) + } +} + +func TestBuildSessionViewsWithSnippetsUsesSemanticSpeakerLabels(t *testing.T) { + sessionsDir := t.TempDir() + datePath := filepath.Join(sessionsDir, "2026", "03", "13") + if err := os.MkdirAll(datePath, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + standardPath := filepath.Join(datePath, "standard.jsonl") + standardData := "" + + "{\"timestamp\":\"2026-03-13T00:10:00Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"standard-1\",\"timestamp\":\"2026-03-13T00:10:00Z\",\"cwd\":\"/tmp\",\"originator\":\"cli\",\"cli_version\":\"0.1\"}}\n" + + "{\"timestamp\":\"2026-03-13T00:10:01Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"Please check the list view.\"}]}}\n" + + "{\"timestamp\":\"2026-03-13T00:10:02Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"assistant\",\"content\":[{\"type\":\"output_text\",\"text\":\"I updated the session snippet.\"}]}}\n" + if err := os.WriteFile(standardPath, []byte(standardData), 0o600); err != nil { + t.Fatalf("write standard session: %v", err) + } + + subagentPath := filepath.Join(datePath, "subagent.jsonl") + subagentData := "" + + 
"{\"timestamp\":\"2026-03-13T00:23:02Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"agent-1\",\"forked_from_id\":\"parent-1\",\"timestamp\":\"2026-03-13T00:23:02Z\",\"cwd\":\"/tmp\",\"originator\":\"codex_cli_rs\",\"cli_version\":\"0.114.0\",\"source\":{\"subagent\":{\"thread_spawn\":{\"parent_thread_id\":\"parent-1\",\"depth\":1,\"agent_nickname\":\"Anscombe\",\"agent_role\":\"explorer\"}}},\"agent_nickname\":\"Anscombe\",\"agent_role\":\"explorer\"}}\n" + + "{\"timestamp\":\"2026-03-13T00:23:03Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"assistant\",\"content\":[{\"type\":\"output_text\",\"text\":\"I checked the code.\"}]}}\n" + + "{\"timestamp\":\"2026-03-13T00:23:04Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"Please inspect the parser.\"}]}}\n" + if err := os.WriteFile(subagentPath, []byte(subagentData), 0o600); err != nil { + t.Fatalf("write subagent session: %v", err) + } + + info, err := os.Stat(standardPath) + if err != nil { + t.Fatalf("stat standard session: %v", err) + } + subagentInfo, err := os.Stat(subagentPath) + if err != nil { + t.Fatalf("stat subagent session: %v", err) + } + files := []sessions.SessionFile{ + { + Name: "standard.jsonl", + Path: standardPath, + Date: sessions.DateKey{Year: "2026", Month: "03", Day: "13"}, + ModTime: info.ModTime(), + Size: info.Size(), + }, + { + Name: "subagent.jsonl", + Path: subagentPath, + Date: sessions.DateKey{Year: "2026", Month: "03", Day: "13"}, + ModTime: subagentInfo.ModTime(), + Size: subagentInfo.Size(), + }, + } + + server := NewServer(nil, nil, nil, "", "", "", 3) + views := server.buildSessionViewsWithSnippets(files) + if len(views) != 2 { + t.Fatalf("expected 2 views, got %d", len(views)) + } + + standardView := views[0] + if standardView.LastUserSnippetTitle != "User" { + t.Fatalf("expected standard user title, got %q", standardView.LastUserSnippetTitle) + } + if 
standardView.LastUserSnippetClass != "user" { + t.Fatalf("expected standard user class, got %q", standardView.LastUserSnippetClass) + } + if standardView.LastAssistantSnippetTitle != "Agent" { + t.Fatalf("expected standard assistant title, got %q", standardView.LastAssistantSnippetTitle) + } + if standardView.LastAssistantSnippetClass != "agent" { + t.Fatalf("expected standard assistant class, got %q", standardView.LastAssistantSnippetClass) + } + + subagentView := views[1] + if subagentView.LastUserSnippetTitle != "Agent" { + t.Fatalf("expected subagent user title to be Agent, got %q", subagentView.LastUserSnippetTitle) + } + if subagentView.LastUserSnippetClass != "agent" { + t.Fatalf("expected subagent user class to be agent, got %q", subagentView.LastUserSnippetClass) + } + if subagentView.LastAssistantSnippetTitle != "Subagent" { + t.Fatalf("expected subagent assistant title to be Subagent, got %q", subagentView.LastAssistantSnippetTitle) + } + if subagentView.LastAssistantSnippetClass != "subagent" { + t.Fatalf("expected subagent assistant class to be subagent, got %q", subagentView.LastAssistantSnippetClass) + } +} + +func TestBuildSessionViewsUseDisplayNameFromSessionIndex(t *testing.T) { + root := t.TempDir() + sessionsDir := filepath.Join(root, "sessions") + datePath := filepath.Join(sessionsDir, "2026", "03", "13") + if err := os.MkdirAll(datePath, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + if err := os.WriteFile(filepath.Join(root, "session_index.jsonl"), []byte("{\"id\":\"session-1\",\"thread_name\":\"pr461 #3\",\"updated_at\":\"2026-03-13T06:09:42Z\"}\n"), 0o600); err != nil { + t.Fatalf("write session index: %v", err) + } + + fileName := "rollout-2026-03-13T13-36-02-session-1.jsonl" + filePath := filepath.Join(datePath, fileName) + data := "" + + 
"{\"timestamp\":\"2026-03-13T00:25:44Z\",\"type\":\"session_meta\",\"payload\":{\"id\":\"session-1\",\"timestamp\":\"2026-03-13T00:25:44Z\",\"cwd\":\"/tmp\",\"originator\":\"cli\",\"cli_version\":\"0.1\",\"source\":\"cli\"}}\n" + + "{\"timestamp\":\"2026-03-13T00:25:45Z\",\"type\":\"response_item\",\"payload\":{\"type\":\"message\",\"role\":\"user\",\"content\":[{\"type\":\"input_text\",\"text\":\"Please check the display name.\"}]}}\n" + if err := os.WriteFile(filePath, []byte(data), 0o600); err != nil { + t.Fatalf("write session: %v", err) + } + + idx := sessions.NewIndex(sessionsDir) + if err := idx.Refresh(); err != nil { + t.Fatalf("refresh: %v", err) + } + + server := NewServer(idx, nil, nil, sessionsDir, "", "", 3) + view, err := server.buildSessionView([]string{"2026", "03", "13", fileName}) + if err != nil { + t.Fatalf("buildSessionView: %v", err) + } + + want := "pr461 #3 (" + fileName + ")" + if view.File.DisplayName != want { + t.Fatalf("expected display name %q, got %q", want, view.File.DisplayName) + } + + date, ok := sessions.ParseDate("2026", "03", "13") + if !ok { + t.Fatal("expected valid date") + } + listViews := server.buildSessionViewsWithSnippets(idx.SessionsByDate(date)) + if len(listViews) != 1 { + t.Fatalf("expected 1 list view, got %d", len(listViews)) + } + if listViews[0].DisplayName != want { + t.Fatalf("expected list display name %q, got %q", want, listViews[0].DisplayName) + } +} diff --git a/internal/web/share.go b/internal/web/share.go index 535c7fc..3129c4a 100644 --- a/internal/web/share.go +++ b/internal/web/share.go @@ -1,6 +1,7 @@ package web import ( + "fmt" "net/http" "os" "path/filepath" @@ -16,7 +17,15 @@ func NewShareServer(shareDir string) http.Handler { } path := strings.TrimPrefix(r.URL.Path, "/") - if path == "" || strings.Contains(path, "/") || strings.Contains(path, "\\") || strings.Contains(path, "..") { + if path == "" { + w.Header().Set("Content-Type", "text/html; charset=utf-8") + w.WriteHeader(http.StatusOK) + 
if r.Method == http.MethodGet { + _, _ = fmt.Fprint(w, "Codex Manager Share Server

Codex Manager Share Server

This server only serves exact share filenames. Use the Share button in the main UI and open the generated URL, for example:

http://localhost:8081/<uuid>.html

") + } + return + } + if strings.Contains(path, "/") || strings.Contains(path, "\\") || strings.Contains(path, "..") { http.NotFound(w, r) return }