Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
63 changes: 55 additions & 8 deletions src/ouroboros/cli/commands/pm.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,13 @@
from ouroboros.bigbang.interview import InterviewRound
from ouroboros.cli.formatters import console
from ouroboros.cli.formatters.panels import print_error, print_info, print_success, print_warning
from ouroboros.config import get_clarification_model, get_llm_backend
from ouroboros.core.types import Result
from ouroboros.providers.factory import (
create_llm_adapter,
resolve_llm_backend,
resolve_llm_permission_mode,
)

app = typer.Typer(
name="pm",
Expand Down Expand Up @@ -49,13 +55,13 @@ def pm_command(
),
] = None,
model: Annotated[
str,
str | None,
typer.Option(
"--model",
"-m",
help="LLM model to use for the PM interview.",
),
] = "anthropic/claude-sonnet-4-20250514",
] = None,
debug: Annotated[
bool,
typer.Option(
Expand Down Expand Up @@ -89,17 +95,33 @@ def pm_command(
else:
print_info("Starting new PM interview session...")

console.print(f" Model: [dim]{model}[/]\n")

try:
resolved_backend = resolve_llm_backend(get_llm_backend())
resolved_model = model or get_clarification_model(resolved_backend)
permission_mode = resolve_llm_permission_mode(
backend=resolved_backend,
use_case="interview",
)

console.print(f" Model: [dim]{resolved_model}[/]\n")
if permission_mode == "bypassPermissions":
print_warning(
"Interview backend "
f"'{resolved_backend}' uses bypassPermissions for question generation."
)

asyncio.run(
_run_pm_interview(
resume_id=resume,
model=model,
model=resolved_model,
backend=resolved_backend,
debug=debug,
output_dir=output,
)
)
except ValueError as exc:
print_error(str(exc))
raise typer.Exit(code=1) from exc
except KeyboardInterrupt:
print_info("\nPM interview interrupted. Progress has been saved.")
raise typer.Exit(code=0)
Expand Down Expand Up @@ -298,10 +320,28 @@ def _save_cli_pm_meta(session_id: str, engine: Any) -> None:
meta_path.write_text(json.dumps(meta, ensure_ascii=False, indent=2), encoding="utf-8")


def _make_message_callback(debug: bool):
"""Create a debug callback for streaming local agent status."""
if not debug:
return None

def callback(msg_type: str, content: str) -> None:
if msg_type == "thinking":
first_line = content.split("\n")[0].strip()
display = first_line[:100] + "..." if len(first_line) > 100 else first_line
if display:
console.print(f" [dim]thinking:[/] {display}")
elif msg_type == "tool":
console.print(f" [yellow]tool:[/] {content}")

return callback


async def _run_pm_interview(
resume_id: str | None,
model: str,
debug: bool, # noqa: ARG001
backend: str | None,
debug: bool,
output_dir: str | None = None,
) -> None:
"""Run the PM interview loop.
Expand All @@ -312,13 +352,20 @@ async def _run_pm_interview(
Args:
resume_id: Optional session ID to resume.
model: LLM model identifier.
backend: Resolved LLM backend name.
debug: Enable debug output.
output_dir: Optional output directory for the generated PM document.
"""
from ouroboros.bigbang.pm_interview import PMInterviewEngine
from ouroboros.providers.litellm_adapter import LiteLLMAdapter

adapter = LiteLLMAdapter()
adapter = create_llm_adapter(
backend=backend,
use_case="interview",
allowed_tools=None,
max_turns=5,
on_message=_make_message_callback(debug),
cwd=Path.cwd(),
)
engine = PMInterviewEngine.create(llm_adapter=adapter, model=model)

# Check for existing PM seeds before starting a new session
Expand Down
176 changes: 176 additions & 0 deletions tests/unit/cli/test_pm_runtime_adapter.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,176 @@
"""Tests for PM CLI adapter selection and runtime wiring."""

from __future__ import annotations

import asyncio
from pathlib import Path
from types import SimpleNamespace
from unittest.mock import AsyncMock, Mock, patch

import typer

from ouroboros.cli.commands.pm import _run_pm_interview, pm_command
from ouroboros.core.types import Result


def test_run_pm_interview_uses_factory_for_interview_adapter_on_resume() -> None:
    """Resume mode should build the adapter through the shared interview factory."""
    fake_adapter = object()
    # Engine whose load_state reports an error, which should make the CLI exit.
    fake_engine = SimpleNamespace(
        load_state=AsyncMock(return_value=SimpleNamespace(is_err=True, error="boom")),
    )

    factory_patch = patch(
        "ouroboros.cli.commands.pm.create_llm_adapter", return_value=fake_adapter
    )
    create_patch = patch(
        "ouroboros.bigbang.pm_interview.PMInterviewEngine.create", return_value=fake_engine
    )
    with factory_patch as mock_factory, create_patch as mock_create:
        exited = False
        try:
            asyncio.run(
                _run_pm_interview(
                    resume_id="session-123",
                    model="default",
                    backend="codex",
                    debug=False,
                    output_dir=None,
                )
            )
        except typer.Exit:
            exited = True
        if not exited:
            raise AssertionError("Expected typer.Exit when mocked load_state returns an error")

    # The adapter must come from the shared factory with interview settings.
    mock_factory.assert_called_once_with(
        backend="codex",
        use_case="interview",
        allowed_tools=None,
        max_turns=5,
        on_message=None,
        cwd=Path.cwd(),
    )
    mock_create.assert_called_once_with(llm_adapter=fake_adapter, model="default")


def test_run_pm_interview_uses_interview_runtime_options_on_new_session() -> None:
    """New sessions should pass backend-aware interview options into the factory."""
    stub_adapter = object()
    # A completed interview state so the session finishes without prompting further.
    final_state = SimpleNamespace(
        interview_id="pm-session-123",
        is_complete=True,
        rounds=[],
    )
    stub_engine = SimpleNamespace(
        get_opening_question=lambda: "What do you want to build?",
        ask_opening_and_start=AsyncMock(return_value=Result.ok(final_state)),
        deferred_items=[],
        decide_later_items=[],
        codebase_context="",
        format_decide_later_summary=lambda: "",
        _reframe_map={},
        _selected_brownfield_repos=[],
        classifications=[],
    )

    with (
        patch(
            "ouroboros.cli.commands.pm.create_llm_adapter", return_value=stub_adapter
        ) as mock_factory,
        patch(
            "ouroboros.bigbang.pm_interview.PMInterviewEngine.create", return_value=stub_engine
        ) as mock_create,
        patch("ouroboros.cli.commands.pm._check_existing_pm_seeds", return_value=True),
        patch("ouroboros.cli.commands.pm._load_brownfield_from_db", return_value=[]),
        patch("ouroboros.cli.commands.pm._select_repos", return_value=[]),
        patch("ouroboros.cli.commands.pm._save_cli_pm_meta"),
        patch("ouroboros.cli.commands.pm.console.input", return_value="Build a PM workflow"),
    ):
        asyncio.run(
            _run_pm_interview(
                resume_id=None,
                model="default",
                backend="codex",
                debug=True,
                output_dir=None,
            )
        )

    mock_factory.assert_called_once()
    factory_kwargs = mock_factory.call_args.kwargs
    # Every fixed interview runtime option must be forwarded verbatim.
    expected_options = {
        "backend": "codex",
        "use_case": "interview",
        "allowed_tools": None,
        "max_turns": 5,
        "cwd": Path.cwd(),
    }
    for option, value in expected_options.items():
        assert factory_kwargs[option] == value
    # debug=True means a real streaming callback, not None.
    assert callable(factory_kwargs["on_message"])
    mock_create.assert_called_once_with(llm_adapter=stub_adapter, model="default")
    stub_engine.ask_opening_and_start.assert_called_once_with(
        user_response="Build a PM workflow",
        brownfield_repos=None,
    )


def test_pm_command_uses_backend_safe_default_model() -> None:
    """CLI entrypoint should normalize the default model for the configured backend."""
    # No subcommand: pm_command runs the interview flow itself.
    ctx = SimpleNamespace(invoked_subcommand=None)

    with (
        patch("ouroboros.cli.commands.pm.get_llm_backend", return_value="codex"),
        patch("ouroboros.cli.commands.pm.get_clarification_model", return_value="default"),
        patch("ouroboros.cli.commands.pm.resolve_llm_backend", return_value="codex"),
        patch(
            "ouroboros.cli.commands.pm.resolve_llm_permission_mode",
            return_value="bypassPermissions",
        ),
        patch(
            "ouroboros.cli.commands.pm._run_pm_interview", new=Mock(return_value=object())
        ) as mock_run,
        patch("ouroboros.cli.commands.pm.asyncio.run"),
        patch("ouroboros.cli.commands.pm.print_warning") as mock_warning,
    ):
        pm_command(
            ctx=ctx,
            resume=None,
            output=None,
            model=None,
            debug=False,
        )

    # model=None must be replaced by the backend's clarification default.
    mock_run.assert_called_once_with(
        resume_id=None,
        model="default",
        backend="codex",
        debug=False,
        output_dir=None,
    )
    # bypassPermissions triggers exactly one user-facing warning.
    mock_warning.assert_called_once()
    warning_text = mock_warning.call_args.args[0]
    assert "bypassPermissions" in warning_text


def test_pm_command_formats_factory_errors() -> None:
    """Backend/config errors should exit cleanly instead of surfacing a traceback."""
    ctx = SimpleNamespace(invoked_subcommand=None)

    with (
        patch("ouroboros.cli.commands.pm.get_llm_backend", return_value="opencode"),
        patch("ouroboros.cli.commands.pm.get_clarification_model", return_value="default"),
        patch("ouroboros.cli.commands.pm.print_error") as mock_error,
    ):
        exit_seen = False
        try:
            pm_command(
                ctx=ctx,
                resume=None,
                output=None,
                model=None,
                debug=False,
            )
        except typer.Exit as exc:
            exit_seen = True
            # The CLI must signal failure, not success.
            assert exc.exit_code == 1
        if not exit_seen:
            raise AssertionError("Expected typer.Exit for backend configuration errors")

    mock_error.assert_called_once_with(
        "OpenCode LLM adapter is not yet available. Supported backends: claude_code, codex, litellm"
    )
Loading