From 804f90193af718ded7957ab8b9a2d5d060a67097 Mon Sep 17 00:00:00 2001 From: "dahyman91@gmail.com" Date: Tue, 24 Mar 2026 14:55:12 -0400 Subject: [PATCH 1/2] [dh] feat: add `coast harness-setup-prompt` command and integrate into installation flow Add a new CLI command that prints harness-specific setup prompts for AI coding agents. Accepts multiple --harness flags and outputs a combined prompt with section headers and skip instructions when multiple harnesses are selected. The installation prompt now seamlessly transitions into skills setup (step 11) after the Coastfile is configured, recommending the user set up harness skills for the harnesses they selected. --- .../src/commands/harness_setup_prompt.rs | 165 ++++++++++++++++++ coast-cli/src/commands/mod.rs | 1 + coast-cli/src/lib.rs | 4 + docs/installation_prompt.txt | 11 ++ 4 files changed, 181 insertions(+) create mode 100644 coast-cli/src/commands/harness_setup_prompt.rs diff --git a/coast-cli/src/commands/harness_setup_prompt.rs b/coast-cli/src/commands/harness_setup_prompt.rs new file mode 100644 index 0000000..76c71bb --- /dev/null +++ b/coast-cli/src/commands/harness_setup_prompt.rs @@ -0,0 +1,165 @@ +/// `coast harness-setup-prompt` — print harness-specific setup prompts for AI coding agents. +/// +/// This command is standalone and does not require the daemon to be running. +/// The prompt texts are compiled into the binary via `include_str!()`. +/// +/// Accepts one or more `--harness` flags. When called with multiple harnesses, +/// the output includes section headers and a preamble telling the agent which +/// steps to skip (CLI check and worktree_dir update are already handled by the +/// installation flow). 
+use anyhow::{bail, Result};
+use clap::Args;
+
+const CLAUDE_CODE: &str = include_str!("../../../docs/harnesses/claude_code_setup_prompt.txt");
+const CODEX: &str = include_str!("../../../docs/harnesses/codex_setup_prompt.txt");
+const CURSOR: &str = include_str!("../../../docs/harnesses/cursor_setup_prompt.txt");
+const CONDUCTOR: &str = include_str!("../../../docs/harnesses/conductor_setup_prompt.txt");
+const T3_CODE: &str = include_str!("../../../docs/harnesses/t3_code_setup_prompt.txt");
+const SHEP: &str = include_str!("../../../docs/harnesses/shep_setup_prompt.txt");
+
+/// Known harness names and their display labels.
+const KNOWN_HARNESSES: &[(&str, &str)] = &[
+    ("claude-code", "Claude Code"),
+    ("codex", "OpenAI Codex"),
+    ("cursor", "Cursor"),
+    ("conductor", "Conductor"),
+    ("t3-code", "T3 Code"),
+    ("shep", "Shep"),
+];
+
+/// Arguments for `coast harness-setup-prompt`.
+#[derive(Debug, Args)]
+pub struct HarnessSetupPromptArgs {
+    /// Harness(es) to print setup prompts for. Can be specified multiple times.
+    /// Valid values: claude-code, codex, cursor, conductor, t3-code, shep
+    #[arg(long = "harness", required = true)]
+    harnesses: Vec<String>,
+}
+
+fn resolve_prompt(name: &str) -> Result<(&str, &str)> {
+    match name {
+        "claude-code" => Ok(("Claude Code", CLAUDE_CODE)),
+        "codex" => Ok(("OpenAI Codex", CODEX)),
+        "cursor" => Ok(("Cursor", CURSOR)),
+        "conductor" => Ok(("Conductor", CONDUCTOR)),
+        "t3-code" => Ok(("T3 Code", T3_CODE)),
+        "shep" => Ok(("Shep", SHEP)),
+        _ => {
+            let valid: Vec<&str> = KNOWN_HARNESSES.iter().map(|(k, _)| *k).collect();
+            bail!(
+                "Unknown harness: {name}\nValid harnesses: {}",
+                valid.join(", ")
+            );
+        }
+    }
+}
+
+/// Print the harness setup prompt(s) to stdout.
+pub async fn execute(args: &HarnessSetupPromptArgs) -> Result<()> {
+    // Validate all harness names up front before printing anything.
+    let resolved: Vec<(&str, &str)> = args
+        .harnesses
+        .iter()
+        .map(|h| resolve_prompt(h))
+        .collect::<Result<Vec<_>>>()?;
+
+    let multiple = resolved.len() > 1;
+
+    if multiple {
+        print!(
+            "\
+=== HARNESS SKILLS SETUP ===
+
+You are setting up Coast skills for multiple harnesses. Process each section
+below in order, completing one harness before moving to the next.
+
+For EVERY harness below, skip these steps (they are already done):
+- Step 1 (Check for Coast CLI) — already verified.
+- The \"Update the Coastfile\" / worktree_dir step — already configured.
+
+"
+        );
+    }
+
+    for (i, (label, prompt)) in resolved.iter().enumerate() {
+        if multiple {
+            print!("=== HARNESS {}: {} ===\n\n", i + 1, label);
+        }
+        print!("{prompt}");
+        if !prompt.ends_with('\n') {
+            println!();
+        }
+        if multiple && i + 1 < resolved.len() {
+            println!();
+        }
+    }
+
+    Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use clap::Parser;
+
+    #[derive(Debug, Parser)]
+    struct TestCli {
+        #[command(flatten)]
+        args: HarnessSetupPromptArgs,
+    }
+
+    #[test]
+    fn test_parse_single_harness() {
+        let cli = TestCli::try_parse_from(["test", "--harness", "claude-code"]).unwrap();
+        assert_eq!(cli.args.harnesses, vec!["claude-code"]);
+    }
+
+    #[test]
+    fn test_parse_multiple_harnesses() {
+        let cli = TestCli::try_parse_from([
+            "test",
+            "--harness",
+            "claude-code",
+            "--harness",
+            "codex",
+            "--harness",
+            "cursor",
+        ])
+        .unwrap();
+        assert_eq!(cli.args.harnesses, vec!["claude-code", "codex", "cursor"]);
+    }
+
+    #[test]
+    fn test_requires_at_least_one_harness() {
+        let result = TestCli::try_parse_from(["test"]);
+        assert!(result.is_err());
+    }
+
+    #[test]
+    fn test_resolve_all_known_harnesses() {
+        for (key, expected_label) in KNOWN_HARNESSES {
+            let (label, prompt) = resolve_prompt(key).unwrap();
+            assert_eq!(label, *expected_label);
+            assert!(!prompt.is_empty(), "prompt for {key} should not be empty");
+        }
+    }
+
+    #[test]
+    fn test_resolve_unknown_harness() {
+        let result = resolve_prompt("vim");
+        assert!(result.is_err());
+        let err = result.unwrap_err().to_string();
+        assert!(err.contains("Unknown harness: vim"));
+        assert!(err.contains("claude-code"));
+    }
+
+    #[test]
+    fn test_prompts_contain_expected_content() {
+        assert!(CLAUDE_CODE.contains("Claude Code"));
+        assert!(CODEX.contains("Codex"));
+        assert!(CURSOR.contains("Cursor"));
+        assert!(CONDUCTOR.contains("Conductor"));
+        assert!(T3_CODE.contains("T3 Code"));
+        assert!(SHEP.contains("Shep"));
+    }
+}
diff --git a/coast-cli/src/commands/mod.rs b/coast-cli/src/commands/mod.rs
index a924009..499ad68 100644
--- a/coast-cli/src/commands/mod.rs
+++ b/coast-cli/src/commands/mod.rs
@@ -16,6 +16,7 @@ pub mod docker;
 pub mod docs;
 pub mod doctor;
 pub mod exec;
+pub mod harness_setup_prompt;
 pub mod installation_prompt;
 pub mod logs;
 pub mod lookup;
diff --git a/coast-cli/src/lib.rs b/coast-cli/src/lib.rs
index 5f376d3..4dc8450 100644
--- a/coast-cli/src/lib.rs
+++ b/coast-cli/src/lib.rs
@@ -62,6 +62,9 @@ pub enum Commands {
     /// Print the Coast runtime skills prompt for AI coding agents.
     #[command(name = "skills-prompt")]
     SkillsPrompt(commands::skills_prompt::SkillsPromptArgs),
+    /// Print harness-specific setup prompts for AI coding agents.
+    #[command(name = "harness-setup-prompt")]
+    HarnessSetupPrompt(commands::harness_setup_prompt::HarnessSetupPromptArgs),
 
     // --- Project-explicit commands (project as positional arg or self-resolved) ---
     /// Build a coast image from a Coastfile.
@@ -257,6 +260,7 @@ async fn dispatch(cli: Cli) -> Result<()> { Commands::SearchDocs(args) => commands::search_docs::execute(&args).await, Commands::InstallationPrompt(args) => commands::installation_prompt::execute(&args).await, Commands::SkillsPrompt(args) => commands::skills_prompt::execute(&args).await, + Commands::HarnessSetupPrompt(args) => commands::harness_setup_prompt::execute(&args).await, // --- Project-explicit commands --- Commands::Build(args) => commands::build::execute(&args).await, diff --git a/docs/installation_prompt.txt b/docs/installation_prompt.txt index 8f9e158..9e0e55b 100644 --- a/docs/installation_prompt.txt +++ b/docs/installation_prompt.txt @@ -236,3 +236,14 @@ Assign strategy — what happens when switching a Coast between worktrees: 8. Run `coast build`. If it fails, check the error and consult the docs (`coast search-docs ""`) to troubleshoot. 9. Run `coast run dev-1`. If it fails, check the error and consult the docs. 10. Run `coast ui` to open the Coastguard dashboard (this is for the user when you are done). +11. **Set up harness skills (Recommended).** Now that the Coastfile is configured and the Coast is running, it is strongly recommended to set up the Coast skills for the harnesses the user selected in step 3. These skills teach the AI agent in each harness how to use Coasts — without them, the harness will not know how to run tests, read logs, or manage Coast instances. + + Ask the user: "Your Coast is up and running! I'd recommend setting up the Coast skills for [list the harnesses from step 3] now — this only takes a moment and will let your AI coding tools work with Coasts automatically. Want me to do that?" + + If the user agrees, run: + + coast harness-setup-prompt --harness --harness ... + + passing every harness the user selected in step 3. Follow the instructions in the output to set up each harness. 
Since you already verified the CLI in step 1 and configured `worktree_dir` in step 3, skip those steps in the harness instructions (the output will remind you of this when multiple harnesses are present). + + If the user declines, let them know they can set up skills later by running `coast harness-setup-prompt --harness ` and following the instructions. From 66be2921a437bac595b3c539e30ed5093dca87d2 Mon Sep 17 00:00:00 2001 From: "dahyman91@gmail.com" Date: Tue, 24 Mar 2026 15:02:39 -0400 Subject: [PATCH 2/2] [dh] docs: add youtube videos to harnesses README and Cursor page --- coast-guard/src/generated/docs-manifest.json | 6 +++--- docs/harnesses/CURSOR.md | 4 ++++ docs/harnesses/README.md | 4 ++++ 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/coast-guard/src/generated/docs-manifest.json b/coast-guard/src/generated/docs-manifest.json index bf38c85..0e72239 100644 --- a/coast-guard/src/generated/docs-manifest.json +++ b/coast-guard/src/generated/docs-manifest.json @@ -348,7 +348,7 @@ "GETTING_STARTED.md": "# Getting Started with Coasts\n\n```youtube\nJe921fgJ4RY\nPart of the [Coasts Video Course](learn-coasts-videos/README.md).\n```\n\n## Installing\n\n```bash\neval \"$(curl -fsSL https://coasts.dev/install)\"\ncoast daemon install\n```\n\n*If you decide not to run `coast daemon install`, you are responsible for starting the daemon manually with `coast daemon start` every single time.*\n\n## Requirements\n\n- macOS or Linux\n- Docker Desktop on macOS, or Docker Engine with the Compose plugin on Linux\n- A project using Git\n- Node.js\n- `socat` (`brew install socat` on macOS, `sudo apt install socat` on Ubuntu)\n\n```text\nLinux note: Dynamic ports work out of the box on Linux.\nIf you need canonical ports below `1024`, see the checkout docs for the required host configuration.\n```\n\n## Setting Up Coasts in a Project\n\nAdd a Coastfile to the root of your project. 
Make sure you are not on a worktree when installing.\n\n```text\nmy-project/\n├── Coastfile <-- this is what Coast reads\n├── docker-compose.yml\n├── Dockerfile\n├── src/\n│ └── ...\n└── ...\n```\n\nThe `Coastfile` points at your existing local development resources and adds Coasts-specific configuration — see the [Coastfiles documentation](coastfiles/README.md) for the full schema:\n\n```toml\n[coast]\nname = \"my-project\"\ncompose = \"./docker-compose.yml\"\n\n[ports]\nweb = 3000\ndb = 5432\n```\n\nA Coastfile is a lightweight TOML file that *typically* points to your existing `docker-compose.yml` (it also works with non-containerized local dev setups) and describes the modifications needed to run your project in parallel — port mappings, volume strategies, and secrets. Place it at your project root.\n\nThe fastest way to create a Coastfile for your project is to let your coding agent do it.\n\nThe Coasts CLI ships with a built-in prompt that teaches any AI agent the full Coastfile schema and CLI. Copy it into your agent's chat and it will analyze your project and generate a Coastfile.\n\n```prompt-copy\ninstallation_prompt.txt\n```\n\nYou can also get the same output from the CLI by running `coast installation-prompt`.\n\n## Your First Coast\n\nBefore starting your first Coast, bring down any running development environment. If you are using Docker Compose, run `docker-compose down`. If you have local dev servers running, stop them. 
Coasts manage their own ports and will conflict with anything already listening.\n\nOnce your Coastfile is ready:\n\n```bash\ncoast build\ncoast run dev-1\n```\n\nCheck that your instance is running:\n\n```bash\ncoast ls\n\n# NAME PROJECT STATUS BRANCH RUNTIME WORKTREE CO ROOT\n# dev-1 my-project running main dind - ~/dev/my-project\n```\n\nSee where your services are listening:\n\n```bash\ncoast ports dev-1\n\n# SERVICE CANONICAL DYNAMIC\n# ★ web 3000 62217\n# db 5432 55681\n```\n\nEach instance gets its own set of dynamic ports so multiple instances can run side by side. To map an instance back to your project's canonical ports, check it out:\n\n```bash\ncoast checkout dev-1\n```\n\nThis means the runtime is now checked out and your project's canonical ports (like `3000`, `5432`) will route to this Coast instance.\n\n```bash\ncoast ls\n\n# NAME PROJECT STATUS BRANCH RUNTIME WORKTREE CO ROOT\n# dev-1 my-project running main dind - ✓ ~/dev/my-project\n```\n\nTo bring up the Coastguard observability UI for your project:\n\n```bash\ncoast ui\n```\n\n## What's Next?\n\n- Set up a [skill for your host agent](SKILLS_FOR_HOST_AGENTS.md) so it knows how to interact with Coasts\n", "SKILLS_FOR_HOST_AGENTS.md": "# Skills for Host Agents\n\nIf you use AI coding agents on the host while your app runs inside Coasts, your\nagent usually needs two Coast-specific pieces of setup:\n\n1. an always-on Coast Runtime section in the harness's project instruction file\n or rule file\n2. 
a reusable Coast workflow skill such as `/coasts` when the harness supports\n project skills\n\nWithout the first piece, the agent edits files but forgets to use `coast exec`.\nWithout the second, every Coast assignment, log, and UI flow has to be\nre-explained in chat.\n\nThis guide keeps the setup concrete and Coast-specific: which file to create,\nwhat text goes in it, and how that changes by harness.\n\n## Why agents need this\n\nCoasts share the [filesystem](concepts_and_terminology/FILESYSTEM.md) between\nyour host machine and the Coast container. Your agent edits files on the host\nand the running services inside the Coast see the changes immediately. But the\nagent still needs to:\n\n1. discover which Coast instance matches the current checkout\n2. run tests, builds, and runtime commands inside that Coast\n3. read logs and service status from the Coast\n4. handle worktree assignment safely when no Coast is already attached\n\n## What goes where\n\n- `AGENTS.md`, `CLAUDE.md`, or `.cursor/rules/coast.md` — short Coast rules\n that should apply on every task, even if no skill is invoked\n- skill (`.agents/skills/...`, `.claude/skills/...`, or `.cursor/skills/...`)\n — the reusable Coast workflow itself, such as `/coasts`\n- command file (`.claude/commands/...` or `.cursor/commands/...`) — optional\n explicit entrypoint for harnesses that support it; one simple option is to\n have the command reuse the skill\n\nIf one repo uses more than one harness, keep the canonical Coast skill in one\nplace and expose it where needed. See\n[Multiple Harnesses](harnesses/MULTIPLE_HARNESSES.md).\n\n## 1. Always-on Coast Runtime rules\n\nAdd the following block to the harness's always-on project instruction file or\nrule file (`AGENTS.md`, `CLAUDE.md`, `.cursor/rules/coast.md`, or equivalent):\n\n```text-copy\n# Coast Runtime\n\nThis project uses Coasts — containerized runtimes for running services, tests,\nand other runtime commands. 
The filesystem is shared between the host and the\ncontainer, so file edits on either side are visible to both immediately.\n\n## Discovery\n\nBefore the first runtime command in a session, run:\n\n coast lookup\n\nThis prints the instance name, ports, and example commands. Use the instance\nname from the output for all subsequent commands.\n\n## What runs where\n\nThe filesystem is shared, so only use `coast exec` for things that need the\ncontainer runtime (databases, services, integration tests). Everything else\nruns directly on the host.\n\nUse `coast exec` for:\n- Tests that need running services (integration tests, API tests)\n- Service restarts or compose operations\n- Anything that talks to databases, caches, or other container services\n\nRun directly on the host:\n- Linting, typechecking, formatting\n- Git operations\n- Playwright and browser tests\n- Installing host-side dependencies (npm install, pip install)\n- File search, code generation, static analysis\n\nExample:\n\n coast exec -- sh -c \"cd && npm test\" # needs DB\n coast exec --service # service shell\n npm run lint # host is fine\n npx playwright test # host is fine\n\n## Runtime feedback\n\n coast ps \n coast logs --service \n coast logs --service --tail 50\n\n## Creating and assigning Coasts\n\nIf `coast lookup` returns no match, run `coast ls` to see what exists.\n\nIf an unassigned Coast is already running for this project, prefer assigning\nyour worktree to it rather than creating a new one:\n\n coast assign -w \n\nIf no Coast is running, ask the user before creating one — Coasts can be\nmemory intensive:\n\n coast run -w \n\nA project must be built before instances can be created. 
If `coast run` fails\nbecause no build exists, run `coast build` first.\n\n## Coastfile setup\n\nIf the project does not have a Coastfile yet, or if you need to modify the\nCoastfile, read the Coastfile docs first:\n\n coast docs --path coastfiles/README.md\n\n## When confused\n\nBefore guessing about Coast behavior, explore the docs:\n\n coast docs # list all doc pages\n coast docs --path concepts_and_terminology/RUN.md\n coast docs --path concepts_and_terminology/ASSIGN.md\n coast docs --path concepts_and_terminology/BUILDS.md\n coast search-docs \"your question here\" # semantic search\n\n## Troubleshooting\n\nIf you run into issues with harness configuration (e.g. worktrees not being\nfound, `coast lookup` not matching), read the troubleshooting section of the\nrelevant harness doc:\n\n coast docs --path harnesses/CLAUDE_CODE.md\n coast docs --path harnesses/CODEX.md\n coast docs --path harnesses/CONDUCTOR.md\n coast docs --path harnesses/CURSOR.md\n coast docs --path harnesses/T3_CODE.md\n coast docs --path harnesses/SHEP.md\n\n## Rules\n\n- Always run `coast lookup` before your first runtime command in a session.\n- Use `coast exec` only for things that need the container runtime.\n- Use `coast exec --service ` when you need to run inside an app/service container.\n- Run linting, typechecking, formatting, and git on the host directly.\n- Use `coast docs` or `coast search-docs` before guessing about Coast behavior.\n- Do not run services directly on the host when the project expects Coast.\n```\n\nThis block belongs in the always-on file because the rules should apply on\nevery task, not only when the agent explicitly enters a `/coasts` workflow.\n\n## 2. Reusable `/coasts` skill\n\nWhen the harness supports project skills, save the skill content as a\n`SKILL.md` in your skills directory. 
The full skill text is in\n[skills_prompt.txt](skills_prompt.txt) (if in CLI mode, use\n`coast skills-prompt`) — everything after the Coast Runtime block is the skill\ncontent, starting from the `---` frontmatter.\n\nIf you are using Codex or OpenAI-specific surfaces, you can optionally add\n`agents/openai.yaml` beside the skill for display metadata or invocation\npolicy. That metadata should live beside the skill, not replace it.\n\n## Harness quick start\n\n| Harness | Always-on file | Reusable Coast workflow | Notes |\n|---------|----------------|-------------------------|-------|\n| OpenAI Codex | `AGENTS.md` | `.agents/skills/coasts/SKILL.md` | No separate project command file to recommend for Coast docs. See [Codex](harnesses/CODEX.md). |\n| Claude Code | `CLAUDE.md` | `.claude/skills/coasts/SKILL.md` | `.claude/commands/coasts.md` is optional, but keep the logic in the skill. See [Claude Code](harnesses/CLAUDE_CODE.md). |\n| Cursor | `AGENTS.md` or `.cursor/rules/coast.md` | `.cursor/skills/coasts/SKILL.md` or shared `.agents/skills/coasts/SKILL.md` | `.cursor/commands/coasts.md` is optional. `.cursor/worktrees.json` is for Cursor worktree bootstrap, not Coast policy. See [Cursor](harnesses/CURSOR.md). |\n| Conductor | `CLAUDE.md` | Start with `CLAUDE.md`; use Conductor scripts and settings for Conductor-specific behavior | Do not assume full Claude Code project command behavior. If a new command does not appear, fully close and reopen Conductor. See [Conductor](harnesses/CONDUCTOR.md). |\n| T3 Code | `AGENTS.md` | `.agents/skills/coasts/SKILL.md` | This is the most limited harness surface here. Use the Codex-style layout and do not invent a T3-native command layer for Coast docs. See [T3 Code](harnesses/T3_CODE.md). |\n\n## Let the agent set itself up\n\nThe fastest way is to let the agent write the right files itself. 
Copy the\nprompt below into your agent's chat — it includes the Coast Runtime block, the\n`coasts` skill block, and harness-specific instructions for where each piece\nbelongs.\n\n```prompt-copy\nskills_prompt.txt\n```\n\nYou can also get the same output from the CLI by running `coast skills-prompt`.\n\n## Manual setup\n\n- **Codex:** put the Coast Runtime section in `AGENTS.md`, then put the\n reusable `coasts` skill in `.agents/skills/coasts/SKILL.md`.\n- **Claude Code:** put the Coast Runtime section in `CLAUDE.md`, then put the\n reusable `coasts` skill in `.claude/skills/coasts/SKILL.md`. Only add\n `.claude/commands/coasts.md` if you specifically want a command file.\n- **Cursor:** put the Coast Runtime section in `AGENTS.md` if you want the most\n portable instructions, or in `.cursor/rules/coast.md` if you want a\n Cursor-native project rule. Put the reusable `coasts` workflow in\n `.cursor/skills/coasts/SKILL.md` for a Cursor-only repo, or in\n `.agents/skills/coasts/SKILL.md` if the repo is shared with other harnesses.\n Only add `.cursor/commands/coasts.md` if you specifically want an explicit\n command file.\n- **Conductor:** put the Coast Runtime section in `CLAUDE.md`. Use Conductor\n Repository Settings scripts for Conductor-specific bootstrap or run behavior.\n If you add a command and it does not appear, fully close and reopen the app.\n- **T3 Code:** use the same layout as Codex: `AGENTS.md` plus\n `.agents/skills/coasts/SKILL.md`. Treat T3 Code as a thin Codex-style\n harness here, not as a separate Coast command surface.\n- **Multiple harnesses:** keep the canonical skill in\n `.agents/skills/coasts/SKILL.md`. 
Cursor can load that directly; expose it to\n Claude Code through `.claude/skills/coasts/` if needed.\n\n## Further reading\n\n- Read the [Harnesses guide](harnesses/README.md) for the per-harness matrix\n- Read [Multiple Harnesses](harnesses/MULTIPLE_HARNESSES.md) for the shared\n layout pattern\n- Read the [Coastfiles documentation](coastfiles/README.md) to learn the full\n configuration schema\n- Learn the [Coast CLI](concepts_and_terminology/CLI.md) commands for managing\n instances\n- Explore [Coastguard](concepts_and_terminology/COASTGUARD.md), the web UI for\n observing and controlling your Coasts\n", "doc_ordering.txt": "# Top-level\nREADME.md\nGETTING_STARTED.md\nSKILLS_FOR_HOST_AGENTS.md\n\n# Learn Coasts\nlearn-coasts-videos/README.md\nlearn-coasts-videos/coasts.md\nlearn-coasts-videos/ports.md\nlearn-coasts-videos/assign.md\nlearn-coasts-videos/checkout.md\nlearn-coasts-videos/volumes.md\nlearn-coasts-videos/secrets.md\nlearn-coasts-videos/getting-started.md\nlearn-coasts-videos/coast-ui.md\n\n# Harnesses\nharnesses/README.md\nharnesses/CODEX.md\nharnesses/CONDUCTOR.md\nharnesses/CLAUDE_CODE.md\nharnesses/CURSOR.md\nharnesses/T3_CODE.md\nharnesses/SHEP.md\nharnesses/MULTIPLE_HARNESSES.md\n\n# Concepts and 
Terminology\nconcepts_and_terminology/README.md\nconcepts_and_terminology/COASTS.md\nconcepts_and_terminology/RUN.md\nconcepts_and_terminology/REMOVE.md\nconcepts_and_terminology/FILESYSTEM.md\nconcepts_and_terminology/DAEMON.md\nconcepts_and_terminology/CLI.md\nconcepts_and_terminology/COASTGUARD.md\nconcepts_and_terminology/PORTS.md\nconcepts_and_terminology/PRIMARY_PORT_AND_DNS.md\nconcepts_and_terminology/ASSIGN.md\nconcepts_and_terminology/CHECKOUT.md\nconcepts_and_terminology/LOOKUP.md\nconcepts_and_terminology/VOLUMES.md\nconcepts_and_terminology/SHARED_SERVICES.md\nconcepts_and_terminology/SECRETS.md\nconcepts_and_terminology/BUILDS.md\nconcepts_and_terminology/COASTFILE_TYPES.md\nconcepts_and_terminology/RUNTIMES_AND_SERVICES.md\nconcepts_and_terminology/BARE_SERVICES.md\nconcepts_and_terminology/MIXED_SERVICE_TYPES.md\nconcepts_and_terminology/LOGS.md\nconcepts_and_terminology/EXEC_AND_DOCKER.md\nconcepts_and_terminology/AGENT_SHELLS.md\nconcepts_and_terminology/MCP_SERVERS.md\nconcepts_and_terminology/PERFORMANCE_OPTIMIZATIONS.md\nconcepts_and_terminology/TROUBLESHOOTING.md\n\n# Coastfiles\ncoastfiles/README.md\ncoastfiles/PROJECT.md\ncoastfiles/WORKTREE_DIR.md\ncoastfiles/PORTS.md\ncoastfiles/SHARED_SERVICES.md\ncoastfiles/SERVICES.md\ncoastfiles/SECRETS.md\ncoastfiles/VOLUMES.md\ncoastfiles/ASSIGN.md\ncoastfiles/INHERITANCE.md\ncoastfiles/AGENT_SHELL.md\ncoastfiles/MCP.md\n\n# Recipes\nrecipes/README.md\nrecipes/FULLSTACK_MONOREPO.md\n\n", - "installation_prompt.txt": "You are installing Coasts into this project. Coast (containerized host) is a CLI tool that runs multiple isolated development environments on a single machine using Docker-in-Docker containers. Each environment gets its own ports, volumes, and runtime — ideal for parallel worktree workflows.\n\nYour job: analyze this project and generate a Coastfile (a TOML file named \"Coastfile\" at the project root).\n\n=== DOCUMENTATION ===\n\nCoast has built-in docs accessible from the CLI. 
Use these to understand the full Coastfile schema, volume strategies, assign behavior, and other configuration options before generating a Coastfile.\n\nBrowse the docs tree:\n\n coast docs\n\nThis prints the full docs tree. Start by reading the README files — they provide indexes to help you find the right documentation for each topic:\n\n coast docs --path README.md\n coast docs --path coastfiles/README.md\n coast docs --path concepts_and_terminology/README.md\n\nRead a specific doc:\n\n coast docs --path coastfiles/PROJECT.md\n coast docs --path coastfiles/VOLUMES.md\n\nSearch the docs (semantic search — describe what you're looking for in natural language):\n\n coast search-docs \"how do volume strategies work\"\n coast search-docs \"shared postgres across instances\"\n coast search-docs \"secret injection from environment variables\"\n\nUse the docs to make informed decisions about this project's Coastfile configuration. The coastfiles/ section covers every Coastfile directive in detail.\n\n=== COASTFILE SCHEMA (quick reference) ===\n\n[coast] — Required. Project metadata.\n\n name (string, required) Project identifier used in container/volume naming.\n compose (string, optional) Path to docker-compose.yml relative to the Coastfile.\n runtime (string, optional) \"dind\" (default), \"sysbox\", or \"podman\".\n root (string, optional) Project root override (relative or absolute).\n worktree_dir (string or array, optional) Directory or directories for git worktrees (default: \".worktrees\"). Accepts a single string or an array of strings. Auto-detected from existing worktrees at runtime.\n\n[coast.setup] — Optional. Customize the DinD container itself.\n\n packages (array of strings) Alpine packages to install (e.g. [\"nodejs\", \"npm\", \"git\"]).\n run (array of strings) Arbitrary commands to run during setup.\n\n[ports] — Required (at least one). 
Map of logical name to port number.\n These ports are forwarded to the host when the coast is checked out.\n\n Example:\n [ports]\n web = 3000\n api = 8080\n postgres = 5432\n\n[volumes.*] — Optional. Per-volume configuration.\n\n strategy \"isolated\" (default) or \"shared\"\n service Compose service name that owns this volume.\n mount Mount path inside the service container.\n snapshot_source (isolated only) Seed from an existing volume name.\n\n Example:\n [volumes.postgres_data]\n strategy = \"isolated\"\n service = \"db\"\n mount = \"/var/lib/postgresql/data\"\n\n[secrets.*] — Optional. Secret extraction and injection.\n\n extractor \"file\", \"env\", \"command\", or \"macos-keychain\"\n inject \"env:VAR_NAME\" or \"file:/path/in/container\"\n ttl Optional expiry (e.g. \"1h\", \"30m\").\n\n Extractor-specific params:\n file: path = \"./path/to/secret\"\n env: var = \"HOST_ENV_VAR\"\n command: run = \"echo secret-value\"\n macos-keychain: item = \"keychain-item-name\"\n\n Example:\n [secrets.db_password]\n extractor = \"env\"\n var = \"DB_PASSWORD\"\n inject = \"env:DATABASE_PASSWORD\"\n\n[inject] — Optional. Non-secret host file/env injection.\n\n env Array of host env var names to forward.\n files Array of host file paths to mount.\n\n Example:\n [inject]\n env = [\"NODE_ENV\", \"DEBUG\"]\n files = [\"~/.ssh/id_ed25519\", \"~/.gitconfig\"]\n\n[shared_services.*] — Optional. 
Services on the host Docker daemon shared across instances.\n\n image Docker image.\n ports Array of port numbers.\n volumes Array of volume mounts.\n env Inline table of environment variables.\n auto_create_db (bool) Create a per-instance database automatically.\n inject Inject connection string into coast containers.\n\n Example:\n [shared_services.postgres]\n image = \"postgres:16-alpine\"\n ports = [5432]\n volumes = [\"postgres_data:/var/lib/postgresql/data\"]\n env = { POSTGRES_USER = \"dev\", POSTGRES_PASSWORD = \"dev\", POSTGRES_DB = \"app\" }\n auto_create_db = true\n inject = \"env:DATABASE_URL\"\n\n[assign] — Optional. Controls what happens on branch switch (coast assign).\n\n default \"none\", \"restart\", or \"rebuild\"\n [assign.services] Per-service overrides.\n [assign.rebuild_triggers] Per-service file globs that trigger rebuild.\n\n Example:\n [assign]\n default = \"none\"\n [assign.services]\n api = \"restart\"\n worker = \"rebuild\"\n [assign.rebuild_triggers]\n worker = [\"Dockerfile\", \"package.json\"]\n\n[services.*] — Optional. 
Bare process services (no docker-compose needed).\n\n command Shell command to run.\n port Port number.\n restart \"on-failure\" or \"always\".\n\n Example:\n [services.web]\n command = \"node server.js\"\n port = 3000\n restart = \"on-failure\"\n\n=== EXAMPLE: Minimal (no compose) ===\n\n[coast]\nname = \"my-app\"\nruntime = \"dind\"\n\n[coast.setup]\npackages = [\"nodejs\", \"npm\"]\n\n[ports]\napp = 3000\n\n=== EXAMPLE: With docker-compose ===\n\n[coast]\nname = \"my-app\"\ncompose = \"./docker-compose.yml\"\nruntime = \"dind\"\n\n[ports]\napp = 3000\npostgres = 5432\nredis = 6379\n\n[volumes.postgres_data]\nstrategy = \"shared\"\nservice = \"db\"\nmount = \"/var/lib/postgresql/data\"\n\n[volumes.redis_data]\nstrategy = \"isolated\"\nservice = \"cache\"\nmount = \"/data\"\n\n[assign]\ndefault = \"none\"\n\n[assign.services]\napp = \"rebuild\"\n\n=== EXAMPLE: With secrets ===\n\n[coast]\nname = \"my-app\"\ncompose = \"./docker-compose.yml\"\nruntime = \"dind\"\n\n[ports]\napp = 3000\n\n[secrets.api_key]\nextractor = \"env\"\nvar = \"API_KEY\"\ninject = \"env:API_KEY\"\n\n[secrets.ssh_key]\nextractor = \"file\"\npath = \"~/.ssh/id_ed25519\"\ninject = \"file:/run/secrets/ssh_key\"\n\n=== KEY TRADEOFFS TO DISCUSS WITH THE USER ===\n\nBefore generating the Coastfile, ask the user about any ambiguous configuration choices. Here are the main ones:\n\nDatabase and infrastructure strategy — there are three options for services like postgres and redis:\n - Isolated volumes (default): each Coast instance gets its own copy of the data inside its DinD container. Instances cannot interfere with each other. Best when you want per-branch database state.\n - Shared volumes: all instances read and write the same volume inside their DinD containers. Saves disk space but concurrent writes from multiple instances can corrupt data.\n - Shared services: run the database on the host Docker daemon instead of inside each Coast. All instances connect to one shared server. 
Uses the least memory, supports auto_create_db for per-instance databases on a single postgres, and data outlives instance deletion. Best for large teams or memory-constrained machines.\n - If the project has a database, ask the user which approach they want. Explain the tradeoffs — isolated is safest, shared services is most memory-efficient.\n\nAssign strategy — what happens when switching a Coast between worktrees:\n - \"none\": do nothing (for services like postgres/redis that don't change between branches).\n - \"restart\": restart the container (for interpreted services that just need a process restart).\n - \"rebuild\": rebuild the Docker image and restart (for services where the branch change affects the Dockerfile or build dependencies).\n - If the project has multiple services, ask which ones need rebuilding vs restarting on branch switch.\n\n=== INSTRUCTIONS ===\n\n1. Look at this project's structure. If there is a docker-compose.yml, read it to identify services, ports, and volumes.\n2. Detect the existing git worktree directory. Run `git worktree list` to check if the project already has git worktrees set up.\n - If worktrees exist, examine their paths to determine the common parent directory (e.g., if worktrees are at `../.worktrees/feat-a` and `../.worktrees/feat-b`, the worktree_dir is `\"../.worktrees\"`).\n - Set `worktree_dir` in the Coastfile to match the detected directory.\n - If no worktrees exist, omit `worktree_dir` (Coast defaults to \".worktrees\"). Do NOT use \".coasts\" — that pollutes the project with a Coast-branded directory.\n3. 
Ask the user if they use any of these coding harnesses with this project:\n - **Claude Code** — worktrees at `.claude/worktrees`\n - **OpenAI Codex** — worktrees at `~/.codex/worktrees`\n - **Cursor** — worktrees at `~/.cursor/worktrees/<name>/` (where `<name>` is the coast name from `[coast] name`)\n - **Conductor** — worktrees at `~/conductor/workspaces/`\n - **T3 Code** — worktrees at `~/.t3/worktrees/`\n For each harness the user selects, include its worktree directory in the `worktree_dir` array. Combine these with any directory detected in step 2. If the user selects none and no worktrees were detected in step 2, omit `worktree_dir` (Coast defaults to \".worktrees\").\n4. Read the relevant Coast docs (use `coast docs` and `coast search-docs`) to understand volume strategies, assign behavior, and any configuration options that apply to this project's stack.\n5. Ask the user about any ambiguous configuration choices (see tradeoffs above). Do not guess — explain the options and let them decide.\n6. Generate a Coastfile at the project root based on the project analysis and user input.\n7. If the project has no docker-compose.yml, use [services.*] for bare process definitions or [coast.setup] to install dependencies.\n8. Run `coast build`. If it fails, check the error and consult the docs (`coast search-docs \"<error message>\"`) to troubleshoot.\n9. Run `coast run dev-1`. If it fails, check the error and consult the docs.\n10. Run `coast ui` to open the Coastguard dashboard (this is for the user when you are done).\n", + "installation_prompt.txt": "You are installing Coasts into this project. Coast (containerized host) is a CLI tool that runs multiple isolated development environments on a single machine using Docker-in-Docker containers. 
Each environment gets its own ports, volumes, and runtime — ideal for parallel worktree workflows.\n\nYour job: analyze this project and generate a Coastfile (a TOML file named \"Coastfile\" at the project root).\n\n=== DOCUMENTATION ===\n\nCoast has built-in docs accessible from the CLI. Use these to understand the full Coastfile schema, volume strategies, assign behavior, and other configuration options before generating a Coastfile.\n\nBrowse the docs tree:\n\n coast docs\n\nThis prints the full docs tree. Start by reading the README files — they provide indexes to help you find the right documentation for each topic:\n\n coast docs --path README.md\n coast docs --path coastfiles/README.md\n coast docs --path concepts_and_terminology/README.md\n\nRead a specific doc:\n\n coast docs --path coastfiles/PROJECT.md\n coast docs --path coastfiles/VOLUMES.md\n\nSearch the docs (semantic search — describe what you're looking for in natural language):\n\n coast search-docs \"how do volume strategies work\"\n coast search-docs \"shared postgres across instances\"\n coast search-docs \"secret injection from environment variables\"\n\nUse the docs to make informed decisions about this project's Coastfile configuration. The coastfiles/ section covers every Coastfile directive in detail.\n\n=== COASTFILE SCHEMA (quick reference) ===\n\n[coast] — Required. Project metadata.\n\n name (string, required) Project identifier used in container/volume naming.\n compose (string, optional) Path to docker-compose.yml relative to the Coastfile.\n runtime (string, optional) \"dind\" (default), \"sysbox\", or \"podman\".\n root (string, optional) Project root override (relative or absolute).\n worktree_dir (string or array, optional) Directory or directories for git worktrees (default: \".worktrees\"). Accepts a single string or an array of strings. Auto-detected from existing worktrees at runtime.\n\n[coast.setup] — Optional. 
Customize the DinD container itself.\n\n packages (array of strings) Alpine packages to install (e.g. [\"nodejs\", \"npm\", \"git\"]).\n run (array of strings) Arbitrary commands to run during setup.\n\n[ports] — Required (at least one). Map of logical name to port number.\n These ports are forwarded to the host when the coast is checked out.\n\n Example:\n [ports]\n web = 3000\n api = 8080\n postgres = 5432\n\n[volumes.*] — Optional. Per-volume configuration.\n\n strategy \"isolated\" (default) or \"shared\"\n service Compose service name that owns this volume.\n mount Mount path inside the service container.\n snapshot_source (isolated only) Seed from an existing volume name.\n\n Example:\n [volumes.postgres_data]\n strategy = \"isolated\"\n service = \"db\"\n mount = \"/var/lib/postgresql/data\"\n\n[secrets.*] — Optional. Secret extraction and injection.\n\n extractor \"file\", \"env\", \"command\", or \"macos-keychain\"\n inject \"env:VAR_NAME\" or \"file:/path/in/container\"\n ttl Optional expiry (e.g. \"1h\", \"30m\").\n\n Extractor-specific params:\n file: path = \"./path/to/secret\"\n env: var = \"HOST_ENV_VAR\"\n command: run = \"echo secret-value\"\n macos-keychain: item = \"keychain-item-name\"\n\n Example:\n [secrets.db_password]\n extractor = \"env\"\n var = \"DB_PASSWORD\"\n inject = \"env:DATABASE_PASSWORD\"\n\n[inject] — Optional. Non-secret host file/env injection.\n\n env Array of host env var names to forward.\n files Array of host file paths to mount.\n\n Example:\n [inject]\n env = [\"NODE_ENV\", \"DEBUG\"]\n files = [\"~/.ssh/id_ed25519\", \"~/.gitconfig\"]\n\n[shared_services.*] — Optional. 
Services on the host Docker daemon shared across instances.\n\n image Docker image.\n ports Array of port numbers.\n volumes Array of volume mounts.\n env Inline table of environment variables.\n auto_create_db (bool) Create a per-instance database automatically.\n inject Inject connection string into coast containers.\n\n Example:\n [shared_services.postgres]\n image = \"postgres:16-alpine\"\n ports = [5432]\n volumes = [\"postgres_data:/var/lib/postgresql/data\"]\n env = { POSTGRES_USER = \"dev\", POSTGRES_PASSWORD = \"dev\", POSTGRES_DB = \"app\" }\n auto_create_db = true\n inject = \"env:DATABASE_URL\"\n\n[assign] — Optional. Controls what happens on branch switch (coast assign).\n\n default \"none\", \"restart\", or \"rebuild\"\n [assign.services] Per-service overrides.\n [assign.rebuild_triggers] Per-service file globs that trigger rebuild.\n\n Example:\n [assign]\n default = \"none\"\n [assign.services]\n api = \"restart\"\n worker = \"rebuild\"\n [assign.rebuild_triggers]\n worker = [\"Dockerfile\", \"package.json\"]\n\n[services.*] — Optional. 
Bare process services (no docker-compose needed).\n\n command Shell command to run.\n port Port number.\n restart \"on-failure\" or \"always\".\n\n Example:\n [services.web]\n command = \"node server.js\"\n port = 3000\n restart = \"on-failure\"\n\n=== EXAMPLE: Minimal (no compose) ===\n\n[coast]\nname = \"my-app\"\nruntime = \"dind\"\n\n[coast.setup]\npackages = [\"nodejs\", \"npm\"]\n\n[ports]\napp = 3000\n\n=== EXAMPLE: With docker-compose ===\n\n[coast]\nname = \"my-app\"\ncompose = \"./docker-compose.yml\"\nruntime = \"dind\"\n\n[ports]\napp = 3000\npostgres = 5432\nredis = 6379\n\n[volumes.postgres_data]\nstrategy = \"shared\"\nservice = \"db\"\nmount = \"/var/lib/postgresql/data\"\n\n[volumes.redis_data]\nstrategy = \"isolated\"\nservice = \"cache\"\nmount = \"/data\"\n\n[assign]\ndefault = \"none\"\n\n[assign.services]\napp = \"rebuild\"\n\n=== EXAMPLE: With secrets ===\n\n[coast]\nname = \"my-app\"\ncompose = \"./docker-compose.yml\"\nruntime = \"dind\"\n\n[ports]\napp = 3000\n\n[secrets.api_key]\nextractor = \"env\"\nvar = \"API_KEY\"\ninject = \"env:API_KEY\"\n\n[secrets.ssh_key]\nextractor = \"file\"\npath = \"~/.ssh/id_ed25519\"\ninject = \"file:/run/secrets/ssh_key\"\n\n=== KEY TRADEOFFS TO DISCUSS WITH THE USER ===\n\nBefore generating the Coastfile, ask the user about any ambiguous configuration choices. Here are the main ones:\n\nDatabase and infrastructure strategy — there are three options for services like postgres and redis:\n - Isolated volumes (default): each Coast instance gets its own copy of the data inside its DinD container. Instances cannot interfere with each other. Best when you want per-branch database state.\n - Shared volumes: all instances read and write the same volume inside their DinD containers. Saves disk space but concurrent writes from multiple instances can corrupt data.\n - Shared services: run the database on the host Docker daemon instead of inside each Coast. All instances connect to one shared server. 
Uses the least memory, supports auto_create_db for per-instance databases on a single postgres, and data outlives instance deletion. Best for large teams or memory-constrained machines.\n - If the project has a database, ask the user which approach they want. Explain the tradeoffs — isolated is safest, shared services is most memory-efficient.\n\nAssign strategy — what happens when switching a Coast between worktrees:\n - \"none\": do nothing (for services like postgres/redis that don't change between branches).\n - \"restart\": restart the container (for interpreted services that just need a process restart).\n - \"rebuild\": rebuild the Docker image and restart (for services where the branch change affects the Dockerfile or build dependencies).\n - If the project has multiple services, ask which ones need rebuilding vs restarting on branch switch.\n\n=== INSTRUCTIONS ===\n\n1. Look at this project's structure. If there is a docker-compose.yml, read it to identify services, ports, and volumes.\n2. Detect the existing git worktree directory. Run `git worktree list` to check if the project already has git worktrees set up.\n - If worktrees exist, examine their paths to determine the common parent directory (e.g., if worktrees are at `../.worktrees/feat-a` and `../.worktrees/feat-b`, the worktree_dir is `\"../.worktrees\"`).\n - Set `worktree_dir` in the Coastfile to match the detected directory.\n - If no worktrees exist, omit `worktree_dir` (Coast defaults to \".worktrees\"). Do NOT use \".coasts\" — that pollutes the project with a Coast-branded directory.\n3. 
Ask the user if they use any of these coding harnesses with this project:\n - **Claude Code** — worktrees at `.claude/worktrees`\n - **OpenAI Codex** — worktrees at `~/.codex/worktrees`\n - **Cursor** — worktrees at `~/.cursor/worktrees/<name>/` (where `<name>` is the coast name from `[coast] name`)\n - **Conductor** — worktrees at `~/conductor/workspaces/`\n - **T3 Code** — worktrees at `~/.t3/worktrees/`\n For each harness the user selects, include its worktree directory in the `worktree_dir` array. Combine these with any directory detected in step 2. If the user selects none and no worktrees were detected in step 2, omit `worktree_dir` (Coast defaults to \".worktrees\").\n4. Read the relevant Coast docs (use `coast docs` and `coast search-docs`) to understand volume strategies, assign behavior, and any configuration options that apply to this project's stack.\n5. Ask the user about any ambiguous configuration choices (see tradeoffs above). Do not guess — explain the options and let them decide.\n6. Generate a Coastfile at the project root based on the project analysis and user input.\n7. If the project has no docker-compose.yml, use [services.*] for bare process definitions or [coast.setup] to install dependencies.\n8. Run `coast build`. If it fails, check the error and consult the docs (`coast search-docs \"<error message>\"`) to troubleshoot.\n9. Run `coast run dev-1`. If it fails, check the error and consult the docs.\n10. Run `coast ui` to open the Coastguard dashboard (this is for the user when you are done).\n11. **Set up harness skills (Recommended).** Now that the Coastfile is configured and the Coast is running, it is strongly recommended to set up the Coast skills for the harnesses the user selected in step 3. These skills teach the AI agent in each harness how to use Coasts — without them, the harness will not know how to run tests, read logs, or manage Coast instances.\n\n Ask the user: \"Your Coast is up and running! 
I'd recommend setting up the Coast skills for [list the harnesses from step 3] now — this only takes a moment and will let your AI coding tools work with Coasts automatically. Want me to do that?\"\n\n If the user agrees, run:\n\n coast harness-setup-prompt --harness <harness> --harness <harness> ...\n\n passing every harness the user selected in step 3. Follow the instructions in the output to set up each harness. Since you already verified the CLI in step 1 and configured `worktree_dir` in step 3, skip those steps in the harness instructions (the output will remind you of this when multiple harnesses are present).\n\n If the user declines, let them know they can set up skills later by running `coast harness-setup-prompt --harness <harness>` and following the instructions.\n", "skills_prompt.txt": "# Coast Runtime\n\nThis project uses Coasts — containerized runtimes for running services, tests,\nand other runtime commands. The filesystem is shared between the host and the\ncontainer, so file edits on either side are visible to both immediately.\n\n## Discovery\n\nBefore the first runtime command in a session, run:\n\n coast lookup\n\nThis prints the instance name, ports, and example commands. Use the instance\nname from the output for all subsequent commands.\n\n## What runs where\n\nThe filesystem is shared, so only use `coast exec` for things that need the\ncontainer runtime (databases, services, integration tests). 
Everything else\nruns directly on the host.\n\nUse `coast exec` for:\n- Tests that need running services (unit tests that are integrated with services or dbs, integration tests, API tests)\n- Service restarts or compose operations\n- Anything that talks to databases, caches, or other container services\n\nRun directly on the host:\n- Linting, typechecking, formatting\n- Git operations\n- Playwright and browser tests\n- Installing host-side dependencies (npm install, pip install)\n- File search, code generation, static analysis\n\nExample:\n\n coast exec <instance> -- sh -c \"cd <dir> && npm test\" # needs DB\n coast exec <instance> --service <service> # service shell\n npm run lint # host is fine\n npx playwright test # host is fine\n\n## Runtime feedback\n\n coast ps <instance>\n coast logs <instance> --service <service>\n coast logs <instance> --service <service> --tail 50\n\n## Creating and assigning Coasts\n\nIf `coast lookup` returns no match, run `coast ls` to see what exists.\n\nIf an unassigned Coast is already running for this project, prefer assigning\nyour worktree to it rather than creating a new one:\n\n coast assign <instance> -w <worktree-path>\n\nAn already occupied Coast can also be reassigned with `coast assign`, but check\nwith the user first because that will disrupt the current slot.\n\nIf no Coast is running, ask the user before creating one — Coasts can be\nmemory intensive:\n\n coast run <name> -w <worktree-path>\n\nA project must be built before instances can be created. 
If `coast run` fails\nbecause no build exists, run `coast build` first.\n\n## Coastfile setup\n\nIf the project does not have a Coastfile yet, or if you need to modify the\nCoastfile, read the Coastfile docs first:\n\n coast docs --path coastfiles/README.md\n\n## When confused\n\nBefore guessing about Coast behavior, explore the docs:\n\n coast docs # list all doc pages\n coast docs --path concepts_and_terminology/RUN.md\n coast docs --path concepts_and_terminology/ASSIGN.md\n coast docs --path concepts_and_terminology/BUILDS.md\n coast search-docs \"your question here\" # semantic search\n\n## Troubleshooting\n\nIf you run into issues with harness configuration (e.g. worktrees not being\nfound, `coast lookup` not matching), read the troubleshooting section of the\nrelevant harness doc:\n\n coast docs --path harnesses/CLAUDE_CODE.md\n coast docs --path harnesses/CODEX.md\n coast docs --path harnesses/CONDUCTOR.md\n coast docs --path harnesses/CURSOR.md\n coast docs --path harnesses/T3_CODE.md\n coast docs --path harnesses/SHEP.md\n\n## Rules\n\n- Always run `coast lookup` before your first runtime command in a session.\n- Use `coast exec` only for things that need the container runtime.\n- Use `coast exec --service ` when you need to run inside an app/service container.\n- Run linting, typechecking, formatting, and git on the host directly.\n- Use `coast docs` or `coast search-docs` before guessing about Coast behavior.\n- Do not run services directly on the host when the project expects Coast.\n\n---\nname: coasts\ndescription: Inspect and control Coast instances for the current checkout. Use\n when the user says \"/coasts\", asks to assign or reassign a Coast, wants to\n run commands or read logs in the matching Coast, wants to create a new Coast,\n or explicitly asks to open Coast UI.\n---\n\n# Coasts\n\nUse the Coast CLI directly. 
Do not add wrappers.\n\n## Orient Yourself\n\nStart by exploring the CLI and docs:\n\n coast # see all available commands\n coast docs # list all doc pages\n coast search-docs \"your question\" # semantic search\n\nWhen anything about Coast behavior is unclear, read the docs before guessing:\n\n coast docs --path concepts_and_terminology/RUN.md\n coast docs --path concepts_and_terminology/BUILDS.md\n coast docs --path concepts_and_terminology/ASSIGN.md\n coast docs --path concepts_and_terminology/PORTS.md\n coast docs --path coastfiles/README.md\n\n## Quick Start\n\nRoute requests into one of these modes:\n\n1. **Use Coast** — run `coast lookup`, then use `coast exec`, `coast ps`,\n or `coast logs` with the matching instance.\n2. **Create or Assign** — run `coast ls`, then `coast run` to create a new\n Coast or `coast assign` to repoint an existing one.\n3. **Open UI** — run `coast ui`.\n\n## What Runs Where\n\nThe host and the Coast share the filesystem. Only use `coast exec` for things\nthat need running services inside the container.\n\n**Use `coast exec` for:**\n- Integration tests, API tests, anything that needs databases or services\n- Service restarts, compose operations\n- Commands that talk to container-only processes\n\n**Run on the host:**\n- Linting (`eslint`, `rubocop`, `golangci-lint`)\n- Typechecking (`tsc --noEmit`, `go vet`)\n- Formatting (`prettier`, `gofmt`)\n- Git operations\n- Playwright and browser tests\n- Static analysis, code generation\n- Package installs (`npm install`, `pip install`)\n\n## Create and Assign\n\nWhen `coast lookup` returns no match:\n\n1. Run `coast ls` to see available slots.\n2. Prefer `coast run -w ` to create and assign in one step.\n3. If no build exists yet, run `coast build` first.\n4. 
After creating, rerun `coast lookup` to confirm.\n\nWhen you want to switch an existing Coast to a different worktree:\n\n coast assign -w \n\nThat also works for an already assigned or checked-out Coast, but ask the user\nfirst before reassigning an occupied slot.\n\n## Coastfile Setup\n\nIf the project needs a new or modified Coastfile, read the docs first:\n\n coast docs --path coastfiles/README.md\n\nThe Coastfile docs cover compose setup, ports, volumes, secrets, shared\nservices, bare services, and inheritance.\n\n## Safety Rules\n\n- Run `coast lookup` before taking action and again after any topology change.\n- Ask before `coast assign`, `coast unassign`, or `coast checkout` if it would\n disrupt an existing slot.\n- Prefer creating a new Coast over reusing a checked-out or already-assigned\n one unless the user explicitly wants the existing slot to be reassigned.\n- Use `coast docs` or `coast search-docs` before guessing.\n", "coastfiles/README.md": "# Coastfiles\n\nA Coastfile is a TOML configuration file that lives at the root of your project. It tells Coast everything it needs to know to build and run isolated development environments for that project — which services to run, which ports to forward, how to handle data, and how to manage secrets.\n\nEvery Coast project needs at least one Coastfile. The file is always named `Coastfile` (capital C, no extension). If you need variants for different workflows, you create typed Coastfiles like `Coastfile.light` or `Coastfile.snap` that [inherit from the base](INHERITANCE.md).\n\nFor a deeper understanding of how Coastfiles relate to the rest of Coast, see [Coasts](../concepts_and_terminology/COASTS.md) and [Builds](../concepts_and_terminology/BUILDS.md).\n\n## Quickstart\n\nThe smallest possible Coastfile:\n\n```toml\n[coast]\nname = \"my-app\"\n```\n\nThis gives you a DinD container you can `coast exec` into. 
Most projects will want either a `compose` reference or [bare services](SERVICES.md):\n\n```toml\n[coast]\nname = \"my-app\"\ncompose = \"./docker-compose.yml\"\n\n[ports]\nweb = 3000\napi = 8080\n```\n\nOr without compose, using bare services:\n\n```toml\n[coast]\nname = \"my-app\"\n\n[coast.setup]\npackages = [\"nodejs\", \"npm\"]\n\n[services.web]\ninstall = \"npm install\"\ncommand = \"npx next dev --port 3000 --hostname 0.0.0.0\"\nport = 3000\nrestart = \"on-failure\"\n\n[ports]\nweb = 3000\n```\n\nRun `coast build` then `coast run dev-1` and you have an isolated environment.\n\n## Example Coastfiles\n\n### Simple bare-service project\n\nA Next.js app with no compose file. Coast installs Node, runs `npm install`, and starts the dev server directly.\n\n```toml\n[coast]\nname = \"my-crm\"\nruntime = \"dind\"\n\n[coast.setup]\npackages = [\"nodejs\", \"npm\"]\n\n[services.web]\ninstall = \"npm install\"\ncommand = \"npx next dev --turbopack --port 3002 --hostname 0.0.0.0\"\nport = 3002\nrestart = \"on-failure\"\n\n[ports]\nweb = 3002\n```\n\n### Full-stack compose project\n\nA multi-service project with shared databases, secrets, volume strategies, and custom setup.\n\n```toml\n[coast]\nname = \"my-app\"\ncompose = \"./infra/docker-compose.yml\"\nworktree_dir = [\".worktrees\", \"~/.codex/worktrees\"]\nprimary_port = \"web\"\n\n[coast.setup]\npackages = [\"nodejs\", \"npm\", \"python3\", \"curl\", \"git\", \"bash\", \"ca-certificates\", \"wget\"]\nrun = [\n \"ARCH=$(uname -m | sed 's/aarch64/arm64/' | sed 's/x86_64/amd64/') && wget -qO /tmp/go.tar.gz https://go.dev/dl/go1.24.1.linux-${ARCH}.tar.gz && tar -C /usr/local -xzf /tmp/go.tar.gz && rm /tmp/go.tar.gz\",\n \"GOBIN=/usr/local/bin go install github.com/air-verse/air@v1.61.7\",\n]\n\n[ports]\nweb = 3000\nbackend = 8080\npostgres = 5432\nredis = 6379\n\n[shared_services.postgres]\nimage = \"postgres:15\"\nports = [5432]\nvolumes = [\"infra_postgres_data:/var/lib/postgresql/data\"]\nenv = { POSTGRES_USER = 
\"myapp\", POSTGRES_PASSWORD = \"myapp_pass\" }\n\n[shared_services.redis]\nimage = \"redis:7\"\nports = [6379]\n\n[volumes.go_modules_cache]\nstrategy = \"shared\"\nservice = \"backend\"\nmount = \"/go/pkg/mod\"\n\n[secrets.db_password]\nextractor = \"env\"\nvar = \"DB_PASSWORD\"\ninject = \"env:DB_PASSWORD\"\n\n[omit]\nservices = [\"monitoring\", \"admin-panel\", \"nginx-proxy\"]\n\n[assign]\ndefault = \"none\"\n[assign.services]\nbackend = \"hot\"\nweb = \"hot\"\n```\n\n### Lightweight test variant (inheritance)\n\nExtends the base Coastfile but strips it down to only what's needed for running backend tests. No ports, no shared services, isolated databases.\n\n```toml\n[coast]\nextends = \"Coastfile\"\nautostart = false\n\n[unset]\nports = [\"web\", \"backend\", \"postgres\", \"redis\"]\nshared_services = [\"postgres\", \"redis\"]\n\n[omit]\nservices = [\"redis\", \"backend\", \"web\"]\n\n[volumes.postgres_data]\nstrategy = \"isolated\"\nservice = \"postgres\"\nmount = \"/var/lib/postgresql/data\"\n\n[assign]\ndefault = \"none\"\n[assign.services]\nbackend-test = \"rebuild\"\n```\n\n### Snapshot-seeded variant\n\nEach coast instance starts with a copy of the host's existing database volumes, then diverges independently.\n\n```toml\n[coast]\nextends = \"Coastfile\"\n\n[unset]\nshared_services = [\"postgres\", \"redis\", \"mongodb\"]\n\n[volumes.postgres_data]\nstrategy = \"isolated\"\nsnapshot_source = \"infra_postgres_data\"\nservice = \"postgres\"\nmount = \"/var/lib/postgresql/data\"\n\n[volumes.redis_data]\nstrategy = \"isolated\"\nsnapshot_source = \"infra_redis_data\"\nservice = \"redis\"\nmount = \"/data\"\n\n[volumes.mongodb_data]\nstrategy = \"isolated\"\nsnapshot_source = \"infra_mongodb_data\"\nservice = \"mongodb\"\nmount = \"/data/db\"\n```\n\n## Conventions\n\n- The file must be named `Coastfile` (capital C, no extension) and live at the project root.\n- Typed variants use the pattern `Coastfile.{type}` — for example `Coastfile.light`, 
`Coastfile.snap`. See [Inheritance and Types](INHERITANCE.md).\n- The reserved name `Coastfile.default` is not allowed.\n- TOML syntax is used throughout. All section headers use `[brackets]` and named entries use `[section.name]` (not array-of-tables).\n- You cannot use both `compose` and `[services]` in the same Coastfile — pick one.\n- Relative paths (for `compose`, `root`, etc.) are resolved against the Coastfile's parent directory.\n\n## Reference\n\n| Page | Sections | What it covers |\n|------|----------|----------------|\n| [Project and Setup](PROJECT.md) | `[coast]`, `[coast.setup]` | Name, compose path, runtime, worktree dir, container setup |\n| [Worktree Directories](WORKTREE_DIR.md) | `worktree_dir`, `default_worktree_dir` | Local and external worktree dirs, tilde paths, Codex/Claude integration |\n| [Ports](PORTS.md) | `[ports]`, `[egress]` | Port forwarding, egress declarations, primary port |\n| [Volumes](VOLUMES.md) | `[volumes.*]` | Isolated, shared, and snapshot-seeded volume strategies |\n| [Shared Services](SHARED_SERVICES.md) | `[shared_services.*]` | Host-level databases and infrastructure services |\n| [Secrets](SECRETS.md) | `[secrets.*]`, `[inject]` | Secret extraction, injection, and host env/file forwarding |\n| [Bare Services](SERVICES.md) | `[services.*]` | Running processes directly without Docker Compose |\n| [Agent Shell](AGENT_SHELL.md) | `[agent_shell]` | Containerized agent TUI runtimes |\n| [MCP Servers](MCP.md) | `[mcp.*]`, `[mcp_clients.*]` | Internal and host-proxied MCP servers, client connectors |\n| [Assign](ASSIGN.md) | `[assign]` | Branch-switch behavior per service |\n| [Inheritance and Types](INHERITANCE.md) | `extends`, `includes`, `[unset]`, `[omit]` | Typed Coastfiles, composition, and overrides |\n", "coastfiles/AGENT_SHELL.md": "# Agent Shell\n\n> **In most workflows, you do not need to containerize your coding agent.** Because Coasts share the [filesystem](../concepts_and_terminology/FILESYSTEM.md) with your host 
machine, the simplest approach is to run the agent on your host and use [`coast exec`](../concepts_and_terminology/EXEC_AND_DOCKER.md) for runtime-heavy tasks like integration tests. Agent shells are for cases where you specifically want the agent running inside the container — for example, to give it direct access to the inner Docker daemon or to fully isolate its environment.\n\nThe `[agent_shell]` section configures an agent TUI — such as Claude Code or Codex — to run inside the Coast container. When present, Coast automatically spawns a persistent PTY session running the configured command when an instance starts.\n\nFor the full picture of how agent shells work — the active agent model, sending input, lifecycle and recovery — see [Agent Shells](../concepts_and_terminology/AGENT_SHELLS.md).\n\n## Configuration\n\nThe section has a single required field: `command`.\n\n```toml\n[agent_shell]\ncommand = \"claude --dangerously-skip-permissions\"\n```\n\n### `command` (required)\n\nThe shell command to run in the agent PTY. This is typically a coding agent CLI that you've installed via `[coast.setup]`.\n\nThe command runs inside the DinD container at `/workspace` (the project root). 
It is not a compose service — it runs alongside your compose stack or bare services, not inside them.\n\n## Lifecycle\n\n- The agent shell spawns automatically on `coast run`.\n- In [Coastguard](../concepts_and_terminology/COASTGUARD.md), it appears as a persistent \"Agent\" tab that cannot be closed.\n- If the agent process exits, Coast can respawn it.\n- You can send input to a running agent shell via `coast agent-shell input`.\n\n## Examples\n\n### Claude Code\n\nInstall Claude Code in `[coast.setup]`, configure credentials via [secrets](SECRETS.md), then set up the agent shell:\n\n```toml\n[coast]\nname = \"my-app\"\ncompose = \"./docker-compose.yml\"\n\n[coast.setup]\npackages = [\"nodejs\", \"npm\", \"git\", \"bash\"]\nrun = [\n \"npm install -g @anthropic-ai/claude-code\",\n \"mkdir -p /root/.claude\",\n]\n\n[secrets.claude_credentials]\nextractor = \"keychain\"\nservice = \"Claude Code-credentials\"\ninject = \"file:/root/.claude/.credentials.json\"\n\n[agent_shell]\ncommand = \"cd /workspace; exec claude --dangerously-skip-permissions --effort high\"\n```\n\n### Simple agent shell\n\nA minimal agent shell for testing that the feature works:\n\n```toml\n[coast]\nname = \"test-agent\"\n\n[coast.setup]\npackages = [\"bash\"]\n\n[agent_shell]\ncommand = \"exec sh -c 'while true; do echo agent-heartbeat; sleep 5; done'\"\n```\n", @@ -389,11 +389,11 @@ "concepts_and_terminology/SHARED_SERVICES.md": "# Shared Services\n\nShared services are database and infrastructure containers (Postgres, Redis, MongoDB, etc.) that run on your host Docker daemon rather than inside a Coast. 
Coast instances connect to them over a bridge network, so every Coast talks to the same service on the same host volume.\n\n![Shared services in Coastguard](../../assets/coastguard-shared-services.png)\n*The Coastguard shared services tab showing host-managed Postgres, Redis, and MongoDB.*\n\n## How They Work\n\nWhen you declare a shared service in your Coastfile, Coast starts it on the host daemon and removes it from the compose stack that runs inside each Coast container. Coasts are then configured to route service-name traffic back to the shared container while preserving the service's container-side port inside the Coast.\n\n```text\nHost Docker daemon\n |\n +--> postgres (host volume: infra_postgres_data)\n +--> redis (host volume: infra_redis_data)\n +--> mongodb (host volume: infra_mongodb_data)\n |\n +--> Coast: dev-1 --bridge network--> host postgres, redis, mongodb\n +--> Coast: dev-2 --bridge network--> host postgres, redis, mongodb\n```\n\nBecause shared services reuse your existing host volumes, any data you already have from running `docker-compose up` locally is immediately available to your Coasts.\n\nThis distinction matters when you use mapped ports:\n\n```toml\n[shared_services.postgis]\nimage = \"ghcr.io/baosystems/postgis:12-3.3\"\nports = [\"5433:5432\"]\n```\n\n- On the host, the shared service is published on `localhost:5433`.\n- Inside every Coast, app containers still connect to `postgis:5432`.\n- A bare integer like `5432` is shorthand for the identity mapping `\"5432:5432\"`.\n\n## When to Use Shared Services\n\n- Your project has MCP integrations that connect to a local database — shared services let those continue to work without dynamic port discovery. If you publish the shared service on the same host port your tools already use (for example `ports = [5432]`), those tools keep working unchanged. 
If you publish it on a different host port (for example `\"5433:5432\"`), host-side tools should use that host port while Coasts continue using the container port.\n- You want lighter Coast instances since they do not need to run their own database containers.\n- You do not need data isolation between Coast instances (every instance sees the same data).\n- You are running coding agents on the host (see [Filesystem](FILESYSTEM.md)) and want them to access database state without routing through [`coast exec`](EXEC_AND_DOCKER.md). With shared services, the agent's existing database tools and MCPs work unchanged.\n\nSee the [Volume Topology](VOLUMES.md) page for alternatives when you do need isolation.\n\n## Volume Disambiguation Warning\n\nDocker volume names are not always globally unique. If you run `docker-compose up` from multiple different projects, the host volumes that Coast attaches to shared services may not be the ones you expect.\n\nBefore starting Coasts with shared services, make sure the last `docker-compose up` you ran was from the project you intend to use with Coasts. This ensures the host volumes match what your Coastfile expects.\n\n## Troubleshooting\n\nIf your shared services appear to be pointing at the wrong host volume:\n\n1. Open the [Coastguard](COASTGUARD.md) UI (`coast ui`).\n2. Navigate to the **Shared Services** tab.\n3. Select the affected services and click **Remove**.\n4. Click **Refresh Shared Services** to recreate them from your current Coastfile configuration.\n\nThis tears down and recreates the shared service containers, reattaching them to the correct host volumes.\n", "concepts_and_terminology/TROUBLESHOOTING.md": "# Troubleshooting\n\nMost issues with Coasts come from stale state, orphaned Docker resources, or a daemon that got out of sync. 
This page covers the escalation path from mild to nuclear.\n\n## Doctor\n\nIf things feel off — instances show as running but nothing responds, ports seem stuck, or the UI shows stale data — start with `coast doctor`:\n\n```bash\ncoast doctor\n```\n\nDoctor scans the state database and Docker for inconsistencies: orphaned instance records with missing containers, dangling containers with no state record, and shared services marked running that are actually dead. It fixes what it finds automatically.\n\nTo preview what it would do without changing anything:\n\n```bash\ncoast doctor --dry-run\n```\n\n## Daemon Restart\n\nIf the daemon itself seems unresponsive or you suspect it is in a bad state, restart it:\n\n```bash\ncoast daemon restart\n```\n\nThis sends a graceful shutdown signal, waits for the daemon to exit, and starts a fresh process. Your instances and state are preserved.\n\n## Removing a Single Project\n\nIf the problem is isolated to one project, you can remove its build artifacts and associated Docker resources without affecting anything else:\n\n```bash\ncoast rm-build my-project\n```\n\nThis deletes the project's artifact directory, Docker images, volumes, and containers. It asks for confirmation first. Pass `--force` to skip the prompt.\n\n## Missing Shared Service Images\n\nIf `coast run` fails while creating a shared service with an error like `No such image: postgres:15`, the image is missing from your host Docker daemon.\n\nThis most commonly happens when your `Coastfile` defines `shared_services` such as Postgres or Redis and Docker has not pulled those images yet.\n\nPull the missing image, then run the instance again:\n\n```bash\ndocker pull postgres:15\ndocker pull redis:7\ncoast run my-instance\n```\n\nIf you are not sure which image is missing, the failing `coast run` output will include the image name in the Docker error. 
After a failed provisioning attempt, Coasts cleans up the partial instance automatically, so seeing the instance return to `stopped` is expected.\n\n## Factory Reset with Nuke\n\nWhen nothing else works — or you just want a completely clean slate — `coast nuke` performs a full factory reset:\n\n```bash\ncoast nuke\n```\n\nThis will:\n\n1. Stop the `coastd` daemon.\n2. Remove **all** coast-managed Docker containers.\n3. Remove **all** coast-managed Docker volumes.\n4. Remove **all** coast-managed Docker networks.\n5. Remove **all** coast Docker images.\n6. Delete the entire `~/.coast/` directory (state database, builds, logs, secrets, image cache).\n7. Recreate `~/.coast/` and restart the daemon so coast is immediately usable again.\n\nBecause this destroys everything, you must type `nuke` at the confirmation prompt:\n\n```text\n$ coast nuke\nWARNING: This will permanently destroy ALL coast data:\n\n - Stop the coastd daemon\n - Remove all coast-managed Docker containers\n - Remove all coast-managed Docker volumes\n - Remove all coast-managed Docker networks\n - Remove all coast Docker images\n - Delete ~/.coast/ (state DB, builds, logs, secrets, image cache)\n\nType \"nuke\" to confirm:\n```\n\nPass `--force` to skip the prompt (useful in scripts):\n\n```bash\ncoast nuke --force\n```\n\nAfter a nuke, coast is ready to use — the daemon is running and the home directory exists. You just need to `coast build` and `coast run` your projects again.\n\n## Reporting Bugs\n\nIf you hit a problem that is not resolved by any of the above, include the daemon logs when reporting:\n\n```bash\ncoast daemon logs\n```\n", "concepts_and_terminology/VOLUMES.md": "# Volume Topology\n\nCoast provides three volume strategies that control how data-heavy services (databases, caches, etc.) store and share their data across Coast instances. 
Choosing the right strategy depends on how much isolation you need and how much overhead you can tolerate.\n\n## Shared Services\n\n[Shared services](SHARED_SERVICES.md) run on your host Docker daemon, outside of any Coast container. Services like Postgres, MongoDB, and Redis stay on the host machine and Coast instances route their calls back to the host over a bridge network.\n\n```text\nHost machine\n |\n +--> Postgres (host daemon, existing volume)\n +--> Redis (host daemon, existing volume)\n |\n +--> Coast: dev-1 --connects to--> host Postgres, host Redis\n +--> Coast: dev-2 --connects to--> host Postgres, host Redis\n```\n\nThere is no data isolation between instances — every Coast talks to the same database. In return you get:\n\n- Lighter Coast instances since they do not run their own database containers.\n- Your existing host volumes are reused directly, so any data you already have is available immediately.\n- MCP integrations that connect to your local database continue to work out of the box.\n\nThis is configured in your [Coastfile](COASTFILE_TYPES.md) under `[shared_services]`.\n\n## Shared Volumes\n\nShared volumes mount a single Docker volume that is shared across all Coast instances. The services themselves (Postgres, Redis, etc.) run inside each Coast container, but they all read and write to the same underlying volume.\n\n```text\nCoast: dev-1 --mounts--> shared volume \"my-project-postgres\"\nCoast: dev-2 --mounts--> shared volume \"my-project-postgres\"\n```\n\nThis isolates your Coast data from whatever is on your host machine, but instances still share data with each other. This is useful when you want a clean separation from your host development environment without the overhead of per-instance volumes.\n\n```toml\n[volumes.postgres_data]\nstrategy = \"shared\"\nservice = \"postgres\"\nmount = \"/var/lib/postgresql/data\"\n```\n\n## Isolated Volumes\n\nIsolated volumes give each Coast instance its own independent volume. 
No data is shared between instances or with the host. Each instance starts empty (or from a snapshot — see below) and diverges independently.\n\n```text\nCoast: dev-1 --mounts--> volume \"dev-1-postgres\"\nCoast: dev-2 --mounts--> volume \"dev-2-postgres\"\n```\n\nThis is the best choice for projects that are integration-test heavy and need true volume isolation between parallel environments. The tradeoff is slower startup and larger Coast builds since each instance maintains its own copy of the data.\n\n```toml\n[volumes.postgres_data]\nstrategy = \"isolated\"\nservice = \"postgres\"\nmount = \"/var/lib/postgresql/data\"\n```\n\n## Snapshotting\n\nBoth the shared and isolated strategies start with empty volumes by default. If you want instances to start with a copy of an existing host volume, set `snapshot_source` to the name of the Docker volume to copy from:\n\n```toml\n[volumes.postgres_data]\nstrategy = \"isolated\"\nsnapshot_source = \"infra_postgres_data\"\nservice = \"postgres\"\nmount = \"/var/lib/postgresql/data\"\n```\n\nThe snapshot is taken at [build time](BUILDS.md). After creation, each instance's volume diverges independently — mutations do not propagate back to the source or to other instances.\n\nCoast does not yet support runtime snapshotting (e.g., snapshotting a volume from a running instance). This is planned for a future release.\n", - "harnesses/README.md": "# Harnesses\n\nEach harness creates git worktrees in a different location. In Coasts, the\n[`worktree_dir`](../coastfiles/WORKTREE_DIR.md) array tells it where to look --\nincluding external paths like `~/.codex/worktrees` that require additional\nbind mounts.\n\nEach harness also has its own conventions for project-level instructions, skills, and commands. The matrix below shows what each harness supports so you know where to put guidance for Coasts. 
Each page covers the Coastfile configuration, the recommended file layout, and any caveats specific to that harness.\n\nIf one repo is used from multiple harnesses, see [Multiple Harnesses](MULTIPLE_HARNESSES.md).\n\n| Harness | Worktree location | Project instructions | Skills | Commands | Page |\n|---------|-------------------|----------------------|--------|----------|------|\n| OpenAI Codex | `~/.codex/worktrees` | `AGENTS.md` | `.agents/skills/` | Skills surface as `/` commands | [Codex](CODEX.md) |\n| Claude Code | `.claude/worktrees` | `CLAUDE.md` | `.claude/skills/` | `.claude/commands/` | [Claude Code](CLAUDE_CODE.md) |\n| Cursor | `~/.cursor/worktrees/` | `AGENTS.md` or `.cursor/rules/` | `.cursor/skills/` or `.agents/skills/` | `.cursor/commands/` | [Cursor](CURSOR.md) |\n| Conductor | `~/conductor/workspaces/` | `CLAUDE.md` | -- | -- | [Conductor](CONDUCTOR.md) |\n| T3 Code | `~/.t3/worktrees/` | `AGENTS.md` | `.agents/skills/` | -- | [T3 Code](T3_CODE.md) |\n| Shep | `~/.shep/repos/*/wt` | `CLAUDE.md` | `.agents/skills/` or `.claude/skills/` | -- | [Shep](SHEP.md) |\n\n## Skills vs Commands\n\nSkills and commands both let you define a reusable `/coasts` workflow. You can use either or both, depending on what the harness supports.\n\nIf your harness supports commands and you want an explicit `/coasts`\nentrypoint, one simple option is to add a command that reuses the skill.\nCommands are explicitly invoked by name, so you know exactly when the\n`/coasts` workflow runs. Skills can also be loaded automatically by the agent\nbased on context, which is useful but means you have less control over when the\ninstructions are pulled in.\n\nYou can use both. If you do, let the command reuse the skill instead of\nmaintaining a separate copy of the workflow.\n\nIf the harness only supports skills (T3 Code), use a skill. 
If it supports\nneither (Conductor), put the `/coasts` workflow directly in the project\ninstructions file.\n", + "harnesses/README.md": "# Harnesses\n\n```youtube\nAWhaeam9R7o\n```\n\nEach harness creates git worktrees in a different location. In Coasts, the\n[`worktree_dir`](../coastfiles/WORKTREE_DIR.md) array tells it where to look --\nincluding external paths like `~/.codex/worktrees` that require additional\nbind mounts.\n\nEach harness also has its own conventions for project-level instructions, skills, and commands. The matrix below shows what each harness supports so you know where to put guidance for Coasts. Each page covers the Coastfile configuration, the recommended file layout, and any caveats specific to that harness.\n\nIf one repo is used from multiple harnesses, see [Multiple Harnesses](MULTIPLE_HARNESSES.md).\n\n| Harness | Worktree location | Project instructions | Skills | Commands | Page |\n|---------|-------------------|----------------------|--------|----------|------|\n| OpenAI Codex | `~/.codex/worktrees` | `AGENTS.md` | `.agents/skills/` | Skills surface as `/` commands | [Codex](CODEX.md) |\n| Claude Code | `.claude/worktrees` | `CLAUDE.md` | `.claude/skills/` | `.claude/commands/` | [Claude Code](CLAUDE_CODE.md) |\n| Cursor | `~/.cursor/worktrees/` | `AGENTS.md` or `.cursor/rules/` | `.cursor/skills/` or `.agents/skills/` | `.cursor/commands/` | [Cursor](CURSOR.md) |\n| Conductor | `~/conductor/workspaces/` | `CLAUDE.md` | -- | -- | [Conductor](CONDUCTOR.md) |\n| T3 Code | `~/.t3/worktrees/` | `AGENTS.md` | `.agents/skills/` | -- | [T3 Code](T3_CODE.md) |\n| Shep | `~/.shep/repos/*/wt` | `CLAUDE.md` | `.agents/skills/` or `.claude/skills/` | -- | [Shep](SHEP.md) |\n\n## Skills vs Commands\n\nSkills and commands both let you define a reusable `/coasts` workflow. 
You can use either or both, depending on what the harness supports.\n\nIf your harness supports commands and you want an explicit `/coasts`\nentrypoint, one simple option is to add a command that reuses the skill.\nCommands are explicitly invoked by name, so you know exactly when the\n`/coasts` workflow runs. Skills can also be loaded automatically by the agent\nbased on context, which is useful but means you have less control over when the\ninstructions are pulled in.\n\nYou can use both. If you do, let the command reuse the skill instead of\nmaintaining a separate copy of the workflow.\n\nIf the harness only supports skills (T3 Code), use a skill. If it supports\nneither (Conductor), put the `/coasts` workflow directly in the project\ninstructions file.\n", "harnesses/CLAUDE_CODE.md": "# Claude Code\n\n## Quick setup\n\nRequires the [Coast CLI](../GETTING_STARTED.md). Copy this prompt into your\nagent's chat to set up Coasts automatically:\n\n```prompt-copy\nclaude_code_setup_prompt.txt\n```\n\nYou can also get the skill content from the CLI: `coast skills-prompt`.\n\nAfter setup, **start a new Claude Code session** — skills and `CLAUDE.md` changes\nare loaded at session start.\n\n---\n\n[Claude Code](https://docs.anthropic.com/en/docs/claude-code/overview) creates\nworktrees inside the project at `.claude/worktrees/`. 
Because that directory\nlives inside the repo, Coasts can discover and assign Claude Code worktrees\nwithout any external bind mount.\n\nClaude Code is also the harness here with the clearest split between three\nlayers for Coasts:\n\n- `CLAUDE.md` for short, always-on rules for working with Coasts\n- `.claude/skills/coasts/SKILL.md` for the reusable `/coasts` workflow\n- `.claude/commands/coasts.md` only when you want a command file as an extra\n entrypoint\n\n```youtube\nyjMFVoOiAW0\n```\n\n## Setup\n\nAdd `.claude/worktrees` to `worktree_dir`:\n\n```toml\n[coast]\nname = \"my-app\"\nworktree_dir = [\".worktrees\", \".claude/worktrees\"]\n```\n\nBecause `.claude/worktrees` is project-relative, no external bind mount is\nneeded.\n\n## Where Coasts guidance goes\n\n### `CLAUDE.md`\n\nPut the rules for Coasts that should apply on every task here. Keep this short and\noperational:\n\n- run `coast lookup` before the first runtime command in a session\n- use `coast exec` for tests, builds, and service commands\n- use `coast ps` and `coast logs` for runtime feedback\n- ask before creating or reassigning a Coast when no match exists\n\n### `.claude/skills/coasts/SKILL.md`\n\nPut the reusable `/coasts` workflow here. This is the right home for a flow\nthat:\n\n1. runs `coast lookup` and reuses the matching Coast\n2. falls back to `coast ls` when there is no match\n3. offers `coast run`, `coast assign`, `coast unassign`, `coast checkout`, and\n `coast ui`\n4. uses the Coast CLI directly as the contract instead of wrapping it\n\nIf this repo also uses Codex, T3 Code, or Cursor, see\n[Multiple Harnesses](MULTIPLE_HARNESSES.md) and keep the canonical skill in\n`.agents/skills/coasts/`, then expose it to Claude Code.\n\n### `.claude/commands/coasts.md`\n\nClaude Code also supports project command files. 
For docs about Coasts, treat\nthis as optional:\n\n- use it only when you specifically want a command file\n- one simple option is to have the command reuse the same skill\n- if you give the command its own separate instructions, you are taking on a\n second copy of the workflow to maintain\n\n## Example layout\n\n### Claude Code only\n\n```text\nCLAUDE.md\n.claude/worktrees/\n.claude/skills/coasts/SKILL.md\n```\n\nIf this repo also uses Codex, T3 Code, or Cursor, use the shared pattern in\n[Multiple Harnesses](MULTIPLE_HARNESSES.md) instead of duplicating it here,\nbecause duplicated provider-specific guidance gets harder to keep in sync every\ntime you add another harness.\n\n## What Coasts does\n\n- **Run** — `coast run ` creates a new Coast instance from the latest build. Use `coast run -w ` to create and assign a Claude Code worktree in one step. See [Run](../concepts_and_terminology/RUN.md).\n- **Discovery** — Coasts reads `.claude/worktrees` like any other local worktree\n directory.\n- **Naming** — Claude Code worktrees follow the same local worktree naming\n behavior as other in-repo worktrees in the Coasts UI and CLI.\n- **Assign** — `coast assign` can switch `/workspace` to a Claude Code worktree\n without any external bind-mount indirection.\n- **Gitignored sync** — Works normally because the worktrees live inside the\n repository tree.\n- **Orphan detection** — If Claude Code removes a worktree, Coasts can detect\n the missing gitdir and unassign it when needed.\n\n## Example\n\n```toml\n[coast]\nname = \"my-app\"\ncompose = \"./docker-compose.yml\"\nworktree_dir = [\".worktrees\", \".claude/worktrees\", \"~/.codex/worktrees\"]\nprimary_port = \"web\"\n\n[ports]\nweb = 3000\napi = 8080\n\n[assign]\ndefault = \"none\"\n[assign.services]\nweb = \"hot\"\napi = \"hot\"\n```\n\n- `.claude/worktrees/` — Claude Code worktrees\n- `~/.codex/worktrees/` — Codex worktrees if you also use Codex in this repo\n\n## Troubleshooting\n\n- **Worktree not found** — If 
Coasts expects a worktree to exist but cannot\n find it, verify that the Coastfile's `worktree_dir` includes\n `.claude/worktrees`. See [Worktree Directories](../coastfiles/WORKTREE_DIR.md)\n for syntax and path types.\n\n## Limitations\n\n- If you duplicate the same `/coasts` workflow across `CLAUDE.md`,\n `.claude/skills`, and `.claude/commands`, those copies will drift. Keep\n `CLAUDE.md` short and keep the reusable workflow in one skill.\n- If you want one repo to work cleanly in multiple harnesses, prefer the shared\n pattern in [Multiple Harnesses](MULTIPLE_HARNESSES.md).\n", "harnesses/CODEX.md": "# Codex\n\n## Quick setup\n\nRequires the [Coast CLI](../GETTING_STARTED.md). Copy this prompt into your\nagent's chat to set up Coasts automatically:\n\n```prompt-copy\ncodex_setup_prompt.txt\n```\n\nYou can also get the skill content from the CLI: `coast skills-prompt`.\n\nAfter setup, **quit and reopen Codex** for the new skill and `AGENTS.md` to take\neffect.\n\n---\n\n[Codex](https://developers.openai.com/codex/app/worktrees/) creates worktrees at `$CODEX_HOME/worktrees` (typically `~/.codex/worktrees`). Each worktree lives under an opaque hash directory like `~/.codex/worktrees/a0db/project-name`, starts on a detached HEAD, and is cleaned up automatically based on Codex's retention policy.\n\nFrom the [Codex docs](https://developers.openai.com/codex/app/worktrees/):\n\n> Can I control where worktrees are created?\n> Not today. Codex creates worktrees under `$CODEX_HOME/worktrees` so it can manage them consistently.\n\nBecause these worktrees live outside the project root, Coasts needs explicit\nconfiguration to discover and mount them.\n\n```youtube\nMDidmMQtaqU\n```\n\n## Setup\n\nAdd `~/.codex/worktrees` to `worktree_dir`:\n\n```toml\n[coast]\nname = \"my-app\"\nworktree_dir = [\".worktrees\", \"~/.codex/worktrees\"]\n```\n\nCoasts expands `~` at runtime and treats any path starting with `~/` or `/` as\nexternal. 
See [Worktree Directories](../coastfiles/WORKTREE_DIR.md) for\ndetails.\n\nAfter changing `worktree_dir`, existing instances must be **recreated** for the bind mount to take effect:\n\n```bash\ncoast rm my-instance\ncoast build\ncoast run my-instance\n```\n\nThe worktree listing updates immediately (Coasts reads the new Coastfile), but\nassigning to a Codex worktree requires the bind mount inside the container.\n\n## Where Coasts guidance goes\n\nUse Codex's project instruction file and shared skill layout for working with\nCoasts:\n\n- put the short Coast Runtime rules in `AGENTS.md`\n- put the reusable `/coasts` workflow in `.agents/skills/coasts/SKILL.md`\n- Codex surfaces that skill as the `/coasts` command\n- if you use Codex-specific metadata, keep it beside the skill in\n `.agents/skills/coasts/agents/openai.yaml`\n- do not create a separate project command file just for docs about Coasts; the\n skill is the reusable surface\n- if this repo also uses Cursor or Claude Code, keep the canonical skill in\n `.agents/skills/` and expose it from there. See\n [Multiple Harnesses](MULTIPLE_HARNESSES.md) and\n [Skills for Host Agents](../SKILLS_FOR_HOST_AGENTS.md).\n\nFor example, a minimal `.agents/skills/coasts/agents/openai.yaml` could look\nlike this:\n\n```yaml\ninterface:\n display_name: \"Coasts\"\n short_description: \"Inspect, assign, and open Coasts for this repo\"\n default_prompt: \"Use this skill when the user wants help finding, assigning, or opening a Coast.\"\n\npolicy:\n allow_implicit_invocation: false\n```\n\nThat keeps the skill visible in Codex with a nicer label and makes `/coasts` an\nexplicit command. Only add `dependencies.tools` if the skill also needs MCP\nservers or other OpenAI-managed tool wiring.\n\n## What Coasts does\n\n- **Run** -- `coast run ` creates a new Coast instance from the latest build. Use `coast run -w ` to create and assign a Codex worktree in one step. 
See [Run](../concepts_and_terminology/RUN.md).\n- **Bind mount** -- At container creation, Coasts mounts\n `~/.codex/worktrees` into the container at `/host-external-wt/{index}`.\n- **Discovery** -- `git worktree list --porcelain` is repo-scoped, so only Codex worktrees belonging to the current project appear, even though the directory contains worktrees for many projects.\n- **Naming** -- Detached HEAD worktrees show as their relative path within the external dir (`a0db/my-app`, `eca7/my-app`). Branch-based worktrees show the branch name.\n- **Assign** -- `coast assign` remounts `/workspace` from the external bind mount path.\n- **Gitignored sync** -- Runs on the host filesystem with absolute paths, works without the bind mount.\n- **Orphan detection** -- The git watcher scans external directories\n recursively, filtering by `.git` gitdir pointers. If Codex deletes a\n worktree, Coasts auto-unassigns the instance.\n\n## Example\n\n```toml\n[coast]\nname = \"my-app\"\ncompose = \"./docker-compose.yml\"\nworktree_dir = [\".worktrees\", \".claude/worktrees\", \"~/.codex/worktrees\"]\nprimary_port = \"web\"\n\n[ports]\nweb = 3000\napi = 8080\n\n[assign]\ndefault = \"none\"\n[assign.services]\nweb = \"hot\"\napi = \"hot\"\n```\n\n- `.claude/worktrees/` -- Claude Code (local, no special handling)\n- `~/.codex/worktrees/` -- Codex (external, bind-mounted)\n\n## Troubleshooting\n\n- **Worktree not found** — If Coasts expects a worktree to exist but cannot\n find it, verify that the Coastfile's `worktree_dir` includes\n `~/.codex/worktrees`. See [Worktree Directories](../coastfiles/WORKTREE_DIR.md)\n for syntax and path types.\n\n## Limitations\n\n- Codex may clean up worktrees at any time. The orphan detection in Coasts\n handles this gracefully.\n", "harnesses/CONDUCTOR.md": "# Conductor\n\n## Quick setup\n\nRequires the [Coast CLI](../GETTING_STARTED.md). 
Copy this prompt into your\nagent's chat to set up Coasts automatically:\n\n```prompt-copy\nconductor_setup_prompt.txt\n```\n\nYou can also get the skill content from the CLI: `coast skills-prompt`.\n\n> **Important:** Conductor runs each session in an isolated git worktree. The\n> setup prompt creates files that only exist in the current workspace — commit\n> and merge them into your main branch or they won't be available in new\n> sessions.\n\nAfter setup, **fully close and reopen Conductor** for changes to take effect. If\nthe `/coasts` command does not appear, close and reopen again.\n\n```youtube\nmbwilJHlanQ\n```\n\n## Setup\n\nAdd `~/conductor/workspaces/` to `worktree_dir`. Unlike Codex (which stores all projects under one flat directory), Conductor nests worktrees under a per-project subdirectory, so the path must include the project name. In the example below, `my-app` must match the actual folder name under `~/conductor/workspaces/` for your repo.\n\n```toml\n[coast]\nname = \"my-app\"\nworktree_dir = [\".worktrees\", \"~/conductor/workspaces/my-app\"]\n```\n\nConductor allows you to configure the workspaces path per-repository, so the default `~/conductor/workspaces` may not match your setup. Check your Conductor repository settings to find the actual path and adjust accordingly — the principle is the same regardless of where the directory lives.\n\nIf you have more than one Conductor project configured for the same repository, each project creates workspaces under its own subdirectory (e.g. `~/conductor/workspaces/my-app-frontend`, `~/conductor/workspaces/my-app-backend`). The `worktree_dir` entry must match the directory name Conductor actually creates, so you may need multiple entries or need to update the path when switching between projects.\n\nCoasts expands `~` at runtime and treats any path starting with `~/` or `/` as\nexternal. 
See [Worktree Directories](../coastfiles/WORKTREE_DIR.md) for\ndetails.\n\nAfter changing `worktree_dir`, existing instances must be **recreated** for the bind mount to take effect:\n\n```bash\ncoast rm my-instance\ncoast build\ncoast run my-instance\n```\n\nThe worktree listing updates immediately (Coasts reads the new Coastfile), but\nassigning to a Conductor worktree requires the bind mount inside the container.\n\n## Where Coasts guidance goes\n\nTreat Conductor as its own harness for working with Coasts:\n\n- put the short Coast Runtime rules in `CLAUDE.md`\n- use Conductor Repository Settings scripts for setup or run behavior that is\n actually Conductor-specific\n- do not assume full Claude Code project command or project skill behavior here\n- if you add a command and it does not appear, fully close and reopen\n Conductor before testing again\n- if this repo also uses other harnesses, see\n [Multiple Harnesses](MULTIPLE_HARNESSES.md) and\n [Skills for Host Agents](../SKILLS_FOR_HOST_AGENTS.md) for ways to keep the\n shared `/coasts` workflow in one place\n\n## What Coasts does\n\n- **Run** — `coast run ` creates a new Coast instance from the latest build. Use `coast run -w ` to create and assign a Conductor worktree in one step. See [Run](../concepts_and_terminology/RUN.md).\n- **Bind mount** — At container creation, Coasts mounts\n `~/conductor/workspaces/` into the container at\n `/host-external-wt/{index}`.\n- **Discovery** — `git worktree list --porcelain` is repo-scoped, so only worktrees belonging to the current project appear.\n- **Naming** — Conductor worktrees use named branches, so they appear by branch\n name in the Coasts UI and CLI (e.g., `scroll-to-bottom-btn`). 
A branch can\n only be checked out in one Conductor workspace at a time.\n- **Assign** — `coast assign` remounts `/workspace` from the external bind mount path.\n- **Gitignored sync** — Runs on the host filesystem with absolute paths, works without the bind mount.\n- **Orphan detection** — The git watcher scans external directories\n recursively, filtering by `.git` gitdir pointers. If Conductor archives or\n deletes a workspace, Coasts auto-unassigns the instance.\n\n## Example\n\n```toml\n[coast]\nname = \"my-app\"\ncompose = \"./docker-compose.yml\"\nworktree_dir = [\"~/conductor/workspaces/my-app\"]\nprimary_port = \"web\"\n\n[ports]\nweb = 3000\napi = 8080\n\n[assign]\ndefault = \"none\"\n[assign.services]\nweb = \"hot\"\napi = \"hot\"\n```\n\n- `~/conductor/workspaces/my-app/` — Conductor (external, bind-mounted; replace `my-app` with your repo folder name)\n\n## Troubleshooting\n\n- **Worktree not found** — If Coasts expects a worktree to exist but cannot\n find it, verify that the Coastfile's `worktree_dir` includes the correct\n `~/conductor/workspaces/` path. The `` segment\n must match the actual folder name Conductor creates under\n `~/conductor/workspaces/`. See\n [Worktree Directories](../coastfiles/WORKTREE_DIR.md) for syntax and path\n types.\n- **Multiple projects for the same repo** — If more than one Conductor project\n is configured for the same repository, each project creates workspaces under\n a different subdirectory. The `worktree_dir` must be updated to match the\n directory Conductor dynamically creates for the active project. If you switch\n between projects, the path changes and the Coastfile needs to reflect that.\n\n## Conductor Env Vars\n\n- Avoid relying on Conductor-specific environment variables (e.g.,\n `CONDUCTOR_PORT`, `CONDUCTOR_WORKSPACE_PATH`) for runtime configuration\n inside Coasts. 
Coasts manages ports, workspace paths, and service discovery\n independently — use Coastfile `[ports]` and `coast exec` instead.", - "harnesses/CURSOR.md": "# Cursor\n\n## Quick setup\n\nRequires the [Coast CLI](../GETTING_STARTED.md). Copy this prompt into your\nagent's chat to set up Coasts automatically:\n\n```prompt-copy\ncursor_setup_prompt.txt\n```\n\nYou can also get the skill content from the CLI: `coast skills-prompt`.\n\nAfter setup, **restart Cursor** for the skill and rules changes to take effect.\n\n---\n\n[Cursor](https://cursor.com/docs/agent/overview) can work directly in your\ncurrent checkout, and its Parallel Agents feature can also create git\nworktrees under `~/.cursor/worktrees//`.\n\nFor docs about Coasts, that means there are two setup cases:\n\n- if you are just using Cursor in the current checkout, no Cursor-specific\n `worktree_dir` entry is required\n- if you use Cursor Parallel Agents, add the Cursor worktree directory to\n `worktree_dir` so Coasts can discover and assign those worktrees\n\n## Setup\n\n### Current checkout only\n\nIf Cursor is just editing the checkout you already opened, Coasts does not need\nany special Cursor-specific worktree path. Coasts will treat that checkout like\nany other local repository root.\n\n### Cursor Parallel Agents\n\nIf you use Parallel Agents, add `~/.cursor/worktrees/` to\n`worktree_dir`:\n\n```toml\n[coast]\nname = \"my-app\"\nworktree_dir = [\".worktrees\", \"~/.cursor/worktrees/my-app\"]\n```\n\nCursor stores each agent worktree beneath that per-project directory. 
Coasts\nexpands `~` at runtime and treats the path as external, so existing instances\nmust be recreated for the bind mount to take effect:\n\n```bash\ncoast rm my-instance\ncoast build\ncoast run my-instance\n```\n\nThe worktree listing updates immediately after the Coastfile change, but\nassigning to a Cursor Parallel Agent worktree requires the external bind mount\ninside the container.\n\n## Where Coasts guidance goes\n\n### `AGENTS.md` or `.cursor/rules/coast.md`\n\nPut the short, always-on Coast Runtime rules here:\n\n- use `AGENTS.md` if you want the most portable project instructions\n- use `.cursor/rules/coast.md` if you want Cursor-native project rules and\n settings UI support\n- do not duplicate the same Coast Runtime block in both unless you have a clear\n reason\n\n### `.cursor/skills/coasts/SKILL.md` or shared `.agents/skills/coasts/SKILL.md`\n\nPut the reusable `/coasts` workflow here:\n\n- for a Cursor-only repo, `.cursor/skills/coasts/SKILL.md` is a natural home\n- for a multi-harness repo, keep the canonical skill in\n `.agents/skills/coasts/SKILL.md`; Cursor can load that directly\n- the skill should own the real `/coasts` workflow: `coast lookup`,\n `coast ls`, `coast run`, `coast assign`, `coast unassign`,\n `coast checkout`, and `coast ui`\n\n### `.cursor/commands/coasts.md`\n\nCursor also supports project commands. 
For docs about Coasts, treat commands as\noptional:\n\n- add a command only when you want an explicit `/coasts` entrypoint\n- one simple option is to have the command reuse the same skill\n- if you give the command its own separate instructions, you are taking on a\n second copy of the workflow to maintain\n\n### `.cursor/worktrees.json`\n\nUse `.cursor/worktrees.json` for Cursor's own worktree bootstrap, not for Coasts\npolicy:\n\n- install dependencies\n- copy or symlink `.env` files\n- run database migrations or other one-time bootstrap steps\n\nDo not move the Coast Runtime rules or Coast CLI workflow into\n`.cursor/worktrees.json`.\n\n## Example layout\n\n### Cursor only\n\n```text\nAGENTS.md\n.cursor/skills/coasts/SKILL.md\n.cursor/commands/coasts.md # optional\n.cursor/rules/coast.md # optional alternative to AGENTS.md\n.cursor/worktrees.json # optional, for Parallel Agents bootstrap\n```\n\n### Cursor plus other harnesses\n\n```text\nAGENTS.md\nCLAUDE.md\n.agents/skills/coasts/SKILL.md\n.agents/skills/coasts/agents/openai.yaml\n.claude/skills/coasts -> ../../.agents/skills/coasts\n.cursor/commands/coasts.md # optional\n```\n\n## What Coasts does\n\n- **Run** — `coast run ` creates a new Coast instance from the latest build. Use `coast run -w ` to create and assign a Cursor worktree in one step. 
See [Run](../concepts_and_terminology/RUN.md).\n- **Current checkout** — No special Cursor handling is required when Cursor is\n working directly in the repo you opened.\n- **Bind mount** — For Parallel Agents, Coasts mounts\n `~/.cursor/worktrees/` into the container at\n `/host-external-wt/{index}`.\n- **Discovery** — `git worktree list --porcelain` remains repo-scoped, so Coasts\n only shows Cursor worktrees that belong to the current project.\n- **Naming** — Cursor Parallel Agent worktrees appear by their branch names in\n Coasts' CLI and UI.\n- **Assign** — `coast assign` remounts `/workspace` from the external bind\n mount path when a Cursor worktree is selected.\n- **Gitignored sync** — Continues to work on the host filesystem with absolute\n paths.\n- **Orphan detection** — If Cursor cleans up old worktrees, Coasts can detect\n the missing gitdir and unassign them when needed.\n\n## Example\n\n```toml\n[coast]\nname = \"my-app\"\ncompose = \"./docker-compose.yml\"\nworktree_dir = [\".worktrees\", \".claude/worktrees\", \"~/.codex/worktrees\", \"~/.cursor/worktrees/my-app\"]\nprimary_port = \"web\"\n\n[ports]\nweb = 3000\napi = 8080\n\n[assign]\ndefault = \"none\"\n[assign.services]\nweb = \"hot\"\napi = \"hot\"\n```\n\n- `.claude/worktrees/` — Claude Code worktrees\n- `~/.codex/worktrees/` — Codex worktrees\n- `~/.cursor/worktrees/my-app/` — Cursor Parallel Agent worktrees\n\n## Troubleshooting\n\n- **Worktree not found** — If Coasts expects a worktree to exist but cannot\n find it, verify that the Coastfile's `worktree_dir` includes\n `~/.cursor/worktrees/` and that `` matches the\n actual folder name under `~/.cursor/worktrees/`. 
See\n [Worktree Directories](../coastfiles/WORKTREE_DIR.md) for syntax and path\n types.\n\n## Limitations\n\n- If you are not using Cursor Parallel Agents, do not add\n `~/.cursor/worktrees/` just because you happen to be editing in\n Cursor.\n- Keep the Coast Runtime rules in one always-on place: `AGENTS.md` or\n `.cursor/rules/coast.md`. Duplicating both invites drift.\n- Keep the reusable `/coasts` workflow in a skill. `.cursor/worktrees.json` is\n for Cursor bootstrap, not Coasts policy.\n- If one repo is shared across Cursor, Codex, Claude Code, or T3 Code, prefer\n the shared layout in [Multiple Harnesses](MULTIPLE_HARNESSES.md).\n", + "harnesses/CURSOR.md": "# Cursor\n\n```youtube\nZmbcZ_QfO6w\n```\n\n## Quick setup\n\nRequires the [Coast CLI](../GETTING_STARTED.md). Copy this prompt into your\nagent's chat to set up Coasts automatically:\n\n```prompt-copy\ncursor_setup_prompt.txt\n```\n\nYou can also get the skill content from the CLI: `coast skills-prompt`.\n\nAfter setup, **restart Cursor** for the skill and rules changes to take effect.\n\n---\n\n[Cursor](https://cursor.com/docs/agent/overview) can work directly in your\ncurrent checkout, and its Parallel Agents feature can also create git\nworktrees under `~/.cursor/worktrees//`.\n\nFor docs about Coasts, that means there are two setup cases:\n\n- if you are just using Cursor in the current checkout, no Cursor-specific\n `worktree_dir` entry is required\n- if you use Cursor Parallel Agents, add the Cursor worktree directory to\n `worktree_dir` so Coasts can discover and assign those worktrees\n\n## Setup\n\n### Current checkout only\n\nIf Cursor is just editing the checkout you already opened, Coasts does not need\nany special Cursor-specific worktree path. 
Coasts will treat that checkout like\nany other local repository root.\n\n### Cursor Parallel Agents\n\nIf you use Parallel Agents, add `~/.cursor/worktrees/` to\n`worktree_dir`:\n\n```toml\n[coast]\nname = \"my-app\"\nworktree_dir = [\".worktrees\", \"~/.cursor/worktrees/my-app\"]\n```\n\nCursor stores each agent worktree beneath that per-project directory. Coasts\nexpands `~` at runtime and treats the path as external, so existing instances\nmust be recreated for the bind mount to take effect:\n\n```bash\ncoast rm my-instance\ncoast build\ncoast run my-instance\n```\n\nThe worktree listing updates immediately after the Coastfile change, but\nassigning to a Cursor Parallel Agent worktree requires the external bind mount\ninside the container.\n\n## Where Coasts guidance goes\n\n### `AGENTS.md` or `.cursor/rules/coast.md`\n\nPut the short, always-on Coast Runtime rules here:\n\n- use `AGENTS.md` if you want the most portable project instructions\n- use `.cursor/rules/coast.md` if you want Cursor-native project rules and\n settings UI support\n- do not duplicate the same Coast Runtime block in both unless you have a clear\n reason\n\n### `.cursor/skills/coasts/SKILL.md` or shared `.agents/skills/coasts/SKILL.md`\n\nPut the reusable `/coasts` workflow here:\n\n- for a Cursor-only repo, `.cursor/skills/coasts/SKILL.md` is a natural home\n- for a multi-harness repo, keep the canonical skill in\n `.agents/skills/coasts/SKILL.md`; Cursor can load that directly\n- the skill should own the real `/coasts` workflow: `coast lookup`,\n `coast ls`, `coast run`, `coast assign`, `coast unassign`,\n `coast checkout`, and `coast ui`\n\n### `.cursor/commands/coasts.md`\n\nCursor also supports project commands. 
For docs about Coasts, treat commands as\noptional:\n\n- add a command only when you want an explicit `/coasts` entrypoint\n- one simple option is to have the command reuse the same skill\n- if you give the command its own separate instructions, you are taking on a\n second copy of the workflow to maintain\n\n### `.cursor/worktrees.json`\n\nUse `.cursor/worktrees.json` for Cursor's own worktree bootstrap, not for Coasts\npolicy:\n\n- install dependencies\n- copy or symlink `.env` files\n- run database migrations or other one-time bootstrap steps\n\nDo not move the Coast Runtime rules or Coast CLI workflow into\n`.cursor/worktrees.json`.\n\n## Example layout\n\n### Cursor only\n\n```text\nAGENTS.md\n.cursor/skills/coasts/SKILL.md\n.cursor/commands/coasts.md # optional\n.cursor/rules/coast.md # optional alternative to AGENTS.md\n.cursor/worktrees.json # optional, for Parallel Agents bootstrap\n```\n\n### Cursor plus other harnesses\n\n```text\nAGENTS.md\nCLAUDE.md\n.agents/skills/coasts/SKILL.md\n.agents/skills/coasts/agents/openai.yaml\n.claude/skills/coasts -> ../../.agents/skills/coasts\n.cursor/commands/coasts.md # optional\n```\n\n## What Coasts does\n\n- **Run** — `coast run ` creates a new Coast instance from the latest build. Use `coast run -w ` to create and assign a Cursor worktree in one step. 
See [Run](../concepts_and_terminology/RUN.md).\n- **Current checkout** — No special Cursor handling is required when Cursor is\n working directly in the repo you opened.\n- **Bind mount** — For Parallel Agents, Coasts mounts\n `~/.cursor/worktrees/` into the container at\n `/host-external-wt/{index}`.\n- **Discovery** — `git worktree list --porcelain` remains repo-scoped, so Coasts\n only shows Cursor worktrees that belong to the current project.\n- **Naming** — Cursor Parallel Agent worktrees appear by their branch names in\n Coasts' CLI and UI.\n- **Assign** — `coast assign` remounts `/workspace` from the external bind\n mount path when a Cursor worktree is selected.\n- **Gitignored sync** — Continues to work on the host filesystem with absolute\n paths.\n- **Orphan detection** — If Cursor cleans up old worktrees, Coasts can detect\n the missing gitdir and unassign them when needed.\n\n## Example\n\n```toml\n[coast]\nname = \"my-app\"\ncompose = \"./docker-compose.yml\"\nworktree_dir = [\".worktrees\", \".claude/worktrees\", \"~/.codex/worktrees\", \"~/.cursor/worktrees/my-app\"]\nprimary_port = \"web\"\n\n[ports]\nweb = 3000\napi = 8080\n\n[assign]\ndefault = \"none\"\n[assign.services]\nweb = \"hot\"\napi = \"hot\"\n```\n\n- `.claude/worktrees/` — Claude Code worktrees\n- `~/.codex/worktrees/` — Codex worktrees\n- `~/.cursor/worktrees/my-app/` — Cursor Parallel Agent worktrees\n\n## Troubleshooting\n\n- **Worktree not found** — If Coasts expects a worktree to exist but cannot\n find it, verify that the Coastfile's `worktree_dir` includes\n `~/.cursor/worktrees/` and that `` matches the\n actual folder name under `~/.cursor/worktrees/`. 
See\n [Worktree Directories](../coastfiles/WORKTREE_DIR.md) for syntax and path\n types.\n\n## Limitations\n\n- If you are not using Cursor Parallel Agents, do not add\n `~/.cursor/worktrees/` just because you happen to be editing in\n Cursor.\n- Keep the Coast Runtime rules in one always-on place: `AGENTS.md` or\n `.cursor/rules/coast.md`. Duplicating both invites drift.\n- Keep the reusable `/coasts` workflow in a skill. `.cursor/worktrees.json` is\n for Cursor bootstrap, not Coasts policy.\n- If one repo is shared across Cursor, Codex, Claude Code, or T3 Code, prefer\n the shared layout in [Multiple Harnesses](MULTIPLE_HARNESSES.md).\n", "harnesses/MULTIPLE_HARNESSES.md": "# Multiple Harnesses\n\nIf one repository is used from more than one harness, one way to consolidate\nthe Coasts setup is to keep the shared `/coasts` workflow in one place and keep\nthe harness-specific always-on rules in the files for each harness.\n\n## Recommended layout\n\n```text\nAGENTS.md\nCLAUDE.md\n.cursor/rules/coast.md # optional Cursor-native always-on rules\n.agents/skills/coasts/SKILL.md\n.agents/skills/coasts/agents/openai.yaml\n.claude/skills/coasts -> ../../.agents/skills/coasts\n.cursor/commands/coasts.md # optional, thin, harness-specific\n.claude/commands/coasts.md # optional, thin, harness-specific\n```\n\nUse this layout like this:\n\n- `AGENTS.md` — short, always-on rules for working with Coasts in Codex and T3\n Code\n- `.cursor/rules/coast.md` — optional Cursor-native always-on rules\n- `CLAUDE.md` — short, always-on rules for working with Coasts in Claude Code\n and Conductor\n- `.agents/skills/coasts/SKILL.md` — canonical reusable `/coasts` workflow\n- `.agents/skills/coasts/agents/openai.yaml` — optional Codex/OpenAI metadata\n- `.claude/skills/coasts` — Claude-facing mirror or symlink when Claude Code\n also needs the same skill\n- `.cursor/commands/coasts.md` — optional Cursor command file; one simple\n option is to have it reuse the same skill\n- 
`.claude/commands/coasts.md` — optional explicit command file; one simple\n option is to have it reuse the same skill\n\n## Step-by-step\n\n1. Put the Coast Runtime rules in the always-on instruction files.\n - `AGENTS.md`, `CLAUDE.md`, or `.cursor/rules/coast.md` should answer the\n \"every task\" rules: run `coast lookup` first, use `coast exec`, read logs\n with `coast logs`, ask before `coast assign` or `coast run` when there is\n no match.\n2. Create one canonical skill for Coasts.\n - Put the reusable `/coasts` workflow in `.agents/skills/coasts/SKILL.md`.\n - Use the Coast CLI directly inside that skill: `coast lookup`,\n `coast ls`, `coast run`, `coast assign`, `coast unassign`,\n `coast checkout`, and `coast ui`.\n3. Expose that skill only where a harness needs a different path.\n - Codex, T3 Code, and Cursor can all use `.agents/skills/` directly.\n - Claude Code needs `.claude/skills/`, so mirror or symlink the canonical\n skill into that location.\n4. Add a command file only if you want an explicit `/coasts` entrypoint.\n - If you create `.claude/commands/coasts.md` or\n `.cursor/commands/coasts.md`, one simple option is to have the command\n reuse the same skill.\n - If you give the command its own separate instructions, you are taking on a\n second copy of the workflow to maintain.\n5. Keep Conductor-specific setup in Conductor, not in the skill.\n - Use Conductor Repository Settings scripts for bootstrap or run behavior\n that belongs to Conductor itself.\n - Keep Coasts policy and use of the `coast` CLI in `CLAUDE.md` and the\n shared skill.\n\n## Concrete `/coasts` example\n\nA good shared `coasts` skill should do three jobs:\n\n1. `Use Existing Coast`\n - run `coast lookup`\n - if a match exists, use `coast exec`, `coast ps`, and `coast logs`\n2. `Manage Assignment`\n - run `coast ls`\n - offer `coast run`, `coast assign`, `coast unassign`, or\n `coast checkout`\n - ask before reusing or disrupting an existing slot\n3. 
`Open UI`\n - run `coast ui`\n\nThat is the right place for the `/coasts` workflow. The always-on files should\nonly hold the short rules that must apply even when the skill is never invoked.\n\n## Symlink pattern\n\nIf you want Claude Code to reuse the same skill as Codex, T3 Code, or Cursor,\none option is a symlink:\n\n```bash\nmkdir -p .claude/skills\nln -s ../../.agents/skills/coasts .claude/skills/coasts\n```\n\nA checked-in mirror is also fine if your team prefers not to use symlinks. The\nmain goal is just to avoid unnecessary drift between copies.\n\n## Harness-specific cautions\n\n- Claude Code: project skills and optional project commands are both valid, but\n keep the logic in the skill.\n- Cursor: use `AGENTS.md` or `.cursor/rules/coast.md` for the short Coast\n Runtime rules, use a skill for the reusable workflow, and keep\n `.cursor/commands` optional.\n- Conductor: treat it as `CLAUDE.md` plus Conductor scripts and settings first.\n If you add a command and it does not appear, fully close and reopen the app\n before checking again.\n- T3 Code: this is the thinnest harness surface here. Use the Codex-style\n `AGENTS.md` plus `.agents/skills` pattern, and do not invent a separate\n T3-specific command layout for docs about Coasts.\n- Codex: keep `AGENTS.md` short and put the reusable workflow in\n `.agents/skills`.\n", "harnesses/SHEP.md": "# Shep\n\n## Quick setup\n\nRequires the [Coast CLI](../GETTING_STARTED.md). Copy this prompt into your\nagent's chat to set up Coasts automatically:\n\n```prompt-copy\nshep_setup_prompt.txt\n```\n\nYou can also get the skill content from the CLI: `coast skills-prompt`.\n\nAfter setup, **quit and reopen your editor** for the new skill and project\ninstructions to take effect.\n\n---\n\n[Shep](https://shep-ai.github.io/cli/) creates worktrees at `~/.shep/repos/{hash}/wt/{branch-slug}`. 
The hash is the first 16 hex characters of the SHA-256 of the repository's absolute path, so it is deterministic per-repo but opaque. All worktrees for a given repo share the same hash and are differentiated by the `wt/{branch-slug}` subdirectory.\n\nFrom the Shep CLI, `shep feat show ` prints the worktree path, or\n`ls ~/.shep/repos` lists the per-repo hash directories.\n\nBecause the hash varies per repo, Coasts uses a **glob pattern** to discover\nshep worktrees without requiring the user to hard-code the hash.\n\n## Setup\n\nAdd `~/.shep/repos/*/wt` to `worktree_dir`:\n\n```toml\n[coast]\nname = \"my-app\"\nworktree_dir = [\".worktrees\", \"~/.shep/repos/*/wt\"]\n```\n\nThe `*` matches the per-repo hash directory. At runtime Coasts expands the glob,\nfinds the matching directory (e.g. `~/.shep/repos/a21f0cda9ab9d456/wt`), and\nbind-mounts it into the container. See\n[Worktree Directories](../coastfiles/WORKTREE_DIR.md) for full details on glob\npatterns.\n\nAfter changing `worktree_dir`, existing instances must be **recreated** for the bind mount to take effect:\n\n```bash\ncoast rm my-instance\ncoast build\ncoast run my-instance\n```\n\nThe worktree listing updates immediately (Coasts reads the new Coastfile), but\nassigning to a Shep worktree requires the bind mount inside the container.\n\n## Where Coasts guidance goes\n\nShep wraps Claude Code under the hood, so follow the Claude Code conventions:\n\n- put the short Coast Runtime rules in `CLAUDE.md`\n- put the reusable `/coasts` workflow in `.claude/skills/coasts/SKILL.md` or\n the shared `.agents/skills/coasts/SKILL.md`\n- if this repo also uses other harnesses, see\n [Multiple Harnesses](MULTIPLE_HARNESSES.md) and\n [Skills for Host Agents](../SKILLS_FOR_HOST_AGENTS.md)\n\n## What Coasts does\n\n- **Run** -- `coast run ` creates a new Coast instance from the latest build. Use `coast run -w ` to create and assign a Shep worktree in one step. 
See [Run](../concepts_and_terminology/RUN.md).\n- **Bind mount** -- At container creation, Coasts resolves the glob\n `~/.shep/repos/*/wt` and mounts each matching directory into the container at\n `/host-external-wt/{index}`.\n- **Discovery** -- `git worktree list --porcelain` is repo-scoped, so only\n worktrees belonging to the current project appear.\n- **Naming** -- Shep worktrees use named branches, so they appear by branch\n name in the Coasts UI and CLI (e.g., `feat-green-background`).\n- **Assign** -- `coast assign` remounts `/workspace` from the external bind mount path.\n- **Gitignored sync** -- Runs on the host filesystem with absolute paths, works without the bind mount.\n- **Orphan detection** -- The git watcher scans external directories\n recursively, filtering by `.git` gitdir pointers. If Shep deletes a\n worktree, Coasts auto-unassigns the instance.\n\n## Example\n\n```toml\n[coast]\nname = \"my-app\"\ncompose = \"./docker-compose.yml\"\nworktree_dir = [\".worktrees\", \"~/.shep/repos/*/wt\"]\nprimary_port = \"web\"\n\n[ports]\nweb = 3000\napi = 8080\n\n[assign]\ndefault = \"none\"\n[assign.services]\nweb = \"hot\"\napi = \"hot\"\n```\n\n- `~/.shep/repos/*/wt` -- Shep (external, bind-mounted via glob expansion)\n\n## Shep path structure\n\n```\n~/.shep/repos/\n {sha256-of-repo-path-first-16-chars}/\n wt/\n {branch-slug}/ <-- git worktree\n {branch-slug}/\n```\n\nKey points:\n- Same repo = same hash every time (deterministic, not random)\n- Different repos = different hashes\n- Path separators are normalized to `/` before hashing\n- The hash can be found via `shep feat show ` or `ls ~/.shep/repos`\n\n## Troubleshooting\n\n- **Worktree not found** — If Coasts expects a worktree to exist but cannot\n find it, verify that the Coastfile's `worktree_dir` includes\n `~/.shep/repos/*/wt`. 
The glob pattern must match Shep's directory structure.\n See [Worktree Directories](../coastfiles/WORKTREE_DIR.md) for syntax and\n path types.\n", "harnesses/T3_CODE.md": "# T3 Code\n\n## Quick setup\n\nRequires the [Coast CLI](../GETTING_STARTED.md). Copy this prompt into your\nagent's chat to set up Coasts automatically:\n\n```prompt-copy\nt3_code_setup_prompt.txt\n```\n\nYou can also get the skill content from the CLI: `coast skills-prompt`.\n\nAfter setup, **restart T3 Code** for the skill and rules changes to take effect.\n\n**Note:** T3 Code may not load project-level skills from `.agents/skills/` or\n`.claude/skills/` yet. The setup prompt also places the skill in\n`~/.codex/skills/coasts/` so it is available globally to the Codex provider.\nThe Coast Runtime rules in `AGENTS.md` and `CLAUDE.md` still apply on every\ntask regardless.\n\n---\n\n[T3 Code](https://github.com/pingdotgg/t3code) creates git worktrees at\n`~/.t3/worktrees//`, checked out on named branches.\n\nT3 Code wraps Codex, so it uses `AGENTS.md` for always-on rules and\n`.agents/skills/coasts/SKILL.md` for the reusable `/coasts` workflow.\n\nBecause these worktrees live outside the project root, Coasts needs explicit\nconfiguration to discover and mount them.\n\n## Setup\n\nAdd `~/.t3/worktrees/` to `worktree_dir`. T3 Code nests worktrees under a per-project subdirectory, so the path must include the project name. In the example below, `my-app` must match the actual folder name under `~/.t3/worktrees/` for your repo.\n\n```toml\n[coast]\nname = \"my-app\"\nworktree_dir = [\".worktrees\", \"~/.t3/worktrees/my-app\"]\n```\n\nCoasts expands `~` at runtime and treats any path starting with `~/` or `/` as\nexternal. 
See [Worktree Directories](../coastfiles/WORKTREE_DIR.md) for\ndetails.\n\nAfter changing `worktree_dir`, existing instances must be **recreated** for the bind mount to take effect:\n\n```bash\ncoast rm my-instance\ncoast build\ncoast run my-instance\n```\n\nThe worktree listing updates immediately (Coasts reads the new Coastfile), but\nassigning to a T3 Code worktree requires the bind mount inside the container.\n\n## Where Coasts guidance goes\n\nUse this layout for T3 Code:\n\n- put the short Coast Runtime rules in `AGENTS.md`\n- put the reusable `/coasts` workflow in `.agents/skills/coasts/SKILL.md`\n- do not add a separate T3-specific project command or slash-command layer for\n Coasts\n- if this repo uses multiple harnesses, see\n [Multiple Harnesses](MULTIPLE_HARNESSES.md) and\n [Skills for Host Agents](../SKILLS_FOR_HOST_AGENTS.md).\n\n## What Coasts does\n\n- **Run** — `coast run ` creates a new Coast instance from the latest build. Use `coast run -w ` to create and assign a T3 Code worktree in one step. See [Run](../concepts_and_terminology/RUN.md).\n- **Bind mount** — At container creation, Coasts mounts\n `~/.t3/worktrees/` into the container at\n `/host-external-wt/{index}`.\n- **Discovery** — `git worktree list --porcelain` is repo-scoped, so only worktrees belonging to the current project appear.\n- **Naming** — T3 Code worktrees use named branches, so they appear by branch\n name in the Coasts UI and CLI.\n- **Assign** — `coast assign` remounts `/workspace` from the external bind mount path.\n- **Gitignored sync** — Runs on the host filesystem with absolute paths, works without the bind mount.\n- **Orphan detection** — The git watcher scans external directories\n recursively, filtering by `.git` gitdir pointers. 
If T3 Code removes a\n workspace, Coasts auto-unassigns the instance.\n\n## Example\n\n```toml\n[coast]\nname = \"my-app\"\ncompose = \"./docker-compose.yml\"\nworktree_dir = [\".worktrees\", \".claude/worktrees\", \"~/.codex/worktrees\", \"~/.t3/worktrees/my-app\"]\nprimary_port = \"web\"\n\n[ports]\nweb = 3000\napi = 8080\n\n[assign]\ndefault = \"none\"\n[assign.services]\nweb = \"hot\"\napi = \"hot\"\n```\n\n- `.claude/worktrees/` — Claude Code (local, no special handling)\n- `~/.codex/worktrees/` — Codex (external, bind-mounted)\n- `~/.t3/worktrees/my-app/` — T3 Code (external, bind-mounted; replace `my-app` with your repo folder name)\n\n## Troubleshooting\n\n- **Worktree not found** — If Coasts expects a worktree to exist but cannot\n find it, verify that the Coastfile's `worktree_dir` includes\n `~/.t3/worktrees/` and that `` matches the\n actual folder name under `~/.t3/worktrees/`. See\n [Worktree Directories](../coastfiles/WORKTREE_DIR.md) for syntax and path\n types.\n\n## Limitations\n\n- Avoid relying on T3 Code-specific environment variables for runtime\n configuration inside Coasts. Coasts manages ports, workspace paths, and\n service discovery independently — use Coastfile `[ports]` and `coast exec`\n instead.\n", diff --git a/docs/harnesses/CURSOR.md b/docs/harnesses/CURSOR.md index 2120333..b0b3224 100644 --- a/docs/harnesses/CURSOR.md +++ b/docs/harnesses/CURSOR.md @@ -1,5 +1,9 @@ # Cursor +```youtube +ZmbcZ_QfO6w +``` + ## Quick setup Requires the [Coast CLI](../GETTING_STARTED.md). Copy this prompt into your diff --git a/docs/harnesses/README.md b/docs/harnesses/README.md index 91c3212..d313087 100644 --- a/docs/harnesses/README.md +++ b/docs/harnesses/README.md @@ -1,5 +1,9 @@ # Harnesses +```youtube +AWhaeam9R7o +``` + Each harness creates git worktrees in a different location. 
In Coasts, the [`worktree_dir`](../coastfiles/WORKTREE_DIR.md) array tells it where to look -- including external paths like `~/.codex/worktrees` that require additional