Skip to content
This repository was archived by the owner on Apr 19, 2026. It is now read-only.

Commit 7a7dda7

Browse files
jpleva91 and claude committed
feat: environment-aware setup wizard + governed Crush install
Setup wizard now detects Mac vs headless Linux server.

Mac mode:
- Installs Ollama + model picker
- Installs Crush from AgentGuardHQ fork (governance built in)
- Shows local model tips

Server mode (Linux, no GPU):
- Skips Ollama entirely
- Skips Crush (no local models)
- Shows API drivers (Claude Code, Copilot, Codex, Gemini)
- Next steps point to `shellforge run claude` / `shellforge run copilot`

Also: hasGPU() detection (Metal on Mac, nvidia-smi on Linux)

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
1 parent 1bb9d6b commit 7a7dda7

1 file changed

Lines changed: 82 additions & 24 deletions

File tree

cmd/shellforge/main.go

Lines changed: 82 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ import (
1919
"github.com/AgentGuardHQ/shellforge/internal/scheduler"
2020
)
2121

22-
var version = "0.3.6"
22+
var version = "0.4.0"
2323

2424
func main() {
2525
if len(os.Args) < 2 {
@@ -116,8 +116,18 @@ reader := bufio.NewReader(os.Stdin)
116116
steps := 0
117117
total := 6
118118

119-
// ── Step 1: Ollama ──
119+
// ── Detect environment ──
120+
isServer := !hasGPU() && runtime.GOOS == "linux"
121+
model := ""
122+
123+
// ── Step 1: Ollama (skip on headless server) ──
120124
steps++
125+
if isServer {
126+
fmt.Printf("── Step %d/%d: Ollama (skipped — server mode) ──\n", steps, total)
127+
fmt.Println(" Detected: Linux, no GPU — skipping local model setup")
128+
fmt.Println(" Use CLI drivers instead: shellforge run claude, copilot, codex, gemini")
129+
fmt.Println()
130+
} else {
121131
fmt.Printf("── Step %d/%d: Ollama (local LLM inference) ──\n", steps, total)
122132
if _, err := exec.LookPath("ollama"); err != nil {
123133
fmt.Print(" Ollama not found. Install? [Y/n] ")
@@ -157,7 +167,7 @@ fmt.Println(" 4) phi4 — 9 GB RAM, Microsoft")
157167
fmt.Println(" 5) other — enter a custom model name")
158168
fmt.Print(" Pick model [2]: ")
159169
choice := readLine(reader)
160-
model := "qwen3:8b"
170+
model = "qwen3:8b"
161171
switch strings.TrimSpace(choice) {
162172
case "1":
163173
model = "qwen3:1.7b"
@@ -177,11 +187,11 @@ fmt.Printf(" → Pulling %s (this may take a few minutes)...\n", model)
177187
run("ollama", "pull", model)
178188
fmt.Printf(" ✓ Model ready: %s\n", model)
179189

180-
// Set env hint
181190
if model != ollama.Model {
182191
fmt.Printf(" Note: set OLLAMA_MODEL=%s before running shellforge\n", model)
183192
}
184193
fmt.Println()
194+
}
185195

186196
// ── Step 2: Governance ──
187197
steps++
@@ -257,32 +267,52 @@ fmt.Println()
257267
// ── Step 5: Agent drivers ──
258268
steps++
259269
fmt.Printf("── Step %d/%d: Agent drivers ──\n", steps, total)
260-
driverList := []struct {
270+
271+
// On Mac: offer Crush (local models). On server: skip Crush, show API drivers.
272+
if !isServer {
273+
if _, err := exec.LookPath("crush"); err != nil {
274+
fmt.Println(" Crush — Go AI coding agent with AgentGuard governance (local models)")
275+
fmt.Print(" Install Crush? [Y/n] ")
276+
if confirm(reader) {
277+
fmt.Println(" → Installing Crush (AgentGuardHQ fork with governance)...")
278+
run("go", "install", "github.com/AgentGuardHQ/crush@latest")
279+
if _, err := exec.LookPath("crush"); err == nil {
280+
fmt.Println(" ✓ Crush installed with AgentGuard governance built in")
281+
} else {
282+
fmt.Println(" ⚠ Install failed — try: go install github.com/AgentGuardHQ/crush@latest")
283+
}
284+
}
285+
} else {
286+
fmt.Println(" ✓ Crush installed (local model driver)")
287+
}
288+
}
289+
290+
// Show API-based drivers
291+
apiDrivers := []struct {
261292
name string
262293
bin string
263294
install string
264-
desc string
265295
}{
266-
{"Crush", "crush", "brew install charmbracelet/tap/crush", "Go AI coding agent (local models)"},
267-
{"Claude Code", "claude", "npm i -g @anthropic-ai/claude-code", "Anthropic Claude CLI"},
268-
{"Copilot CLI", "github-copilot-cli", "gh extension install github/gh-copilot", "GitHub Copilot"},
269-
{"Codex CLI", "codex", "npm i -g @openai/codex", "OpenAI Codex"},
270-
{"Gemini CLI", "gemini", "npm i -g @anthropic-ai/gemini-cli", "Google Gemini"},
296+
{"Claude Code", "claude", "npm i -g @anthropic-ai/claude-code"},
297+
{"Copilot CLI", "github-copilot-cli", "gh extension install github/gh-copilot"},
298+
{"Codex CLI", "codex", "npm i -g @openai/codex"},
299+
{"Gemini CLI", "gemini", "npm i -g @google/gemini-cli"},
271300
}
301+
fmt.Println()
302+
fmt.Println(" API-based drivers (use their own model APIs):")
272303
installedDrivers := 0
273-
for _, d := range driverList {
304+
for _, d := range apiDrivers {
274305
if _, err := exec.LookPath(d.bin); err == nil {
275-
fmt.Printf(" ✓ %s installed (%s)\n", d.name, d.desc)
306+
fmt.Printf(" ✓ %s installed\n", d.name)
276307
installedDrivers++
277308
} else {
278-
fmt.Printf(" ○ %s not found\n", d.name)
279-
fmt.Printf(" → %s\n", d.install)
309+
fmt.Printf(" ○ %s → %s\n", d.name, d.install)
280310
}
281311
}
282-
if installedDrivers == 0 {
312+
if isServer && installedDrivers == 0 {
283313
fmt.Println()
284-
fmt.Println(" No drivers installed. ShellForge's built-in agent still works.")
285-
fmt.Println(" Install drivers to use: shellforge run claude \"prompt\"")
314+
fmt.Println(" No drivers installed. Install at least one:")
315+
fmt.Println(" npm i -g @anthropic-ai/claude-code")
286316
}
287317
fmt.Println()
288318

@@ -329,16 +359,28 @@ fmt.Println("╔═════════════════════
329359
fmt.Println("║ Setup Complete ║")
330360
fmt.Println("╚══════════════════════════════════════╝")
331361
fmt.Println()
332-
fmt.Println(" Quick start:")
333-
fmt.Printf(" shellforge agent \"describe this project\"")
334-
fmt.Println()
362+
if isServer {
363+
fmt.Println(" Server mode — use CLI drivers:")
364+
fmt.Println(" shellforge run claude \"review open PRs\"")
365+
fmt.Println(" shellforge run copilot \"update docs\"")
366+
fmt.Println(" shellforge run codex \"generate tests\"")
335367
fmt.Println()
336368
fmt.Println(" Run a swarm:")
337-
fmt.Println(" dagu server --dags=./dags # web UI at :8080")
338-
fmt.Println(" dagu start dags/sdlc-swarm.yaml # run now")
369+
fmt.Println(" shellforge swarm # start Dagu dashboard")
370+
fmt.Println(" dagu start dags/multi-driver-swarm.yaml")
371+
} else {
372+
fmt.Println(" Quick start:")
373+
fmt.Println(" shellforge agent \"describe this project\"")
374+
fmt.Println(" shellforge run crush \"find test gaps\"")
339375
fmt.Println()
340-
fmt.Printf(" Tip: export OLLAMA_MODEL=%s\n", model)
376+
fmt.Println(" Run a swarm:")
377+
fmt.Println(" shellforge swarm # start Dagu dashboard")
378+
fmt.Println(" dagu start dags/sdlc-swarm.yaml")
379+
if model != "" {
380+
fmt.Printf("\n Tip: export OLLAMA_MODEL=%s\n", model)
381+
}
341382
fmt.Println(" Tip: export OLLAMA_KV_CACHE_TYPE=q8_0 # halves memory per agent")
383+
}
342384
fmt.Println()
343385
}
344386

@@ -788,3 +830,19 @@ cmd.Stdout = os.Stdout
788830
cmd.Stderr = os.Stderr
789831
cmd.Run()
790832
}
833+
834+
// hasGPU detects if the machine has a GPU (Metal on macOS, NVIDIA on Linux).
835+
func hasGPU() bool {
836+
if runtime.GOOS == "darwin" {
837+
return true // All Macs have Metal GPU
838+
}
839+
// Linux: check for NVIDIA GPU
840+
if _, err := exec.LookPath("nvidia-smi"); err == nil {
841+
return true
842+
}
843+
// Check for render devices (AMD/Intel)
844+
if _, err := os.Stat("/dev/dri/renderD128"); err == nil {
845+
return true
846+
}
847+
return false
848+
}

0 commit comments

Comments (0)