Skip to content
This repository was archived by the owner on Apr 19, 2026. It is now read-only.

Commit b9a19e0

Browse files
authored
Fix #76: Add remote Ollama support for server mode (#126)
- When isServer=true (headless Linux without GPU), offer OLLAMA_HOST configuration
- Allow Goose installation in server mode (works with remote Ollama via OLLAMA_HOST)
- Update setup wizard to guide users through remote GPU endpoint setup
- Keep backward compatibility for local Ollama on Mac/GPU systems

This enables dogfooding on jared-box (headless WSL2 + RunPod GPU) by allowing remote Ollama configuration instead of skipping Ollama entirely.
1 parent 48dc63f commit b9a19e0

1 file changed

Lines changed: 34 additions & 6 deletions

File tree

cmd/shellforge/main.go

Lines changed: 34 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -155,9 +155,26 @@ model := ""
155155
// ── Step 1: Ollama (skip on headless server) ──
156156
steps++
157157
if isServer {
158-
fmt.Printf("── Step %d/%d: Ollama (skipped β€” server mode) ──\n", steps, total)
159-
fmt.Println(" Detected: Linux, no GPU β€” skipping local model setup")
160-
fmt.Println(" Use CLI drivers instead: shellforge run claude, copilot, codex, gemini")
158+
fmt.Printf("── Step %d/%d: Ollama (server mode) ──\n", steps, total)
159+
fmt.Println(" Detected: Linux, no GPU β€” remote Ollama configuration")
160+
fmt.Println()
161+
fmt.Print(" Configure remote Ollama (OLLAMA_HOST) for GPU endpoint? [Y/n] ")
162+
if confirm(reader) {
163+
fmt.Print(" Enter OLLAMA_HOST (e.g., http://192.168.1.100:11434): ")
164+
ollamaHost := readLine(reader)
165+
if ollamaHost != "" {
166+
fmt.Printf(" β†’ Set OLLAMA_HOST=%s before running shellforge\n", ollamaHost)
167+
fmt.Println(" βœ“ Remote Ollama configured")
168+
} else {
169+
fmt.Println(" ⚠ No OLLAMA_HOST set β€” will use default (localhost:11434)")
170+
}
171+
} else {
172+
fmt.Println(" Skipped remote Ollama configuration")
173+
}
174+
fmt.Println(" Note: Use CLI drivers for API-based inference:")
175+
fmt.Println(" shellforge run claude \"review open PRs\"")
176+
fmt.Println(" shellforge run copilot \"update docs\"")
177+
fmt.Println(" shellforge run codex \"generate tests\"")
161178
fmt.Println()
162179
} else {
163180
fmt.Printf("── Step %d/%d: Ollama (local LLM inference) ──\n", steps, total)
@@ -300,10 +317,12 @@ fmt.Println()
300317
steps++
301318
fmt.Printf("── Step %d/%d: Agent drivers ──\n", steps, total)
302319

303-
// On Mac/GPU: offer Goose (local models via Ollama). On server: skip, show API drivers.
304-
if !isServer {
320+
// Offer Goose for both local and remote Ollama (works with OLLAMA_HOST)
305321
if _, err := exec.LookPath("goose"); err != nil {
306322
fmt.Println(" Goose β€” AI agent with native Ollama support (actually executes tools)")
323+
if isServer {
324+
fmt.Println(" Note: Works with remote Ollama via OLLAMA_HOST environment variable")
325+
}
307326
fmt.Print(" Install Goose? [Y/n] ")
308327
if confirm(reader) {
309328
fmt.Println(" β†’ Installing Goose...")
@@ -314,12 +333,19 @@ run("sh", "-c", "curl -fsSL https://github.com/block/goose/releases/download/sta
314333
}
315334
if _, err := exec.LookPath("goose"); err == nil {
316335
fmt.Println(" βœ“ Goose installed")
336+
if isServer {
337+
fmt.Println(" β†’ Run 'goose configure' and set OLLAMA_HOST for remote GPU endpoint")
338+
} else {
317339
fmt.Println(" β†’ Run 'goose configure' to set up Ollama provider")
340+
}
318341
} else {
319342
fmt.Println(" ⚠ Install failed β€” try: brew install --cask block-goose")
320343
}
321344
}
322345
} else {
346+
if isServer {
347+
fmt.Println(" βœ“ Goose installed (works with remote Ollama via OLLAMA_HOST)")
348+
} else {
323349
fmt.Println(" βœ“ Goose installed (local model driver)")
324350
}
325351
}
@@ -397,7 +423,9 @@ fmt.Println("β•‘ Setup Complete β•‘")
397423
fmt.Println("β•šβ•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•β•")
398424
fmt.Println()
399425
if isServer {
400-
fmt.Println(" Server mode β€” use CLI drivers:")
426+
fmt.Println(" Server mode β€” remote Ollama configuration available")
427+
fmt.Println(" Set OLLAMA_HOST for remote GPU endpoint")
428+
fmt.Println(" shellforge run goose \"describe this project\" (works with OLLAMA_HOST)")
401429
fmt.Println(" shellforge run claude \"review open PRs\"")
402430
fmt.Println(" shellforge run copilot \"update docs\"")
403431
fmt.Println(" shellforge run codex \"generate tests\"")

0 commit comments

Comments (0)