diff --git a/cmd/shelley/main.go b/cmd/shelley/main.go
index e2699ec..22ff991 100644
--- a/cmd/shelley/main.go
+++ b/cmd/shelley/main.go
@@ -95,8 +95,9 @@ func runServe(global GlobalConfig, args []string) {
database := setupDatabase(global.DBPath, logger)
defer database.Close()
- // Set the database path for system prompt generation
+ // Set the database path and port for system prompt generation
server.DBPath = global.DBPath
+ server.Port = *port
// Build LLM configuration
llmConfig := buildLLMConfig(logger, global.ConfigPath, global.TerminalURL, global.DefaultModel)
diff --git a/loop/loop.go b/loop/loop.go
index 246a96b..2ff33e7 100644
--- a/loop/loop.go
+++ b/loop/loop.go
@@ -82,6 +82,16 @@ func NewLoop(config Config) *Loop {
}
}
+// SetSystem sets the system prompt for the loop.
+// This should be called before processing any messages if the system
+// prompt wasn't available at loop creation time.
+func (l *Loop) SetSystem(system []llm.SystemContent) {
+ l.mu.Lock()
+ defer l.mu.Unlock()
+ l.system = system
+ l.logger.Debug("set system prompt", "system_items", len(system))
+}
+
// QueueUserMessage adds a user message to the queue to be processed
func (l *Loop) QueueUserMessage(message llm.Message) {
l.mu.Lock()
diff --git a/server/convo.go b/server/convo.go
index f4c528d..c48a28d 100644
--- a/server/convo.go
+++ b/server/convo.go
@@ -40,6 +40,7 @@ type ConversationManager struct {
hydrated bool
hasConversationEvents bool
+ needsSystemPrompt bool
cwd string // working directory for tools
}
@@ -87,15 +88,7 @@ func (cm *ConversationManager) Hydrate(ctx context.Context) error {
return fmt.Errorf("failed to get conversation history: %w", err)
}
- if conversation.UserInitiated && !hasSystemMessage(messages) {
- systemMsg, err := cm.createSystemPrompt(ctx)
- if err != nil {
- return err
- }
- if systemMsg != nil {
- messages = append(messages, *systemMsg)
- }
- }
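+	// Defer system prompt creation until the model is known; it now happens on the first user message.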
+ cm.needsSystemPrompt = conversation.UserInitiated && !hasSystemMessage(messages)
history, system := cm.partitionMessages(messages)
@@ -135,6 +128,17 @@ func (cm *ConversationManager) AcceptUserMessage(ctx context.Context, service ll
return false, err
}
+ // Create system prompt now that we know the model
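+	// Read and clear the flag under the lock so the prompt is created at most once.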
+ cm.mu.Lock()
+ needsSystemPrompt := cm.needsSystemPrompt
+ cm.needsSystemPrompt = false
+ cm.mu.Unlock()
+ if needsSystemPrompt {
+ if _, err := cm.createSystemPrompt(ctx); err != nil {
+ return false, fmt.Errorf("failed to create system prompt: %w", err)
+ }
+ }
+
cm.mu.Lock()
isFirst := !cm.hasConversationEvents
cm.hasConversationEvents = true
@@ -178,7 +182,7 @@ func hasSystemMessage(messages []generated.Message) bool {
}
func (cm *ConversationManager) createSystemPrompt(ctx context.Context) (*generated.Message, error) {
- systemPrompt, err := GenerateSystemPrompt(cm.cwd)
+ systemPrompt, err := GenerateSystemPrompt(cm.cwd, cm.modelID)
if err != nil {
return nil, fmt.Errorf("failed to generate system prompt: %w", err)
}
@@ -209,6 +213,14 @@ func (cm *ConversationManager) createSystemPrompt(ctx context.Context) (*generat
cm.logger.Warn("Failed to update conversation timestamp after system prompt", "error", err)
}
+ // Update the loop's system prompt if it's already running
+ cm.mu.Lock()
+ loopInstance := cm.loop
+ cm.mu.Unlock()
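+	// The loop reference is read under cm.mu; SetSystem takes the loop's own lock.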
+ if loopInstance != nil {
+ loopInstance.SetSystem([]llm.SystemContent{{Type: "text", Text: systemPrompt}})
+ }
+
cm.logger.Info("Stored system prompt", "length", len(systemPrompt))
return created, nil
}
diff --git a/server/skill_test.go b/server/skill_test.go
new file mode 100644
index 0000000..fe2cc8b
--- /dev/null
+++ b/server/skill_test.go
@@ -0,0 +1,76 @@
+package server
+
+import "testing"
+
+func TestParseSkillPreamble(t *testing.T) {
+ tests := []struct {
+ name string
+ content string
+ expected string
+ }{
+ {
+ name: "name and inline description",
+ content: `---
+name: test-skill
+description: A simple test skill
+---
+# Test Skill
+`,
+ expected: "test-skill: A simple test skill",
+ },
+ {
+ name: "multiline description with >",
+ content: `---
+name: pdf-processor
+description: >
+ Process PDF files including extraction,
+ form filling, and merging.
+---
+# PDF Processor
+`,
+ expected: "pdf-processor: Process PDF files including extraction, form filling, and merging.",
+ },
+ {
+ name: "multiline description with |",
+ content: `---
+name: code-reviewer
+description: |
+ Reviews code for quality.
+ Checks for common issues.
+---
+`,
+ expected: "code-reviewer: Reviews code for quality. Checks for common issues.",
+ },
+ {
+ name: "name only",
+ content: `---
+name: minimal-skill
+---
+# Minimal
+`,
+ expected: "minimal-skill",
+ },
+ {
+ name: "no name",
+ content: `---
+description: A skill without a name
+---
+`,
+ expected: "",
+ },
+ {
+ name: "no frontmatter",
+ content: "# Just a markdown file\n",
+ expected: "",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got := parseSkillPreamble(tt.content)
+ if got != tt.expected {
+ t.Errorf("parseSkillPreamble() = %q, want %q", got, tt.expected)
+ }
+ })
+ }
+}
diff --git a/server/system_prompt.go b/server/system_prompt.go
index 3c30618..14b59f0 100644
--- a/server/system_prompt.go
+++ b/server/system_prompt.go
@@ -22,11 +22,17 @@ type SystemPromptData struct {
IsSudoAvailable bool
Hostname string // For exe.dev, the public hostname (e.g., "vmname.exe.xyz")
ShelleyDBPath string // Path to the shelley database
+ ShelleyPort string // Port Shelley is running on
+ ShelleyBaseURL string // Full base URL for Shelley web UI
+ ModelName string // LLM model being used for this conversation
}
// DBPath is the path to the shelley database, set at startup
var DBPath string
+// Port is the port Shelley is running on, set at startup
+var Port string
+
type GitInfo struct {
Root string
}
@@ -35,15 +41,17 @@ type CodebaseInfo struct {
InjectFiles []string
InjectFileContents map[string]string
GuidanceFiles []string
+ SkillPreambles []string // Preambles from installed skills
}
// GenerateSystemPrompt generates the system prompt using the embedded template.
// If workingDir is empty, it uses the current working directory.
-func GenerateSystemPrompt(workingDir string) (string, error) {
+func GenerateSystemPrompt(workingDir string, modelName string) (string, error) {
data, err := collectSystemData(workingDir)
if err != nil {
return "", fmt.Errorf("failed to collect system data: %w", err)
}
+ data.ModelName = modelName
tmpl, err := template.New("system_prompt").Parse(systemPromptTemplate)
if err != nil {
@@ -102,6 +110,25 @@ func collectSystemData(workingDir string) (*SystemPromptData, error) {
}
}
+ // Set Shelley port and base URL
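+	// Prefer a public HTTPS URL on exe.dev (the default port 9999 maps to the shelley.exe.xyz subdomain); otherwise fall back to localhost.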
+ if Port != "" {
+ data.ShelleyPort = Port
+ if data.IsExeDev {
+ if Port == "9999" {
+ // Default port uses shelley.exe.xyz subdomain
+ if hostname, err := os.Hostname(); err == nil {
+ data.ShelleyBaseURL = "https://" + hostname + ".shelley.exe.xyz"
+ }
+ } else {
+ // Other ports use hostname:port
+ data.ShelleyBaseURL = "https://" + data.Hostname + ":" + Port
+ }
+ } else {
+ // Not exe.dev - use localhost
+ data.ShelleyBaseURL = "http://localhost:" + Port
+ }
+ }
+
// Set shelley database path if it was configured
if DBPath != "" {
// Convert to absolute path if relative
@@ -162,6 +189,9 @@ func collectCodebaseInfo(wd string, gitInfo *GitInfo) (*CodebaseInfo, error) {
seenFiles[lowerPath] = true
}
}
+
+ // Load installed skills from ~/.config/shelley/skills/
+ info.SkillPreambles = loadSkillPreambles(home)
}
// Determine the root directory to search
@@ -287,3 +317,82 @@ func isSudoAvailable() bool {
_, err := cmd.CombinedOutput()
return err == nil
}
+
+// loadSkillPreambles reads SKILL.md files from ~/.config/shelley/skills/ (following
+// Anthropic's skill format) and extracts name + description from YAML frontmatter.
+func loadSkillPreambles(home string) []string {
+ skillsDir := filepath.Join(home, ".config", "shelley", "skills")
+ entries, err := os.ReadDir(skillsDir)
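+	// A missing or unreadable skills directory simply means no skills are installed.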
+ if err != nil {
+ return nil
+ }
+
+ var preambles []string
+ for _, entry := range entries {
+ if !entry.IsDir() {
+ continue
+ }
+ skillFile := filepath.Join(skillsDir, entry.Name(), "SKILL.md")
+ content, err := os.ReadFile(skillFile)
+ if err != nil {
+ continue
+ }
+ if preamble := parseSkillPreamble(string(content)); preamble != "" {
+ preambles = append(preambles, preamble)
+ }
+ }
+ return preambles
+}
+
+// parseSkillPreamble extracts name and description from YAML frontmatter,
+// following Anthropic's skill format (https://docs.anthropic.com/en/docs/claude-code/skills).
+// Returns "name: description" for injection into the system prompt.
+func parseSkillPreamble(content string) string {
+ if !strings.HasPrefix(content, "---") {
+ return ""
+ }
+ // Find the closing ---
+ endIdx := strings.Index(content[3:], "\n---")
+ if endIdx == -1 {
+ return ""
+ }
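+	// endIdx is relative to content[3:], so endIdx+3 is the offset of the newline before the closing ---.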
+ frontmatter := content[4 : endIdx+3] // Skip initial ---\n
+
+ // Extract name and description fields
+ var name, description string
+ lines := strings.Split(frontmatter, "\n")
+ for i, line := range lines {
+ trimmed := strings.TrimSpace(line)
+ if strings.HasPrefix(trimmed, "name:") {
+ name = strings.TrimSpace(strings.TrimPrefix(trimmed, "name:"))
+ } else if strings.HasPrefix(trimmed, "description:") {
+ // Check if it's inline or multiline (> or |)
+ value := strings.TrimSpace(strings.TrimPrefix(trimmed, "description:"))
+ if value != "" && value != "|" && value != ">" {
+ description = value
+ } else {
+ // Multiline: collect indented lines
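+				// Blank lines are skipped; the first non-indented line ends the block.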
+ var multiline []string
+ for j := i + 1; j < len(lines); j++ {
+ if len(lines[j]) == 0 {
+ continue
+ }
+ if lines[j][0] == ' ' || lines[j][0] == '\t' {
+ multiline = append(multiline, strings.TrimSpace(lines[j]))
+ } else {
+ break
+ }
+ }
+ description = strings.Join(multiline, " ")
+ }
+ }
+ }
+
+ if name == "" {
+ return ""
+ }
+ if description == "" {
+ return name
+ }
+ return name + ": " + description
+}
diff --git a/server/system_prompt.txt b/server/system_prompt.txt
index 8fd253b..f163c80 100644
--- a/server/system_prompt.txt
+++ b/server/system_prompt.txt
@@ -3,6 +3,9 @@ You are Shelley, a coding agent and assistant. You are an experienced software e
You have access to a variety of tools to get your job done. Be persistent and creative.
Working directory: {{.WorkingDirectory}}
+{{if .ModelName}}
+Model: {{.ModelName}}
+{{end}}
{{if .GitInfo}}
Git repository root: {{.GitInfo.Root}}
@@ -62,6 +65,25 @@ Direct user instructions from the current conversation always take highest prece
{{end}}
{{end}}
{{end}}
+
+Skills extend Shelley with additional capabilities, following Anthropic's skill format.
+Skills are installed in ~/.config/shelley/skills/<skill-name>/SKILL.md.
+
+To install a skill from a URL: curl -sL <url> -o /tmp/s.zip && n=$(unzip -p /tmp/s.zip SKILL.md | grep '^name:' | cut -d: -f2 | tr -d ' ') && mkdir -p ~/.config/shelley/skills/$n && unzip -o /tmp/s.zip -d ~/.config/shelley/skills/$n
+{{if .Codebase.SkillPreambles}}
+Installed skills:
+{{range .Codebase.SkillPreambles}}- {{.}}
+{{end}}{{else}}No skills currently installed.
+{{end}}
+To check for newly installed skills or get full details, read ~/.config/shelley/skills/ and each skill's SKILL.md.
+
+{{if .ShelleyBaseURL}}
+
+Shelley web UI: {{.ShelleyBaseURL}}
+To link to a conversation: {{.ShelleyBaseURL}}/c/<slug>
+To link to this conversation, use the slug from the current URL or query the database.
+
+{{end}}
{{if .ShelleyDBPath}}
Your conversation history is stored in a SQLite database at: {{.ShelleyDBPath}}