Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 3 additions & 2 deletions AGENT.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,12 @@ Séance is a Nim-based library and CLI utility. The core functionality lives in

- Install Nim: `curl https://nim-lang.org/choosenim/init.sh -sSf | sh -s -- -y`
- Add it to your path: `export PATH=/home/jules/.nimble/bin:$PATH`
- Install dependencies: `nimble install -d --accept`
- Install dependencies: `nimble install -d --accept`
- Update lock file: `nimble lock`
- Typecheck and lint: `nim check src/`
- Reformat: `nimpretty`
- Run tests: `nimble test`
- Build for production: `nimble build`
- Build: `nimble build -d:ssl`

## Code Style

Expand Down
15 changes: 15 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -216,6 +216,21 @@ let response = session.chat("Hello!")

That's it! No complex message arrays, no role management, just simple text in and text out with automatic conversation handling.

### Shell Completion

Séance uses Carapace to generate shell-specific completions from a single spec.
Install Carapace and add the following to your shell’s startup file:

**Bash, Zsh**
```bash
source <(seance completion)
```

**Fish, Nushell**
```fish
seance completion | source
```

## Development

To contribute to Séance or run it from the source:
Expand Down
55 changes: 55 additions & 0 deletions carapace.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
# Carapace completion spec for the `seance` CLI.
# `seance completion` prints this file; carapace bridges it to each shell.
name: seance
description: A CLI tool and library for interacting with various LLMs
completion:
  bash:
    - carapace
  fish:
    - carapace
  zsh:
    - carapace
  nushell:
    - carapace

commands:
  - name: chat
    description: Sends a single chat prompt to the specified provider and prints the response.
    flags:
      --provider:
        description: LLM provider to use
        completion:
          static:
            - OpenAI
            - Anthropic
            - Gemini
            - OpenRouter
            - LMStudio
      --model:
        description: LLM model to use
      --system-prompt:
        description: System prompt to guide the model's response
      --session:
        description: UUID session ID
      --verbose:
        description: Verbosity level (0=info, 1=debug, 2=all)
      --dry-run:
        description: If true, prints the final prompt instead of sending it to the LLM.
      --no-session:
        description: If true, no session will be loaded or saved.
      --json:
        description: If true, the response will be in JSON format.
      --schema:
        description: Path to a JSON schema file
    arguments:
      - name: prompt
        description: Prompt to send to the LLM
        variadic: true

  - name: list
    description: Lists all available sessions.

  - name: prune
    description: Deletes all sessions older than the specified number of days.
    flags:
      --days:
        description: The number of days to keep sessions.
        default: 10
16 changes: 0 additions & 16 deletions nimble.lock

This file was deleted.

3 changes: 2 additions & 1 deletion seance.nimble
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# Package

version = "0.5.0"
version = "0.5.1"
author = "Emre Şafak"
description = "A CLI tool and library for interacting with various LLMs"
license = "MIT"
Expand All @@ -13,3 +13,4 @@ bin = @["seance"]

# requires "nim >= 2.0"
requires "cligen >= 1.8.6"
requires "yaml"
5 changes: 3 additions & 2 deletions src/seance.nim
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import seance/commands # Import the module containing our command procedures
import seance/commands, seance/completion # Import the module containing our command procedures
from seance/simple import chat, newSession, resetSession, Session
from seance/types import Provider

Expand Down Expand Up @@ -33,7 +33,7 @@ when isMainModule:
commands.chat,
help = {
"prompt": "Prompt to send to the LLM. Can be combined with stdin input.",
"provider": "LLM provider to use: OpenAI, Anthropic, or Gemini.",
"provider": "LLM provider to use: OpenAI, Anthropic, Gemini, OpenRouter, or LMStudio.",
"session": "UUID session ID.",
"model": "LLM model to use.",
"systemPrompt": "System prompt to guide the model's response.",
Expand All @@ -45,4 +45,5 @@ when isMainModule:
],
[commands.list],
[commands.prune, help = {"days": "The number of days to keep sessions."}],
[completion.completion],
)
7 changes: 7 additions & 0 deletions src/seance/completion.nim
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
import std/os

proc completion*(): string =
  ## Returns the Carapace completion spec (the repository's `carapace.yaml`),
  ## which the user pipes through `carapace` (or `source`s) to obtain
  ## shell-specific completions for their shell.
  ##
  ## The spec is embedded at compile time, so the built binary needs no
  ## runtime access to the repository.
  # currentSourcePath() = .../src/seance/completion.nim; three parentDir()
  # hops reach the repo root where carapace.yaml lives.
  const specPath = currentSourcePath().parentDir().parentDir().parentDir() / "carapace.yaml"
  const spec = staticRead(specPath)
  spec
16 changes: 16 additions & 0 deletions tests/t_completion.nim
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
import std/unittest
import seance/completion
import yaml

# Read the spec directly from the repo root (nimble runs tests from there)
# so the test can compare it against what the `completion` proc embeds.
const carapaceYaml = readFile("carapace.yaml")

suite "Carapace completion":
  test "completion procedure outputs the spec":
    check completion() == carapaceYaml

suite "YAML validation":
  test "carapace.yaml is valid YAML":
    var parsed: YamlNode
    # load raises a YamlConstructionError/YamlParserError on invalid YAML,
    # failing the test.
    load(carapaceYaml, parsed)