Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 4 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ auto_session = true

[openai]
key = sk-...
model = gpt-4.1-nano-2025-04-14
model = gpt-5-nano

[gemini]
key = ...
Expand Down Expand Up @@ -69,8 +69,8 @@ seance chat "What is the speed of light?"
# Specify a provider for your query
seance chat "Explain the theory of relativity" --provider gemini

# Use piped content as input
cat src/seance.nim | seance chat "Explain what this Nim code does."
# Use piping to write your PR descriptions
git diff main... | seance chat "Write a conventional commit PR"
```

### 3. Session Management
Expand Down Expand Up @@ -104,7 +104,7 @@ This will delete all sessions older than 10 days, or whatever you specify with -
seance chat "This chat should not be saved." --no_session
```

- **Using a JSON Schema**: To force the output to be in a specific JSON format, you can use the `--json` flag. For the Gemini, Anthropic, and OpenAI providers, you can also use the `--schema` flag to provide a JSON schema to which the output must conform.
- **Using a JSON Schema**: To force the output to be in a specific JSON format, you can use the `--json` flag. For the Gemini, Anthropic, OpenAI, and LMStudio providers, you can also use the `--schema` flag to provide a JSON schema to which the output must conform.

```bash
# Create a schema file
Expand Down
2 changes: 1 addition & 1 deletion seance.nimble
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# Package

version = "0.4.0"
version = "0.5.0"
author = "Emre Şafak"
description = "A CLI tool and library for interacting with various LLMs"
license = "MIT"
Expand Down
22 changes: 14 additions & 8 deletions src/seance/providers/lmstudio.nim
Original file line number Diff line number Diff line change
Expand Up @@ -96,15 +96,21 @@ method chat*(provider: LMStudioProvider, messages: seq[ChatMessage], model: Opti
processedMessages.add(ChatMessage(role: system, content: "Return the response in JSON format."))

var requestBody: string
let request = LMStudioChatRequest(model: usedModel, messages: processedMessages)
var requestJson = %*request
if jsonMode:
let schemaNode = schema.get(%*{"type": "object"})
let request = LMStudioChatRequest(model: usedModel, messages: processedMessages)
var requestJson = %*request
requestJson["response_format"] = %*{"type": "json_object"}
requestBody = $requestJson
else:
let request = LMStudioChatRequest(model: usedModel, messages: processedMessages)
requestBody = $(%*request)
if schema.isSome:
requestJson["response_format"] = %*{
"type": "json_schema",
"json_schema": {
"name": "json_schema",
"strict": true,
"schema": schema.get()
}
}
else:
requestJson["response_format"] = %*{"type": "json_object"}
requestBody = $requestJson

info "LMStudio Request Body: " & requestBody
debug "curl -X POST " & endpoint & " -H \"Content-Type: application/json\" -d '" & requestBody & "'"
Expand Down
22 changes: 22 additions & 0 deletions tests/t_providers_lmstudio.nim
Original file line number Diff line number Diff line change
Expand Up @@ -167,3 +167,25 @@ suite "LMStudio Provider":
let requestJson = parseJson(capturedRequestBody)
check requestJson["model"].getStr() == "model-2"
check result.model == "model-2"

test "chat method sends correct request with json schema":
  # Purpose: verify that when `jsonMode = true` and a `schema` is supplied,
  # the LMStudio provider emits an OpenAI-style structured-output request:
  # response_format = {type: "json_schema", json_schema: {name, strict, schema}}.
  # Mock the HTTP layer so no real server is contacted; the canned body mimics
  # an OpenAI-compatible /chat/completions response whose content is a JSON string.
  mockHttpResponse = Response(
    status: "200 OK",
    bodyStream: newStringStream("""{"choices": [{"message": {"role": "assistant", "content": "{\"tool_code\":\"ls()\"}"}}]}""")
  )

  # Empty key and no model/endpoint overrides: provider should fall back to
  # its built-in defaults (endpoint checked below against DefaultLMStudioEndpoint).
  let conf = ProviderConfig(key: "", model: none(string), endpoint: none(string))
  let provider = newProvider(some(LMStudio), some(conf))
  provider.postRequestHandler = mockPostRequestHandler

  # Minimal JSON Schema; the provider is expected to embed it verbatim
  # under response_format.json_schema.schema.
  let schema = %*{"type": "object", "properties": {"tool_code": {"type": "string"}}}
  let result = provider.chat(testMessages, model = none(string), jsonMode = true, schema = some(schema))

  # Request must target the default endpoint and carry the structured-output envelope.
  check capturedUrl == DefaultLMStudioEndpoint
  let requestJson = parseJson(capturedRequestBody)
  check requestJson.hasKey("response_format")
  let responseFormat = requestJson["response_format"]
  check responseFormat["type"].getStr() == "json_schema"
  # NOTE(review): the inner "name" field is hard-coded to "json_schema" by the
  # provider (see lmstudio.nim diff in this PR) — asserted here as-is.
  check responseFormat["json_schema"]["name"].getStr() == "json_schema"
  check responseFormat["json_schema"]["strict"].getBool() == true
  check responseFormat["json_schema"]["schema"] == schema