From ac4c505bad54bfadf09273a3d75d551ac7177e5e Mon Sep 17 00:00:00 2001
From: Andrew Hyatt
Date: Thu, 25 Dec 2025 09:51:54 -0500
Subject: [PATCH 1/2] Added gpt-oss model and enabled low/medium/high
 reasoning for it

Fixes https://github.com/ahyatt/llm/issues/229
---
 NEWS.org      | 1 +
 llm-models.el | 7 ++++++-
 llm-ollama.el | 14 ++++++++++----
 3 files changed, 17 insertions(+), 5 deletions(-)

diff --git a/NEWS.org b/NEWS.org
index 45f49bb..3e92e0a 100644
--- a/NEWS.org
+++ b/NEWS.org
@@ -1,5 +1,6 @@
 * Version 0.28.4
 - Fixed Ollama reasoning, whose API has changed
+- Added the gpt-oss model and support for low/medium/high reasoning with Ollama
 * Version 0.28.3
 - Fixed breakage in Ollama streaming tool calling
 - Fixed incorrect Ollama streaming tool use capability reporting
diff --git a/llm-models.el b/llm-models.el
index 5c999f9..c1bcb2b 100644
--- a/llm-models.el
+++ b/llm-models.el
@@ -377,7 +377,12 @@ REGEX is a regular expression that can be used to identify the model, uniquely (
     :name "BGE-M3" :symbol 'bge-m3
     :capabilities '(embedding free-software) ;; MIT license
     :context-length 8192
-    :regex "bge-m3")))
+    :regex "bge-m3")
+   (make-llm-model
+    :name "gpt-oss" :symbol 'gpt-oss
+    :capabilities '(generation free-software reasoning tool-use) ; Apache license
+    :context-length 128000
+    :regex "gpt-oss")))
 
 (defun llm-models-by-symbol (symbol)
   "Return the model with SYMBOL."
diff --git a/llm-ollama.el b/llm-ollama.el
index deca689..2a713fd 100644
--- a/llm-ollama.el
+++ b/llm-ollama.el
@@ -162,11 +162,17 @@ PROVIDER is the llm-ollama provider."
                        (llm-ollama--response-format
                         (llm-chat-prompt-response-format prompt)))))
     (setq request-plist (plist-put request-plist :stream (if streaming t :false)))
-    (when (llm-chat-prompt-reasoning prompt)
+    (when (and (llm-chat-prompt-reasoning prompt)
+               (not (eq 'none (llm-chat-prompt-reasoning prompt))))
       (setq request-plist (plist-put request-plist :think
-                                     (if (eq 'none (llm-chat-prompt-reasoning prompt))
-                                         :false
-                                       't))))
+                                     (if (eq 'gpt-oss (llm-models-match (llm-ollama-chat-model provider)))
+                                         (pcase (llm-chat-prompt-reasoning prompt)
+                                           ('light "low")
+                                           ('medium "medium")
+                                           ('maximum "high"))
+                                       (if (eq 'none (llm-chat-prompt-reasoning prompt))
+                                           :false
+                                         't)))))
     (when (llm-chat-prompt-temperature prompt)
       (setq options (plist-put options :temperature (llm-chat-prompt-temperature prompt))))
     (when (llm-chat-prompt-max-tokens prompt)

From aa698ca9b219d9627699d9459fc4c5fe1fb563e1 Mon Sep 17 00:00:00 2001
From: Andrew Hyatt
Date: Thu, 25 Dec 2025 11:26:20 -0500
Subject: [PATCH 2/2] Add reasoning tag unless the model has no reasoning

---
 llm-ollama.el | 24 +++++++++++++-----------
 1 file changed, 13 insertions(+), 11 deletions(-)

diff --git a/llm-ollama.el b/llm-ollama.el
index 9e8cf59..3e90099 100644
--- a/llm-ollama.el
+++ b/llm-ollama.el
@@ -188,17 +188,19 @@ PROVIDER is the llm-ollama provider."
                        (llm-ollama--response-format
                         (llm-chat-prompt-response-format prompt)))))
     (setq request-plist (plist-put request-plist :stream (if streaming t :false)))
-    (when (and (llm-chat-prompt-reasoning prompt)
-               (not (eq 'none (llm-chat-prompt-reasoning prompt))))
-      (setq request-plist (plist-put request-plist :think
-                                     (if (eq 'gpt-oss (llm-models-match (llm-ollama-chat-model provider)))
-                                         (pcase (llm-chat-prompt-reasoning prompt)
-                                           ('light "low")
-                                           ('medium "medium")
-                                           ('maximum "high"))
-                                       (if (eq 'none (llm-chat-prompt-reasoning prompt))
-                                           :false
-                                         't)))))
+    (let ((model (llm-models-match (llm-ollama-chat-model provider))))
+      (when (and (llm-chat-prompt-reasoning prompt)
+                 (member 'reasoning (llm-model-capabilities model))
+                 (not (eq 'none (llm-chat-prompt-reasoning prompt))))
+        (setq request-plist (plist-put request-plist :think
+                                       (if (eq 'gpt-oss (llm-model-symbol model))
+                                           (pcase (llm-chat-prompt-reasoning prompt)
+                                             ('light "low")
+                                             ('medium "medium")
+                                             ('maximum "high"))
+                                         (if (eq 'none (llm-chat-prompt-reasoning prompt))
+                                             :false
+                                           't))))))
     (when (llm-chat-prompt-temperature prompt)
       (setq options (plist-put options :temperature (llm-chat-prompt-temperature prompt))))
     (when (llm-chat-prompt-max-tokens prompt)
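Usage note (not part of the patch series): a minimal sketch of how the new behavior could be exercised from Emacs Lisp. It assumes llm-make-chat-prompt accepts the :reasoning key used elsewhere in the library and that a local Ollama instance is serving a model named "gpt-oss"; the variable name my-gpt-oss-provider is illustrative only.

  (require 'llm)
  (require 'llm-ollama)

  ;; Provider pointing at a local Ollama daemon (default host/port) with gpt-oss.
  (defvar my-gpt-oss-provider
    (make-llm-ollama :chat-model "gpt-oss"))

  ;; With this series applied, 'light, 'medium, and 'maximum are sent to Ollama
  ;; as the "low", "medium", and "high" think levels for gpt-oss; other models
  ;; with the reasoning capability still get :think t.
  (llm-chat my-gpt-oss-provider
            (llm-make-chat-prompt "Why is the sky blue?"
                                  :reasoning 'light))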