From 461573738069f4fdb8c76ead5ed993920d1d9026 Mon Sep 17 00:00:00 2001
From: Edgar Ruiz <77294576+edgararuiz@users.noreply.github.com>
Date: Mon, 6 Oct 2025 11:51:17 -0500
Subject: [PATCH 1/2] Ensures 'custom' function sends the prompt and current
 value to Ollama

---
 r/R/llm-custom.R       |  2 ++
 r/R/m-backend-prompt.R | 12 ++++++++++++
 r/R/m-vec-prompt.R     | 19 ++++++++++---------
 3 files changed, 24 insertions(+), 9 deletions(-)

diff --git a/r/R/llm-custom.R b/r/R/llm-custom.R
index 8700fdf..7e60751 100644
--- a/r/R/llm-custom.R
+++ b/r/R/llm-custom.R
@@ -60,7 +60,9 @@ llm_custom.data.frame <- function(.data,
 llm_vec_custom <- function(x, prompt = "", valid_resps = NULL) {
   m_vec_prompt(
     x = x,
+    prompt_label = "custom",
     prompt = prompt,
+    custom_prompt = prompt,
     valid_resps = valid_resps
   )
 }
diff --git a/r/R/m-backend-prompt.R b/r/R/m-backend-prompt.R
index 6f2353d..a02a98a 100644
--- a/r/R/m-backend-prompt.R
+++ b/r/R/m-backend-prompt.R
@@ -11,6 +11,12 @@ m_backend_prompt.mall_ollama <- function(backend, additional = "") {
     additional,
     "The answer is based on the following text:\n{{x}}"
   ))
+  next_method$custom <- function(custom_prompt) {
+    glue(paste(
+      "{custom_prompt}",
+      "{additional}"
+    ))
+  }
   next_method
 }
 
@@ -22,6 +28,12 @@ m_backend_prompt.mall_ellmer <- function(backend, additional = "") {
     "The answer will be based on each individual prompt.",
     "Treat each prompt as unique when deciding the answer."
   ))
+  next_method$custom <- function(custom_prompt) {
+    glue(paste(
+      "{custom_prompt}",
+      "{additional}"
+    ))
+  }
   next_method
 }
 
diff --git a/r/R/m-vec-prompt.R b/r/R/m-vec-prompt.R
index 6df4af3..6808b2b 100644
--- a/r/R/m-vec-prompt.R
+++ b/r/R/m-vec-prompt.R
@@ -8,16 +8,17 @@ m_vec_prompt <- function(x,
                          ...) {
   # Initializes session LLM
   backend <- llm_use(.silent = TRUE, .force = FALSE)
-  # If there is no 'prompt', then assumes that we're looking for a
-  # prompt label (sentiment, classify, etc) to set 'prompt'
-  if (is.null(prompt)) {
-    defaults <- m_backend_prompt(
-      backend = backend,
-      additional = additional_prompt
-    )
-    fn <- defaults[[prompt_label]]
-    prompt <- fn(...)
+
+  # Builds the prompt that will be sent to the LLM
+  defaults <- m_backend_prompt(
+    backend = backend,
+    additional = additional_prompt
+  )
+  fn <- defaults[[prompt_label]]
+  if (!is.null(fn)) {
+    prompt <- fn(...)
   }
+
   # Submits final prompt to the LLM
   resp <- m_backend_submit(
     backend = backend,

From f041aa2aa2f69375d5853b53354b7f59c0d18e89 Mon Sep 17 00:00:00 2001
From: Edgar Ruiz <77294576+edgararuiz@users.noreply.github.com>
Date: Mon, 6 Oct 2025 12:09:31 -0500
Subject: [PATCH 2/2] Updates test snapshot, news and version bump

---
 r/DESCRIPTION                         | 2 +-
 r/NEWS.md                             | 4 ++++
 r/tests/testthat/_snaps/llm-custom.md | 2 +-
 3 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/r/DESCRIPTION b/r/DESCRIPTION
index cc9360d..7567855 100644
--- a/r/DESCRIPTION
+++ b/r/DESCRIPTION
@@ -1,7 +1,7 @@
 Package: mall
 Title: Run Multiple Large Language Model Predictions Against a Table, or
     Vectors
-Version: 0.2.0
+Version: 0.2.0.9000
 Authors@R: c(
     person("Edgar", "Ruiz", , "edgar@posit.co", role = c("aut", "cre")),
     person(given = "Posit Software, PBC", role = c("cph", "fnd"))
diff --git a/r/NEWS.md b/r/NEWS.md
index e8bb876..c88433d 100644
--- a/r/NEWS.md
+++ b/r/NEWS.md
@@ -1,3 +1,7 @@
+# mall (dev)
+
+* Fix for missing content when using custom prompt with Ollama directly (#62)
+
 # mall 0.2.0
 
 * Adds integration with `ellmer` `Chat` objects
diff --git a/r/tests/testthat/_snaps/llm-custom.md b/r/tests/testthat/_snaps/llm-custom.md
index c658a62..5e1342a 100644
--- a/r/tests/testthat/_snaps/llm-custom.md
+++ b/r/tests/testthat/_snaps/llm-custom.md
@@ -8,7 +8,7 @@
       2 I regret buying this laptop. It is too slow and the keyboard is too noisy
       3 Not sure how to feel about my new washing machine. Great color, but hard to figure
         .pred
-      1    No
+      1   Yes
       2    No
       3    No
 
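
---

For reference, a minimal sketch of the code path these patches fix, assuming
a local Ollama instance with a pulled model; the model name "llama3.2" and
the sample reviews are placeholders, not part of the patches:

    library(mall)

    # Point mall at a local Ollama model (placeholder model name)
    llm_use("ollama", "llama3.2", .silent = TRUE)

    reviews <- data.frame(
      review = c(
        "This has been the best TV I've ever used. Great screen, and sound.",
        "I regret buying this laptop. It is too slow and the keyboard is too noisy"
      )
    )

    # Before PATCH 1/2, the custom prompt below was dropped on the direct
    # Ollama path; with the new 'custom' entry in m_backend_prompt(), it is
    # glued ahead of each row's text before submission (which is why the
    # snapshot's first prediction flips from "No" to "Yes" in PATCH 2/2).
    llm_custom(
      reviews,
      col = review,
      prompt = "Answer a question directly: Is this a happy customer?",
      valid_resps = c("Yes", "No")
    )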