From a7f5bf5cf016be7e65096f159559900723d14521 Mon Sep 17 00:00:00 2001
From: Avnish Kumar
Date: Tue, 13 Jan 2026 22:45:47 +0530
Subject: [PATCH] fix: InteractiveExecutor not generating tokens due to early
 loop exit

The PostProcess method in InteractiveExecutor was always returning true
for breakGeneration, causing the inference loop to exit immediately after
the first iteration, before any tokens could be generated.

Changed the final return statement to return false, allowing generation
to continue until a proper stop condition is met (anti-prompt, EOS token,
or max tokens reached).
---
 LLama/LLamaInteractExecutor.cs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/LLama/LLamaInteractExecutor.cs b/LLama/LLamaInteractExecutor.cs
index e7cac4c47..95dc58355 100644
--- a/LLama/LLamaInteractExecutor.cs
+++ b/LLama/LLamaInteractExecutor.cs
@@ -236,7 +236,7 @@ private void PreprocessLlava(string text, InferStateArgs args, bool addBos = tru
                 args.WaitForInput = true;
             }
 
-            return Task.FromResult((true, (IReadOnlyList<string>)[]));
+            return Task.FromResult((false, (IReadOnlyList<string>)[]));
         }
 
         ///