5 changes: 5 additions & 0 deletions .changeset/great-lamps-boil.md
@@ -0,0 +1,5 @@
+ ---
+ "@mrck-labs/grid-core": patch
+ ---
+
+ fix tool calling again
73 changes: 58 additions & 15 deletions packages/core/src/factories/configurable-agent.factory.ts
@@ -774,8 +774,8 @@ export const createConfigurableAgent = async ({
let fullText = "";

// Call streaming LLM with tools
- // Tool events are sent via sendUpdate callback in onStepFinish
- const { textStream, generation } =
+ // Use fullStream to capture all events including tool calls
+ const { fullStream, generation } =
await base.llmService.runStreamedLLMWithTools({
messages: workingMessages,
tools: availableTools,
@@ -795,21 +795,64 @@ export const createConfigurableAgent = async ({
...mergedLlmOptions,
});

- // Stream text chunks
- for await (const chunk of textStream) {
-   fullText += chunk;
-
-   // Yield chunk to caller
-   yield {
-     type: "text_delta",
-     content: chunk,
-   };
-
-   // Also send via sendUpdate for IPC
-   await sendUpdate({
-     type: "text_delta",
-     content: chunk,
-   });
- }
+ // Process the full stream, which includes text, tool calls, and tool results
+ for await (const part of fullStream) {
+   // Handle text deltas
+   if (part.type === "text-delta") {
+     fullText += part.textDelta;
+
+     // Yield chunk to caller
+     yield {
+       type: "text_delta",
+       content: part.textDelta,
+     };
+
+     // Also send via sendUpdate for IPC
+     await sendUpdate({
+       type: "text_delta",
+       content: part.textDelta,
+     });
+   }
+
+   // Handle tool calls - emit when a tool is called
+   if (part.type === "tool-call") {
+     const toolCallData = {
+       toolCallId: part.toolCallId,
+       toolName: part.toolName,
+       args: part.args,
+     };
+
+     yield {
+       type: "tool_execution",
+       content: JSON.stringify(toolCallData),
+       metadata: toolCallData,
+     };
+
+     await sendUpdate({
+       type: "tool_execution",
+       content: JSON.stringify(toolCallData),
+     });
+   }
+
+   // Handle tool results - emit when a tool returns
+   if (part.type === "tool-result") {
+     const toolResultData = {
+       toolCallId: part.toolCallId,
+       toolName: part.toolName,
+       result: part.result,
+     };
+
+     yield {
+       type: "tool_response",
+       content: JSON.stringify(toolResultData),
+       metadata: toolResultData,
+     };
+
+     await sendUpdate({
+       type: "tool_response",
+       content: JSON.stringify(toolResultData),
+     });
+   }
+ }

// Build final response
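For context, a minimal consumer of the events this generator now yields might look like the sketch below. The `AgentStreamEvent` union and `printAgentStream` helper are hypothetical illustrations of the shapes emitted in the diff above, not part of this PR.

```ts
// Hypothetical sketch of a consumer for the events yielded above.
// The union models only the three event types emitted in this diff.
type AgentStreamEvent =
  | { type: "text_delta"; content: string }
  | { type: "tool_execution"; content: string; metadata?: unknown }
  | { type: "tool_response"; content: string; metadata?: unknown };

async function printAgentStream(
  events: AsyncIterable<AgentStreamEvent>
): Promise<string> {
  let fullText = "";
  for await (const event of events) {
    switch (event.type) {
      case "text_delta":
        fullText += event.content; // accumulate assistant text
        break;
      case "tool_execution":
        // content is the serialized { toolCallId, toolName, args }
        console.log("tool call:", event.content);
        break;
      case "tool_response":
        // content is the serialized { toolCallId, toolName, result }
        console.log("tool result:", event.content);
        break;
    }
  }
  return fullText;
}
```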
27 changes: 11 additions & 16 deletions packages/core/src/services/base.llm.service.ts
@@ -392,6 +392,7 @@ export const baseLLMService = (
options: LLMServiceOptions & { tools?: any[] }
): Promise<{
textStream: AsyncIterable<string>;
+ fullStream: AsyncIterable<any>;
generation: any;
}> => {
const {
@@ -446,7 +447,7 @@
// Forward provider-specific options
...(providerOptions ? { providerOptions: providerOptions as any } : {}),
onStepFinish: (step) => {
- // Tool telemetry
+ // Tool telemetry - fires when each step completes
step.content.forEach((content) => {
if (content.type === "tool-call") {
const sc: any = content;
@@ -463,35 +464,29 @@
toolName,
args
);
- if (sendUpdate) {
-   sendUpdate({
-     type: "tool_execution",
-     content: JSON.stringify(content),
-   });
- }
}

if (content.type === "tool-result") {
const sc: any = content;
const toolCallId = sc.toolCallId ?? sc.id ?? sc.callId;
- const result = sc.result ?? sc.output ?? sc.data;
+ const resultData = sc.result ?? sc.output ?? sc.data;
langfuse.endToolSpanForSession(
options.context.sessionToken,
toolCallId,
- result
+ resultData
);
- if (sendUpdate) {
-   sendUpdate({
-     type: "tool_response",
-     content: JSON.stringify(content),
-   });
- }
}
});
},
});

- return { textStream: result.textStream, generation };
+ // Return both textStream and fullStream
+ // fullStream contains all events including tool-call and tool-result
+ return {
+   textStream: result.textStream,
+   fullStream: result.fullStream,
+   generation,
+ };
};

const isAvailable = async (): Promise<boolean> => {
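The return type above leaves `fullStream` as `AsyncIterable<any>`. A narrower discriminated union covering just the three part types the factory handles might look like this sketch; field names follow the diff, and treating these as the only part types is an assumption, since providers can emit others (step, finish, and error events, for example).

```ts
// Hedged sketch: a narrower type for the stream parts consumed by the factory.
// Real streams may emit additional part types not modeled here.
type StreamPart =
  | { type: "text-delta"; textDelta: string }
  | { type: "tool-call"; toolCallId: string; toolName: string; args: unknown }
  | {
      type: "tool-result";
      toolCallId: string;
      toolName: string;
      result: unknown;
    };
```

Typing the return as `AsyncIterable<StreamPart>` would let the `part.type` checks in the factory narrow each branch automatically.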
1 change: 1 addition & 0 deletions packages/core/src/types/llm.types.ts
@@ -123,6 +123,7 @@ export interface LLMService {
options: LLMServiceOptions & { tools?: any[] }
): Promise<{
textStream: AsyncIterable<string>;
+ fullStream: AsyncIterable<any>;
generation: any;
}>;

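Because the interface now promises a `fullStream`, any test double for `runStreamedLLMWithTools` has to supply one too. A hypothetical stub satisfying the widened shape (all names here are illustrative, not part of this PR):

```ts
// Hypothetical test stub for the widened interface.
async function* stubTextStream(): AsyncIterable<string> {
  yield "hello";
}

async function* stubFullStream(): AsyncIterable<any> {
  yield { type: "text-delta", textDelta: "hello" };
  yield { type: "tool-call", toolCallId: "1", toolName: "echo", args: {} };
  yield { type: "tool-result", toolCallId: "1", toolName: "echo", result: "ok" };
}

const stubRunStreamedLLMWithTools = async () => ({
  textStream: stubTextStream(),
  fullStream: stubFullStream(),
  generation: null,
});
```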