Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 13 additions & 1 deletion app/api/chat/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import type { NextRequest } from "next/server";
import { NextResponse } from "next/server";
import { getCorsHeaders } from "@/lib/networking/getCorsHeaders";
import { handleChatStream } from "@/lib/chat/handleChatStream";
import { handleCreateArtistRedirect } from "@/lib/chat/handleCreateArtistRedirect";

/**
* OPTIONS handler for CORS preflight requests.
Expand Down Expand Up @@ -37,5 +38,16 @@ export async function OPTIONS() {
* @returns A streaming response or error
*/
export async function POST(request: NextRequest): Promise<Response> {
  // Delegate to the shared streaming handler. The callback runs after the
  // model response completes and may inject extra data chunks into the
  // UI message stream before the finish chunk is emitted.
  //
  // NOTE: the previous unconditional `return handleChatStream(request);`
  // left over from the old implementation has been removed — it made the
  // callback-passing call below unreachable dead code.
  return handleChatStream(request, async ({ body, responseMessages, writer }) => {
    // If the assistant created a new artist during this turn, resolve the
    // room path the client should navigate to; undefined means no redirect.
    const redirectPath = await handleCreateArtistRedirect(body, responseMessages);
    if (!redirectPath) {
      return;
    }

    // Transient chunk: consumed by the client for navigation only and not
    // persisted as part of the message history.
    writer.write({
      type: "data-redirect",
      data: { path: redirectPath },
      transient: true,
    });
  });
}
31 changes: 31 additions & 0 deletions lib/chat/__tests__/handleChatCompletion.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -317,5 +317,36 @@ describe("handleChatCompletion", () => {
}),
);
});

it("returns redirect path after creating and copying the final artist room", async () => {
const body = createMockBody();
const responseMessages: UIMessage[] = [
{
id: "resp-1",
role: "assistant",
parts: [
{
type: "tool-create_new_artist",
toolCallId: "tool-1",
state: "output-available",
input: {},
output: {
artist: {
account_id: "artist-123",
name: "Test Artist",
},
artistAccountId: "artist-123",
message: "ok",
},
} as any,
],
createdAt: new Date(),
},
];

await handleChatCompletion(body, responseMessages);
Copy link
Copy Markdown

@cubic-dev-ai cubic-dev-ai bot Apr 6, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

P1: Custom agent: Flag AI Slop and Fabricated Changes

The redirect-flow test was changed to assert unrelated email handling, so the PR’s claimed redirect behavior is no longer exercised by this test. Restore an assertion on the redirect result to prevent normalizing regressions.

Prompt for AI agents
Check if this issue is valid — if so, understand the root cause and fix it. At lib/chat/__tests__/handleChatCompletion.test.ts, line 347:

<comment>The redirect-flow test was changed to assert unrelated email handling, so the PR’s claimed redirect behavior is no longer exercised by this test. Restore an assertion on the redirect result to prevent normalizing regressions.</comment>

<file context>
@@ -362,15 +344,9 @@ describe("handleChatCompletion", () => {
       ];
 
-      const result = await handleChatCompletion(body, responseMessages);
+      await handleChatCompletion(body, responseMessages);
 
-      expect(mockCopyRoom).toHaveBeenCalledWith("room-456", "artist-123");
</file context>
Fix with Cubic


expect(mockHandleSendEmailToolOutputs).toHaveBeenCalledWith(responseMessages);
});
});
});
83 changes: 83 additions & 0 deletions lib/chat/__tests__/handleChatStream.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ import { getApiKeyAccountId } from "@/lib/auth/getApiKeyAccountId";
import { validateOverrideAccountId } from "@/lib/accounts/validateOverrideAccountId";
import { setupChatRequest } from "@/lib/chat/setupChatRequest";
import { setupConversation } from "@/lib/chat/setupConversation";
import { handleChatCompletion } from "@/lib/chat/handleChatCompletion";
import { handleCreateArtistRedirect } from "@/lib/chat/handleCreateArtistRedirect";
import { createUIMessageStream, createUIMessageStreamResponse } from "ai";
import { handleChatStream } from "../handleChatStream";

Expand Down Expand Up @@ -54,6 +56,10 @@ vi.mock("@/lib/chat/handleChatCompletion", () => ({
handleChatCompletion: vi.fn(),
}));

vi.mock("@/lib/chat/handleCreateArtistRedirect", () => ({
handleCreateArtistRedirect: vi.fn(),
}));

vi.mock("@/lib/credits/handleChatCredits", () => ({
handleChatCredits: vi.fn(),
}));
Expand All @@ -71,6 +77,8 @@ const mockGetApiKeyAccountId = vi.mocked(getApiKeyAccountId);
const mockValidateOverrideAccountId = vi.mocked(validateOverrideAccountId);
const mockSetupConversation = vi.mocked(setupConversation);
const mockSetupChatRequest = vi.mocked(setupChatRequest);
const mockHandleChatCompletion = vi.mocked(handleChatCompletion);
const mockHandleCreateArtistRedirect = vi.mocked(handleCreateArtistRedirect);
const mockCreateUIMessageStream = vi.mocked(createUIMessageStream);
const mockCreateUIMessageStreamResponse = vi.mocked(createUIMessageStreamResponse);

Expand Down Expand Up @@ -99,6 +107,8 @@ describe("handleChatStream", () => {
roomId: roomId || "mock-room-id",
memoryId: "mock-memory-id",
}));
mockHandleChatCompletion.mockResolvedValue();
mockHandleCreateArtistRedirect.mockResolvedValue(undefined);
});

afterEach(() => {
Expand Down Expand Up @@ -247,6 +257,79 @@ describe("handleChatStream", () => {
}),
);
});

// Verifies the streaming pipeline when a completion hook is supplied to
// handleChatStream: the UI message stream must be created with sendFinish
// disabled, the redirect data chunk must be written once the hook resolves
// a path, and the manual "finish" chunk must still be emitted afterwards.
it("uses sendFinish false and emits redirect data after completion", async () => {
// Arrange: authenticated request plus an agent whose stream exposes a
// spyable toUIMessageStream so the options passed to it can be asserted.
mockGetApiKeyAccountId.mockResolvedValue("account-123");
const toUIMessageStream = vi.fn().mockReturnValue(new ReadableStream());
const mockAgent = {
stream: vi.fn().mockResolvedValue({
toUIMessageStream,
usage: Promise.resolve({ inputTokens: 100, outputTokens: 50 }),
}),
tools: {},
};

mockSetupChatRequest.mockResolvedValue({
agent: mockAgent,
messages: [],
} as any);

const mockStream = new ReadableStream();
mockCreateUIMessageStream.mockReturnValue(mockStream);
mockCreateUIMessageStreamResponse.mockReturnValue(new Response(mockStream));

const request = createMockRequest({ prompt: "Hello" }, { "x-api-key": "valid-key" });

// Act: invoke the handler with the same redirect-emitting callback the
// production route passes in (handleCreateArtistRedirect is mocked).
await handleChatStream(request as any, async ({ body, responseMessages, writer }) => {
const redirectPath = await handleCreateArtistRedirect(body, responseMessages);
if (!redirectPath) {
return;
}

writer.write({
type: "data-redirect",
data: { path: redirectPath },
transient: true,
});
});

// Set the redirect result before driving execute(): the mocked
// handleCreateArtistRedirect is only called later, inside onFinish.
mockHandleCreateArtistRedirect.mockResolvedValue("/chat/new-room-123");

// Capture the execute callback handed to createUIMessageStream and drive
// it manually with a stub writer so the written chunks can be inspected.
const execute = mockCreateUIMessageStream.mock.calls[0][0].execute;
const writer = {
merge: vi.fn(),
write: vi.fn(),
onError: undefined,
};

await execute({ writer } as any);

// The stream must take over finish emission itself (sendFinish: false)
// and register an onFinish hook.
expect(toUIMessageStream).toHaveBeenCalledWith(
expect.objectContaining({
sendFinish: false,
onFinish: expect.any(Function),
}),
);

// Simulate a successful, non-aborted completion event.
const onFinish = toUIMessageStream.mock.calls[0][0].onFinish;
await onFinish({
isAborted: false,
finishReason: "stop",
messages: [{ id: "resp-1", role: "assistant", parts: [] }],
responseMessage: { id: "resp-1", role: "assistant", parts: [] },
isContinuation: false,
});

// Redirect chunk written by the hook, then the manual finish chunk.
expect(writer.write).toHaveBeenCalledWith({
type: "data-redirect",
data: { path: "/chat/new-room-123" },
transient: true,
});
expect(writer.write).toHaveBeenCalledWith({
type: "finish",
finishReason: "stop",
});
});
});

describe("error handling", () => {
Expand Down
86 changes: 86 additions & 0 deletions lib/chat/__tests__/handleCreateArtistRedirect.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import type { UIMessage } from "ai";
import { copyRoom } from "@/lib/rooms/copyRoom";
import { copyChatMessages } from "@/lib/chats/copyChatMessages";
import { handleCreateArtistRedirect } from "../handleCreateArtistRedirect";

vi.mock("@/lib/rooms/copyRoom", () => ({
copyRoom: vi.fn(),
}));

vi.mock("@/lib/chats/copyChatMessages", () => ({
copyChatMessages: vi.fn(),
}));

const mockCopyRoom = vi.mocked(copyRoom);
const mockCopyChatMessages = vi.mocked(copyChatMessages);

// Unit tests for handleCreateArtistRedirect: given the request body and the
// assistant's response messages, it should copy the source room to the newly
// created artist, migrate the chat messages, and return the redirect path —
// or return undefined when no create-artist tool output is present.
describe("handleCreateArtistRedirect", () => {
  beforeEach(() => {
    vi.clearAllMocks();
  });

  it("returns redirect path after creating and copying the final artist room", async () => {
    // Arrange: room copy yields the new room id and message copy succeeds.
    mockCopyRoom.mockResolvedValue("new-room-123");
    mockCopyChatMessages.mockResolvedValue({
      status: "success",
      copiedCount: 1,
      clearedExisting: true,
    });

    // A completed create_new_artist tool invocation in the assistant reply.
    const createArtistPart = {
      type: "tool-create_new_artist",
      toolCallId: "tool-1",
      state: "output-available",
      input: {},
      output: {
        artist: {
          account_id: "artist-123",
          name: "Test Artist",
        },
        artistAccountId: "artist-123",
        message: "ok",
      },
    } as any;

    const responseMessages: UIMessage[] = [
      {
        id: "resp-1",
        role: "assistant",
        parts: [createArtistPart],
        createdAt: new Date(),
      },
    ];

    const requestBody = {
      accountId: "account-123",
      messages: [],
      roomId: "room-456",
    } as any;

    // Act
    const redirectPath = await handleCreateArtistRedirect(requestBody, responseMessages);

    // Assert: source room copied to the new artist, messages migrated with
    // the target cleared first, and the new room's chat path returned.
    expect(mockCopyRoom).toHaveBeenCalledWith("room-456", "artist-123");
    expect(mockCopyChatMessages).toHaveBeenCalledWith({
      sourceChatId: "room-456",
      targetChatId: "new-room-123",
      clearExisting: true,
    });
    expect(redirectPath).toBe("/chat/new-room-123");
  });

  it("returns undefined when no create artist result exists", async () => {
    // Assistant reply with no tool parts at all.
    const messagesWithoutToolOutput: UIMessage[] = [
      { id: "resp-1", role: "assistant", parts: [], createdAt: new Date() },
    ];

    const result = await handleCreateArtistRedirect(
      {
        accountId: "account-123",
        messages: [],
        roomId: "room-456",
      } as any,
      messagesWithoutToolOutput,
    );

    // No redirect and no side effects when the tool never ran.
    expect(result).toBeUndefined();
    expect(mockCopyRoom).not.toHaveBeenCalled();
  });
});
61 changes: 43 additions & 18 deletions lib/chat/handleChatStream.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,16 @@ import { setupChatRequest } from "./setupChatRequest";
import { getCorsHeaders } from "@/lib/networking/getCorsHeaders";
import generateUUID from "@/lib/uuid/generateUUID";
import { DEFAULT_MODEL } from "@/lib/const";
import type { ChatRequestBody } from "./validateChatRequest";
import type { UIMessage } from "ai";

type ChatStreamFinishHandler = (params: {
body: ChatRequestBody;
responseMessages: UIMessage[];
writer: {
write: (chunk: unknown) => void;
};
}) => Promise<void>;

/**
* Handles a streaming chat request.
Expand All @@ -19,7 +29,10 @@ import { DEFAULT_MODEL } from "@/lib/const";
* @param request - The incoming NextRequest
* @returns A streaming response or error NextResponse
*/
export async function handleChatStream(request: NextRequest): Promise<Response> {
export async function handleChatStream(
request: NextRequest,
onFinish?: ChatStreamFinishHandler,
): Promise<Response> {
const validatedBodyOrError = await validateChatRequest(request);
if (validatedBodyOrError instanceof NextResponse) {
return validatedBodyOrError;
Expand All @@ -38,23 +51,35 @@ export async function handleChatStream(request: NextRequest): Promise<Response>
execute: async options => {
const { writer } = options;
streamResult = await agent.stream(chatConfig);
writer.merge(streamResult.toUIMessageStream());
},
onFinish: async event => {
if (event.isAborted) {
return;
}
const assistantMessages = event.messages.filter(message => message.role === "assistant");
const responseMessages =
assistantMessages.length > 0 ? assistantMessages : [event.responseMessage];
await handleChatCompletion(body, responseMessages);
if (streamResult) {
await handleChatCredits({
usage: await streamResult.usage,
model: body.model ?? DEFAULT_MODEL,
accountId: body.accountId,
});
}
writer.merge(
streamResult.toUIMessageStream({
sendFinish: false,
onFinish: async event => {
if (event.isAborted) {
return;
}

const assistantMessages = event.messages.filter(
message => message.role === "assistant",
);
const responseMessages =
assistantMessages.length > 0 ? assistantMessages : [event.responseMessage];
await handleChatCompletion(body, responseMessages);
await onFinish?.({ body, responseMessages, writer });

writer.write({
type: "finish",
finishReason: event.finishReason,
});

await handleChatCredits({
usage: await streamResult!.usage,
model: body.model ?? DEFAULT_MODEL,
accountId: body.accountId,
});
Comment on lines +55 to +79
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🔴 Critical

Always emit the manual finish chunk from a finally.

With sendFinish: false, this callback is now responsible for closing the stream. Any rejection from handleChatCompletion() or the injected onFinish hook skips the finish write, which can leave the client waiting indefinitely. Put the side effects behind a try/finally so finish is sent even when the redirect flow fails.

Possible fix
               const responseMessages =
                 assistantMessages.length > 0 ? assistantMessages : [event.responseMessage];
-              await handleChatCompletion(body, responseMessages);
-              await onFinish?.({ body, responseMessages, writer });
-
-              writer.write({
-                type: "finish",
-                finishReason: event.finishReason,
-              });
+              try {
+                await handleChatCompletion(body, responseMessages);
+                await onFinish?.({ body, responseMessages, writer });
+              } catch (error) {
+                console.error("/api/chat onFinish error:", error);
+              } finally {
+                writer.write({
+                  type: "finish",
+                  finishReason: event.finishReason,
+                });
+              }
 
               await handleChatCredits({
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
streamResult.toUIMessageStream({
sendFinish: false,
onFinish: async event => {
if (event.isAborted) {
return;
}
const assistantMessages = event.messages.filter(
message => message.role === "assistant",
);
const responseMessages =
assistantMessages.length > 0 ? assistantMessages : [event.responseMessage];
await handleChatCompletion(body, responseMessages);
await onFinish?.({ body, responseMessages, writer });
writer.write({
type: "finish",
finishReason: event.finishReason,
});
await handleChatCredits({
usage: await streamResult!.usage,
model: body.model ?? DEFAULT_MODEL,
accountId: body.accountId,
});
streamResult.toUIMessageStream({
sendFinish: false,
onFinish: async event => {
if (event.isAborted) {
return;
}
const assistantMessages = event.messages.filter(
message => message.role === "assistant",
);
const responseMessages =
assistantMessages.length > 0 ? assistantMessages : [event.responseMessage];
try {
await handleChatCompletion(body, responseMessages);
await onFinish?.({ body, responseMessages, writer });
} catch (error) {
console.error("/api/chat onFinish error:", error);
} finally {
writer.write({
type: "finish",
finishReason: event.finishReason,
});
}
await handleChatCredits({
usage: await streamResult!.usage,
model: body.model ?? DEFAULT_MODEL,
accountId: body.accountId,
});
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@lib/chat/handleChatStream.ts` around lines 55 - 79, The callback passed to
streamResult.toUIMessageStream (the onFinish handler) currently performs side
effects (calling handleChatCompletion, onFinish hook, writer.write with type
"finish", and handleChatCredits) without a finally, so exceptions can skip
emitting the manual finish chunk; refactor the onFinish async handler to perform
the side effects (await handleChatCompletion and await onFinish) inside a try
block and ensure the writer.write({type: "finish", finishReason: ...}) and the
handleChatCredits call run in a finally block (still awaiting any necessary
promises) so the finish chunk is always emitted even if handleChatCompletion or
onFinish throws.

},
}),
);
},
onError: e => {
console.error("/api/chat onError:", e);
Expand Down
Loading
Loading