14 changes: 8 additions & 6 deletions app/api/chat/route.ts
@@ -1,15 +1,13 @@
import { createOpenAI } from '@ai-sdk/openai';
import { streamText, createDataStreamResponse, generateText, simulateReadableStream, Message } from 'ai';
+import cl100k_base from "tiktoken/encoders/cl100k_base.json";
+import { Tiktoken } from "tiktoken/lite";

import { apiEndpoint, apiKey, imgGenFnModel, DEFAULT_SYSTEM_PROMPT } from '@/app/config/api';
import { defaultModel, models } from '@/app/config/models';
import { withAuth } from '@/lib/auth';
import { getAvailableModels } from '@/lib/models';
import { generateImageTool } from '@/lib/tools';

-const cl100k_base = require("tiktoken/encoders/cl100k_base.json");
-const { Tiktoken } = require("tiktoken/lite");

// Allow streaming responses up to 30 seconds
export const maxDuration = 30;

@@ -49,7 +47,9 @@ async function handlePostRequest(req: Request) {
  for (const file of context) {
    const tokens = encoding.encode(file.content);
    if (tokenCount + tokens.length + 1000 > (selectedModel?.tokenLimit || 128000)) {
-      console.log(`Token limit reached: ${tokenCount + tokens.length}`);
+      if (process.env.NODE_ENV === 'development') {
+        console.log(`Token limit reached: ${tokenCount + tokens.length}`);
+      }
      return new Response('Your files have too much content for this model. Please remove some files or try a different model.', {
        status: 400,
        headers: {
@@ -66,7 +66,9 @@
    const tokens = encoding.encode(message.content);

    if (tokenCount + tokens.length + 1000 > (selectedModel?.tokenLimit || 128000)) {
-      console.log(`Token limit reached: ${tokenCount + tokens.length}`);
+      if (process.env.NODE_ENV === 'development') {
+        console.log(`Token limit reached: ${tokenCount + tokens.length}`);
+      }
      break;
    }
    tokenCount += tokens.length;
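Both hunks above guard the same token-budget check, which the new static imports now feed. A minimal sketch of that pattern, assuming the `tiktoken/lite` setup from this file (the `fitsTokenBudget` helper and its arguments are illustrative, not part of this PR):

```ts
import cl100k_base from "tiktoken/encoders/cl100k_base.json";
import { Tiktoken } from "tiktoken/lite";

// Build the encoder from the bundled cl100k_base BPE ranks.
const encoding = new Tiktoken(
  cl100k_base.bpe_ranks,
  cl100k_base.special_tokens,
  cl100k_base.pat_str,
);

// Hypothetical helper: returns false once the accumulated input,
// plus a ~1000-token cushion for the response, exceeds the model limit.
function fitsTokenBudget(texts: string[], tokenLimit = 128000): boolean {
  let tokenCount = 0;
  for (const text of texts) {
    tokenCount += encoding.encode(text).length;
    if (tokenCount + 1000 > tokenLimit) {
      encoding.free(); // release the WASM-backed encoder
      return false;
    }
  }
  encoding.free();
  return true;
}
```

The 128000 fallback and the 1000-token cushion mirror the values the handler applies when `selectedModel?.tokenLimit` is unset.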
25 changes: 22 additions & 3 deletions app/context/ChatContext.tsx
@@ -14,6 +14,7 @@ import { Folder, useFolders } from '@/hooks/use-folders';
import { getAccessToken, storeAccessToken, processMessages } from '@/lib/utils';

const SELECTED_MODEL_KEY = 'selectedModel';
+const CURRENT_SYSTEM_PROMPT_KEY = 'currentSystemPrompt';

export interface ContextFile {
  id: string;
@@ -62,6 +63,7 @@ interface ChatContextType {
  handleInputChange: (e: React.ChangeEvent<HTMLInputElement> | React.ChangeEvent<HTMLTextAreaElement>) => void;
  handleSubmit: (e: React.FormEvent<HTMLFormElement>) => Promise<void>;
  isLoading: boolean;
+  status: 'submitted' | 'streaming' | 'ready' | 'error';
  contextFiles: ContextFile[];
  setContextFiles: (files: ContextFile[]) => void;
  reload: () => void;
@@ -112,13 +114,21 @@ export const ChatProvider: React.FC<{ children: React.ReactNode }> = ({ children
    return defaultModel;
  };

+  const getSavedSystemPrompt = () => {
+    if (typeof window !== 'undefined') {
+      const savedPrompt = localStorage.getItem(CURRENT_SYSTEM_PROMPT_KEY);
+      return savedPrompt || DEFAULT_SYSTEM_PROMPT;
+    }
+    return DEFAULT_SYSTEM_PROMPT;
+  };
+
  const [modelSelection, setModelSelection] = useState(getSavedModel);
  const [availableModels, setAvailableModels] = useState<Model[]>(defaultModels);
  const [isLoadingModels, setIsLoadingModels] = useState(true);
  const [modelError, setModelError] = useState<string | null>(null);

  // Config state
-  const [systemPrompt, setSystemPrompt] = useState(DEFAULT_SYSTEM_PROMPT);
+  const [systemPrompt, setSystemPrompt] = useState(getSavedSystemPrompt);
  const [temperature, setTemperature] = useState(0.7);
  const [topP, setTopP] = useState(0.95);
  const [isConfigOpen, setIsConfigOpen] = useState(false);
@@ -141,13 +151,14 @@ export const ChatProvider: React.FC<{ children: React.ReactNode }> = ({ children
    handleInputChange,
    handleSubmit: originalHandleSubmit,
    isLoading,
+    status,
    setMessages,
    setInput,
    reload,
    stop,
  } = useChat({
    api: '/api/chat',
-    experimental_throttle: 150,
+    experimental_throttle: 250,
    body: {
      model: modelSelection,
      system: systemPrompt,
@@ -182,6 +193,13 @@ export const ChatProvider: React.FC<{ children: React.ReactNode }> = ({ children
    }
  }, [modelSelection]);

+  // Save system prompt to localStorage
+  useEffect(() => {
+    if (typeof window !== 'undefined' && systemPrompt !== undefined) {
+      localStorage.setItem(CURRENT_SYSTEM_PROMPT_KEY, systemPrompt);
+    }
+  }, [systemPrompt]);
+
  // Effect hooks
  useEffect(() => {
    const init = async () => {
@@ -329,7 +347,7 @@ export const ChatProvider: React.FC<{ children: React.ReactNode }> = ({ children
    if (selectedChat && messages.length > 0) {
      updateChat(selectedChat, messages);
    }
-  }, [messages, selectedChat]);
+  }, [messages, selectedChat, updateChat]);

// Handle access token submission
const handleAccessTokenSubmit = async () => {
@@ -480,6 +498,7 @@ export const ChatProvider: React.FC<{ children: React.ReactNode }> = ({ children
    handleInputChange,
    handleSubmit: handleChatSubmit,
    isLoading,
+    status,
    contextFiles,
    setContextFiles,
    reload,
2 changes: 2 additions & 0 deletions app/models/[modelId]/chat/page.tsx
@@ -35,6 +35,7 @@ export default function ModelDetailPage( {params}: any) {
    handleInputChange,
    handleSubmit,
    isLoading,
+    status,
    contextFiles,
    setContextFiles,
    reload,
@@ -202,6 +203,7 @@ export default function ModelDetailPage( {params}: any) {
      setMessages={setMessages}
      input={input}
      isLoading={isLoading}
+      status={status}
      contextFiles={contextFiles}
      setContextFiles={setContextFiles}
      handleInputChange={handleInputChange}
179 changes: 179 additions & 0 deletions components/animated-markdown.tsx
@@ -0,0 +1,179 @@
import React, { useState, useEffect, useRef, memo } from 'react';

import { Markdown } from './markdown';

interface AnimatedMarkdownProps {
  children: string;
  isLoading: boolean;
  status?: 'submitted' | 'streaming' | 'ready' | 'error';
  wordsPerSecond?: number;
  sectionIndex?: number;
}

const AnimatedMarkdown = ({
  children,
  isLoading,
  status = 'ready',
  wordsPerSecond = 15,
  sectionIndex = 0
}: AnimatedMarkdownProps) => {
  const [displayedContent, setDisplayedContent] = useState('');
  const [delayedStart, setDelayedStart] = useState(false);
  const previousContentRef = useRef('');
  const animationTimeoutRef = useRef<NodeJS.Timeout | null>(null);
  const delayTimeoutRef = useRef<NodeJS.Timeout | null>(null);
  const currentWordIndexRef = useRef(0);
  const throttleDelay = 250;

  const shouldAnimate = status === 'streaming' || (isLoading && status !== 'ready');

  // Clear animation timeout on unmount
  useEffect(() => {
    return () => {
      if (animationTimeoutRef.current) {
        clearTimeout(animationTimeoutRef.current);
      }
      if (delayTimeoutRef.current) {
        clearTimeout(delayTimeoutRef.current);
      }
    };
  }, []);

  // Handle sequential animation delays
  useEffect(() => {
    if (!shouldAnimate) {
      setDelayedStart(false);
      return;
    }

    // Clear any existing delay
    if (delayTimeoutRef.current) {
      clearTimeout(delayTimeoutRef.current);
    }

    const delay = sectionIndex === 0 ? 0 : 500;

    delayTimeoutRef.current = setTimeout(() => {
      setDelayedStart(true);
    }, delay);

    return () => {
      if (delayTimeoutRef.current) {
        clearTimeout(delayTimeoutRef.current);
      }
    };
  }, [shouldAnimate, sectionIndex]);

  // Split text into words while preserving whitespace
  const splitIntoWords = (text: string) => {
    return text.match(/\S+|\s+/g) || [];
  };

  // Main animation logic
  useEffect(() => {
    const currentContent = children;
    const previousContent = previousContentRef.current;

    // If content is completely different or shorter (regeneration), reset
    if (currentContent.length < previousContent.length ||
        (previousContent.length > 0 && !currentContent.startsWith(previousContent))) {

      // Clear existing animation
      if (animationTimeoutRef.current) {
        clearTimeout(animationTimeoutRef.current);
      }

      setDisplayedContent('');
      previousContentRef.current = '';
      currentWordIndexRef.current = 0;

      if (currentContent.length > 0 && shouldAnimate && delayedStart) {
        setTimeout(() => startAnimation(currentContent), 50);
      } else if (currentContent.length > 0) {
        // Show immediately if not animating
        setDisplayedContent(currentContent);
        previousContentRef.current = currentContent;
      }
      return;
    }

    if (currentContent.length > previousContent.length && shouldAnimate && delayedStart) {
      startAnimation(currentContent);
    }

    // If not loading/streaming and content hasn't changed, show everything immediately
    if (!shouldAnimate && currentContent === previousContent && displayedContent !== currentContent) {
      setDisplayedContent(currentContent);
      previousContentRef.current = currentContent;
    }

    // Only show content immediately on error, not on ready
    if (status === 'error' && displayedContent !== currentContent) {
      if (animationTimeoutRef.current) {
        clearTimeout(animationTimeoutRef.current);
      }
      setDisplayedContent(currentContent);
      previousContentRef.current = currentContent;
    }
  }, [children, isLoading, status, shouldAnimate, delayedStart]);

  const startAnimation = (targetContent: string) => {
    if (animationTimeoutRef.current) {
      clearTimeout(animationTimeoutRef.current);
    }

    // Update the previous content reference immediately to prevent race conditions
    previousContentRef.current = targetContent;

    const currentWords = splitIntoWords(displayedContent);
    const targetWords = splitIntoWords(targetContent);

    const startWordIndex = currentWords.length;
    currentWordIndexRef.current = startWordIndex;

    // Calculate adaptive animation speed based on remaining words and throttle timing
    const remainingWords = targetWords.length - startWordIndex;
    const adaptiveWordsPerSecond = Math.max(wordsPerSecond, remainingWords / (throttleDelay / 1000) * 0.7); // Complete in 70% of throttle time

    const animateNextWord = () => {
      const currentWordIndex = currentWordIndexRef.current;

      if (currentWordIndex < targetWords.length) {
        // Add the next word to displayed content
        const wordsToShow = targetWords.slice(0, currentWordIndex + 1);
        const newDisplayed = wordsToShow.join('');
        setDisplayedContent(newDisplayed);
        currentWordIndexRef.current = currentWordIndex + 1;

        const delay = 1000 / adaptiveWordsPerSecond;
        animationTimeoutRef.current = setTimeout(animateNextWord, delay);
      } else {
        // Animation complete
        setDisplayedContent(targetContent);
      }
    };

    animateNextWord();
  };

  // Reset when starting a new message
  useEffect(() => {
    if ((isLoading || status === 'submitted') && children === '') {
      setDisplayedContent('');
      previousContentRef.current = '';
      currentWordIndexRef.current = 0;

      if (animationTimeoutRef.current) {
        clearTimeout(animationTimeoutRef.current);
      }
    }
  }, [isLoading, status, children]);

  return (
    <div className="relative">
      <Markdown>{displayedContent}</Markdown>
    </div>
  );
};

export default memo(AnimatedMarkdown);
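A minimal usage sketch for the new component (illustrative wiring, not code from this PR; it assumes `status` comes from `useChat` as in the pages below, and that an assistant reply has already been split into sections):

```tsx
import AnimatedMarkdown from '@/components/animated-markdown';

type ChatStatus = 'submitted' | 'streaming' | 'ready' | 'error';

// Hypothetical consumer: animate each section of an assistant reply.
// Sections after the first are staggered by the component's built-in
// 500 ms delay via sectionIndex.
export function AssistantSections({ sections, status }: {
  sections: string[];
  status: ChatStatus;
}) {
  return (
    <>
      {sections.map((text, i) => (
        <AnimatedMarkdown
          key={i}
          isLoading={status === 'submitted' || status === 'streaming'}
          status={status}
          sectionIndex={i}
        >
          {text}
        </AnimatedMarkdown>
      ))}
    </>
  );
}
```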
2 changes: 2 additions & 0 deletions components/chat/chat-home.tsx
@@ -32,6 +32,7 @@ export function ChatHome() {
    handleInputChange,
    handleSubmit,
    isLoading,
+    status,
    contextFiles,
    setContextFiles,
    reload,
@@ -100,6 +101,7 @@ export function ChatHome() {
      messages={messages}
      input={input}
      isLoading={isLoading}
+      status={status}
      contextFiles={contextFiles}
      setContextFiles={setContextFiles}
      handleInputChange={handleInputChange}