Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion examples/nextjs/.env.local.example
Original file line number Diff line number Diff line change
@@ -1,2 +1,4 @@
# Get your Arcjet key from https://app.arcjet.com
ARCJET_KEY=
ARCJET_KEY=
# Optional for testing the chat route
OPENAI_API_KEY=
171 changes: 171 additions & 0 deletions examples/nextjs/app/chat/page.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,171 @@
"use client";

import { useChat } from "@ai-sdk/react";
import { DefaultChatTransport } from "ai";
import { useState } from "react";

export default function Chat() {
const [input, setInput] = useState("");
const [errorMessage, setErrorMessage] = useState<string | null>(null);

const { messages, sendMessage, status } = useChat({
transport: new DefaultChatTransport({ api: "/chat/test" }),
onError: async (e) => {
setErrorMessage(e.message);
},
});

return (
<main className="page">
<style>{`@keyframes spin { to { transform: rotate(360deg); } }`}</style>
<div className="section">
<h1 className="heading-primary">AI chat</h1>
<p className="typography-primary">
This chat is protected by Arcjet: bot detection blocks automated
clients, a token bucket rate limits AI usage, sensitive information
detection prevents data leaks, and prompt injection detection stops
manipulation attempts.
</p>
</div>

<hr className="divider" />

<div className="section">
<h2 className="heading-secondary">Try it</h2>

{messages.length > 0 && (
<div
style={{
display: "flex",
flexDirection: "column",
gap: "0.75rem",
width: "100%",
}}
>
{messages.map((message) => (
<div
key={message.id}
style={{
display: "flex",
flexDirection: "column",
gap: "0.25rem",
alignSelf:
message.role === "user" ? "flex-end" : "flex-start",
maxWidth: "80%",
}}
>
<span
style={{
fontSize: "0.75rem",
fontWeight: 600,
color: "var(--theme-text-muted)",
textTransform: "uppercase",
letterSpacing: "0.05em",
alignSelf:
message.role === "user" ? "flex-end" : "flex-start",
}}
>
{message.role === "user" ? "You" : "AI"}
</span>
<div
style={{
padding: "0.75rem 1rem",
borderRadius: "0.5rem",
border: "1px solid var(--theme-border-level1)",
backgroundColor:
message.role === "user"
? "var(--theme-foreground)"
: "var(--theme-surface)",
color:
message.role === "user"
? "var(--theme-background)"
: "var(--theme-text-primary)",
fontSize: "0.9375rem",
lineHeight: "1.5rem",
whiteSpace: "pre-wrap",
}}
>
{message.parts.map((part, i) => {
switch (part.type) {
case "text":
return (
<span key={`${message.id}-${i}`}>{part.text}</span>
);
}
})}
</div>
</div>
))}
</div>
)}

{status === "submitted" && (
<div
style={{
display: "flex",
alignItems: "center",
gap: "0.5rem",
fontSize: "0.875rem",
color: "var(--theme-text-muted)",
}}
>
<span
style={{
display: "inline-block",
width: "1rem",
height: "1rem",
border: "2px solid var(--theme-border-level1)",
borderTopColor: "var(--theme-text-muted)",
borderRadius: "50%",
animation: "spin 0.8s linear infinite",
}}
/>
AI is thinking&hellip;
</div>
)}

{errorMessage && (
<div
style={{
fontSize: "0.875rem",
fontWeight: 600,
lineHeight: "1.25rem",
padding: "0.75rem",
borderRadius: "0.5rem",
border: "1px solid #ef4444",
backgroundColor: "#2d0a0a",
color: "#fca5a5",
}}
>
{errorMessage}
</div>
)}

<form
onSubmit={(e) => {
e.preventDefault();
setErrorMessage(null);
sendMessage({ text: input });
setInput("");
}}
className="form form--wide"
>
<div className="form-field">
<label className="form-label">
Message
<input
className="form-input"
value={input}
placeholder="Say something..."
onChange={(e) => setInput(e.currentTarget.value)}
/>
</label>
</div>
<button type="submit" className="button-primary form-button">
Send
</button>
</form>
</div>
</main>
);
}
107 changes: 107 additions & 0 deletions examples/nextjs/app/chat/test/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,107 @@
import type { UIMessage } from "ai";
import { convertToModelMessages, isTextUIPart, streamText } from "ai";
import { openai } from "@ai-sdk/openai";
import arcjet, {
detectBot,
detectPromptInjection,
sensitiveInfo,
shield,
tokenBucket,
} from "@/lib/arcjet";

// Opt out of caching: force Next.js to run this route handler on every
// request rather than serving a cached response.
export const dynamic = "force-dynamic";

// Arcjet client with the full rule chain applied to the chat route.
// Rules are evaluated together in a single protect() call below.
const aj = arcjet
  // Shield protects against common web attacks e.g. SQL injection
  .withRule(shield({ mode: "LIVE" }))
  // Block all automated clients — bots inflate AI costs
  .withRule(
    detectBot({
      mode: "LIVE", // Blocks requests. Use "DRY_RUN" to log only
      allow: [], // Block all bots. See https://arcjet.com/bot-list
    }),
  )
  // Enforce budgets to control AI costs. Adjust rates and limits as needed.
  .withRule(
    tokenBucket({
      // Track budgets per user — replace "userId" with any stable identifier
      characteristics: ["userId"],
      mode: "LIVE", // Blocks requests. Use "DRY_RUN" to log only
      refillRate: 2_000, // Tokens added back to the bucket per interval
      // NOTE(review): with interval "1m" this refills 2,000 tokens per
      // MINUTE; an earlier comment said "per hour" — confirm whether the
      // intended interval is "1m" or "1h".
      interval: "1m",
      capacity: 5_000, // Maximum 5,000 tokens in the bucket
    }),
  )
  // Block messages containing sensitive information to prevent data leaks
  .withRule(
    sensitiveInfo({
      mode: "LIVE", // Blocks requests. Use "DRY_RUN" to log only
      // Block PII types that should never appear in AI prompts.
      // Remove types your app legitimately handles (e.g. EMAIL for a support bot).
      deny: ["CREDIT_CARD_NUMBER", "EMAIL"],
    }),
  )
  // Detect prompt injection attacks before they reach your AI model
  .withRule(
    detectPromptInjection({
      mode: "LIVE", // Blocks requests. Use "DRY_RUN" to log only
    }),
  );

/**
 * Chat completion endpoint. Runs the Arcjet rule chain (bot detection,
 * token bucket, sensitive info, prompt injection) before forwarding the
 * conversation to the AI provider and streaming the response back.
 *
 * Responses:
 * - 400: malformed body, sensitive info, or prompt injection detected
 * - 403: bot detected or other denial
 * - 429: token budget exceeded
 */
export async function POST(req: Request) {
  // Replace with your session/auth lookup to get a stable user ID
  const userId = "user-123";

  // Parse and validate the body up front: a malformed payload should be
  // a 400 client error, not an unhandled exception surfacing as a 500.
  let messages: UIMessage[];
  try {
    const body: { messages?: unknown } = await req.json();
    if (!Array.isArray(body.messages)) {
      return new Response("Invalid request body", { status: 400 });
    }
    messages = body.messages as UIMessage[];
  } catch {
    return new Response("Invalid JSON", { status: 400 });
  }

  const modelMessages = await convertToModelMessages(messages);

  // Estimate token cost: ~1 token per 4 characters of text (rough heuristic).
  // For accurate counts use https://www.npmjs.com/package/tiktoken
  const totalChars = modelMessages.reduce((sum, m) => {
    const content =
      typeof m.content === "string" ? m.content : JSON.stringify(m.content);
    return sum + content.length;
  }, 0);
  const estimate = Math.ceil(totalChars / 4);

  // Check the most recent user message for sensitive information and prompt injection.
  // Pass the full conversation if you want to scan all messages.
  const lastMessage: string = (messages.at(-1)?.parts ?? [])
    .filter(isTextUIPart)
    .map((p) => p.text)
    .join(" ");

  // Check with Arcjet before calling the AI provider
  const decision = await aj.protect(req, {
    userId,
    requested: estimate,
    sensitiveInfoValue: lastMessage,
    detectPromptInjectionMessage: lastMessage,
  });

  // Map each denial reason to a specific status so the client can show
  // a meaningful error message.
  if (decision.isDenied()) {
    if (decision.reason.isBot()) {
      return new Response("Automated clients are not permitted", {
        status: 403,
      });
    } else if (decision.reason.isRateLimit()) {
      return new Response("AI usage limit exceeded", { status: 429 });
    } else if (decision.reason.isSensitiveInfo()) {
      return new Response("Sensitive information detected", { status: 400 });
    } else if (decision.reason.isPromptInjection()) {
      return new Response(
        "Prompt injection detected — please rephrase your message",
        { status: 400 },
      );
    } else {
      return new Response("Forbidden", { status: 403 });
    }
  }

  // Only reach the (billable) AI provider once every rule has passed.
  const result = await streamText({
    model: openai("gpt-4o"),
    messages: modelMessages,
  });

  return result.toUIMessageStreamResponse();
}
1 change: 1 addition & 0 deletions examples/nextjs/environment.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,5 +3,6 @@ declare namespace NodeJS {
readonly ARCJET_KEY: string;
readonly AUTH_SECRET: string;
readonly ARCJET_ENV?: string;
readonly OPENAI_API_KEY?: string;
}
}
2 changes: 2 additions & 0 deletions examples/nextjs/lib/arcjet.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import arcjet, {
sensitiveInfo,
shield,
slidingWindow,
tokenBucket,
} from "@arcjet/next";

// Re-export the rules to simplify imports inside handlers
Expand All @@ -17,6 +18,7 @@ export {
sensitiveInfo,
shield,
slidingWindow,
tokenBucket,
};

// Create a base Arcjet instance for use by each handler
Expand Down
2 changes: 1 addition & 1 deletion examples/nextjs/next-env.d.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
/// <reference types="next" />
/// <reference types="next/image-types/global" />
import "./.next/types/routes.d.ts";
import "./.next/dev/types/routes.d.ts";

// NOTE: This file should not be edited
// see https://nextjs.org/docs/app/api-reference/config/typescript for more information.
Loading
Loading