Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -7,4 +7,5 @@ __pycache__/
.pytest_cache
.ruff_cache
.langgraph_api
dist
dist
ui/node_modules
101 changes: 14 additions & 87 deletions api/main.py
Original file line number Diff line number Diff line change
@@ -1,93 +1,20 @@
from fastapi import FastAPI
from langgraph_sdk import get_client
from pydantic import BaseModel

app = FastAPI()

langgraph_client = get_client(url="http://langgraph-api:8000")


class ChatMessage(BaseModel):
    """Request body for posting a user message to a chat thread."""

    # Raw user-supplied message text.
    text: str

    model_config = {
        "json_schema_extra": {"examples": [{"text": "Hello"}]}
    }


class HumanReview(BaseModel):
    """Request body for resuming an interrupted run with a human decision."""

    # Decision kind; the examples show "feedback" and "continue".
    action: str
    # Free-text payload accompanying the action; may be empty.
    data: str

    model_config = {
        "json_schema_extra": {
            "examples": [
                {
                    "action": "feedback",
                    "data": "That's not what I meant! Please try again.",
                },
                {"action": "continue", "data": ""},
            ]
        }
    }
from fastapi.middleware.cors import CORSMiddleware

from .routers import router

@app.get("/")
async def read_main():
    """Welcome endpoint for the API root."""
    greeting = {"msg": "Hello! Welcome to the LangGraph Chat API"}
    return greeting


@app.get("/chat")
async def list_chat_threads():
    """
    List all chat threads.
    """
    threads = await langgraph_client.threads.search(
        metadata={"graph_id": "chat"},
    )
    return threads


@app.get("/chat/{thread_id}")
async def get_chat_history(thread_id: str):
    """
    Get the chat history for the given thread.
    """
    thread = await langgraph_client.threads.get(thread_id=thread_id)
    return thread

app = FastAPI()

@app.post("/chat/{thread_id}")
async def chat_with_thread(thread_id: str, message: ChatMessage):
    """
    Take message from user and return response from chatbot.
    If chat thread does not exist, create a new thread.
    """
    run_input = {"messages": [{"role": "user", "content": message.text}]}
    return await langgraph_client.runs.wait(
        thread_id=thread_id,
        assistant_id="chat",
        input=run_input,
        if_not_exists="create",
    )
origins = [
"http://localhost:3000",
]

app.add_middleware(
CORSMiddleware,
allow_origins=origins,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)

@app.post("/chat/{thread_id}/human_review")
async def human_review(thread_id: str, review: HumanReview):
    """
    Submit human review to resume interrupted thread.
    """
    resume_payload = {"action": review.action, "data": review.data}
    return await langgraph_client.runs.wait(
        thread_id=thread_id,
        assistant_id="chat",
        command={"resume": resume_payload},
    )
app.include_router(router)
48 changes: 48 additions & 0 deletions api/models.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
from pydantic import BaseModel


class ChatMessage(BaseModel):
    """Request body for posting a user message to a chat thread."""

    # Raw user-supplied message text.
    text: str

    model_config = {
        "json_schema_extra": {"examples": [{"text": "Hello"}]}
    }


class HumanReview(BaseModel):
    """Request body for resuming an interrupted run with a human decision."""

    # Decision kind; the examples show "feedback" and "continue".
    action: str
    # Free-text payload accompanying the action; may be empty.
    data: str

    model_config = {
        "json_schema_extra": {
            "examples": [
                {
                    "action": "feedback",
                    "data": "That's not what I meant! Please try again.",
                },
                {"action": "continue", "data": ""},
            ]
        }
    }


class Thread(BaseModel):
    """Response model describing one LangGraph chat thread."""

    # Thread identifier; the example shows a UUID string.
    thread_id: str
    # Thread state reported by LangGraph; the example shows "idle".
    status: str

    model_config = {
        "json_schema_extra": {
            "examples": [
                {
                    "thread_id": "ffbbe00c-c65d-437e-892d-a4b59120e3c9",
                    "status": "idle",
                },
            ]
        }
    }
98 changes: 98 additions & 0 deletions api/routers.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
from typing import Any

from fastapi import APIRouter
from langgraph_sdk import get_client

from .models import ChatMessage, HumanReview, Thread

router = APIRouter()
langgraph_client = get_client(url="http://langgraph-api:8000")


def parse_ai_response(
ai_response: list[dict] | dict[str, Any],
) -> dict[str, Any]:
if (
not isinstance(ai_response, dict)
or not {"__interrupt__", "messages"} & ai_response.keys()
):
raise ValueError(
"Unexpected response format from LangGraph API. Expected a dictionary with key 'messages' or '__interrupt__'."
)
if "__interrupt__" in ai_response:
return {
"type": "interrupt",
"data": ai_response["__interrupt__"][-1]["value"]["tool_call"]["args"][
"query"
],
}
return {"type": "message", "data": ai_response["messages"][-1]["content"]}


@router.get("/")
async def read_main() -> dict:
    """Welcome endpoint for the API root."""
    greeting = {"msg": "Hello! Welcome to the LangGraph Chat API"}
    return greeting


@router.get("/chat")
async def list_chat_threads() -> list[Thread]:
    """List every thread created by the "chat" graph."""
    threads_data = await langgraph_client.threads.search(
        metadata={"graph_id": "chat"},
    )
    threads: list[Thread] = []
    for item in threads_data:
        threads.append(Thread(thread_id=item["thread_id"], status=item["status"]))
    return threads


@router.get("/chat/{thread_id}")
async def get_chat_history(thread_id: str) -> dict[str, list[str]]:
    """Return the non-empty human/AI message texts for a thread.

    Raises:
        ValueError: if the thread's "values" field is not a dict.
    """
    thread_data = await langgraph_client.threads.get(thread_id=thread_id)
    if not isinstance(thread_data["values"], dict):
        raise ValueError(
            "Unexpected response format from LangGraph API. Expected a dictionary with key 'values'."
        )
    history: list[str] = []
    for message in thread_data["values"]["messages"]:
        # System/tool messages and blank contents are filtered out.
        if message["type"] in ("human", "ai") and message["content"] != "":
            history.append(message["content"])
    return {"history": history}


@router.get("/chat/{thread_id}/debug")
async def get_chat_history_debug(thread_id: str):
    """Expose the raw, unfiltered thread payload from LangGraph (debug aid)."""
    raw_thread = await langgraph_client.threads.get(thread_id=thread_id)
    return raw_thread


@router.post("/chat/{thread_id}")
async def chat_with_thread(thread_id: str, message: ChatMessage) -> dict[str, Any]:
    """Send a user message and wait for the bot's parsed reply.

    The thread is created on first use (``if_not_exists="create"``).
    """
    run_input = {"messages": [{"role": "user", "content": message.text}]}
    ai_response = await langgraph_client.runs.wait(
        thread_id=thread_id,
        assistant_id="chat",
        input=run_input,
        if_not_exists="create",
    )
    return parse_ai_response(ai_response)


@router.post("/chat/{thread_id}/human_review")
async def human_review(thread_id: str, review: HumanReview) -> dict[str, Any]:
    """Resume an interrupted run with the reviewer's decision."""
    resume_payload = {"action": review.action, "data": review.data}
    ai_response = await langgraph_client.runs.wait(
        thread_id=thread_id,
        assistant_id="chat",
        command={"resume": resume_payload},
    )
    return parse_ai_response(ai_response)


@router.delete("/chat/{thread_id}")
async def delete_chat_thread(thread_id: str) -> dict[str, Any]:
    """Delete a thread and acknowledge which id was removed."""
    await langgraph_client.threads.delete(thread_id=thread_id)
    acknowledgement: dict[str, Any] = {"success": True, "thread_id": thread_id}
    return acknowledgement
2 changes: 1 addition & 1 deletion src/chatbot/__about__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# SPDX-FileCopyrightText: 2025-present Nam Le <lehainam2371999@gmail.com>
#
# SPDX-License-Identifier: MIT
__version__ = "0.0.6"
__version__ = "0.0.7"
41 changes: 41 additions & 0 deletions ui/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/versions

# testing
/coverage

# next.js
/.next/
/out/

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*

# env files (can opt-in for committing if needed)
.env*

# vercel
.vercel

# typescript
*.tsbuildinfo
next-env.d.ts
36 changes: 36 additions & 0 deletions ui/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
This is a [Next.js](https://nextjs.org) project bootstrapped with [`create-next-app`](https://nextjs.org/docs/app/api-reference/cli/create-next-app).

## Getting Started

First, run the development server:

```bash
npm run dev
# or
yarn dev
# or
pnpm dev
# or
bun dev
```

Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.

You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file.

This project uses [`next/font`](https://nextjs.org/docs/app/building-your-application/optimizing/fonts) to automatically optimize and load [Geist](https://vercel.com/font), a new font family for Vercel.

## Learn More

To learn more about Next.js, take a look at the following resources:

- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API.
- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial.

You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js) - your feedback and contributions are welcome!

## Deploy on Vercel

The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js.

Check out our [Next.js deployment documentation](https://nextjs.org/docs/app/building-your-application/deploying) for more details.
Loading