Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions api/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
FROM python:3.12

WORKDIR /code
COPY ./pyproject.toml /code/pyproject.toml
# Get files required by hatch to create the environment
COPY ./src/chatbot/__about__.py /code/src/chatbot/__about__.py
COPY ./README.md /code/README.md
RUN python -m pip install --upgrade pip && pip install hatch
RUN hatch env create
COPY ./api /code/api

CMD ["hatch", "run", "uvicorn", "api.main:app", "--host", "0.0.0.0", "--port", "8080"]
Empty file added api/__init__.py
Empty file.
93 changes: 93 additions & 0 deletions api/main.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,93 @@
import os

from fastapi import FastAPI
from langgraph_sdk import get_client
from pydantic import BaseModel

app = FastAPI()

langgraph_client = get_client(url="http://langgraph-api:8000")


class ChatMessage(BaseModel):
text: str
model_config = {
"json_schema_extra": {
"examples": [
{
"text": "Hello",
}
]
}
}


class HumanReview(BaseModel):
action: str
data: str
model_config = {
"json_schema_extra": {
"examples": [
{
"action": "feedback",
"data": "That's not what I meant! Please try again.",
},
{
"action": "continue",
"data": "",
},
]
}
}


@app.get("/")
async def read_main():
return {"msg": "Hello! Welcome to the LangGraph Chat API"}


@app.get("/chat")
async def list_chat_threads():
"""
List all chat threads.
"""
return await langgraph_client.threads.search(
metadata={"graph_id": "chat"},
)


@app.get("/chat/{thread_id}")
async def get_chat_history(thread_id: str):
"""
Get the chat history for the given thread.
"""
return await langgraph_client.threads.get(thread_id=thread_id)


@app.post("/chat/{thread_id}")
async def chat_with_thread(thread_id: str, message: ChatMessage):
"""
Take message from user and return response from chatbot.
If chat thread does not exist, create a new thread.
"""
return await langgraph_client.runs.wait(
thread_id=thread_id,
assistant_id="chat",
input={"messages": [{"role": "user", "content": message.text}]},
if_not_exists="create",
)


@app.post("/chat/{thread_id}/human_review")
async def human_review(thread_id: str, review: HumanReview):
"""
Submit human review to resume interrupted thread.
"""
return await langgraph_client.runs.wait(
thread_id=thread_id,
assistant_id="chat",
command={
"resume": {
"action": review.action,
"data": review.data,
}
},
)
11 changes: 11 additions & 0 deletions api/test_main.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
from fastapi.testclient import TestClient

from .main import app

client = TestClient(app)


def test_read_main():
response = client.get("/")
assert response.status_code == 200
assert response.json() == {"msg": "Hello! Welcome to the LangGraph Chat API"}
4 changes: 4 additions & 0 deletions compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -39,3 +39,7 @@ services:
environment:
REDIS_URI: redis://langgraph-redis:6379
POSTGRES_URI: postgres://postgres:postgres@langgraph-postgres:5432/postgres?sslmode=disable
api:
image: chatbot-api:latest
ports:
- "8080:8080"
7 changes: 5 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,9 @@ dependencies = [
"pydantic",
"python-dotenv",
"langchain-tavily",
"grandalf"
"grandalf",
"fastapi[standard]",
"uvicorn[standard]",
]

[project.urls]
Expand Down Expand Up @@ -66,7 +68,7 @@ lint.ignore = ["E501"]
exclude = ["__pycache__", "build", "dist", ".venv"]

[tool.coverage.run]
source_pkgs = ["chatbot", "tests"]
source_pkgs = ["chatbot", "tests", "api"]
branch = true
parallel = true
omit = [
Expand All @@ -76,6 +78,7 @@ omit = [
[tool.coverage.paths]
chatbot = ["src/chatbot", "*/chatbot/src/chatbot"]
tests = ["tests", "*/chatbot/tests"]
api = ["api", "*/chatbot/api"]

[tool.coverage.report]
exclude_lines = [
Expand Down
File renamed without changes.
2 changes: 1 addition & 1 deletion src/chatbot/__about__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# SPDX-FileCopyrightText: 2025-present Nam Le <lehainam2371999@gmail.com>
#
# SPDX-License-Identifier: MIT
__version__ = "0.0.5"
__version__ = "0.0.6"
3 changes: 0 additions & 3 deletions src/chatbot/graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@

from dotenv import load_dotenv
from langchain_core.messages import AIMessage
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import END, START, StateGraph

from chatbot.utils.nodes.human_review import human_review_node
Expand Down Expand Up @@ -42,8 +41,6 @@ def route_after_chatbot(state: State) -> Literal[END, "human_review", "tools"]:
)
graph_builder.add_edge("tools", "chatbot")

memory = MemorySaver()
# graph = graph_builder.compile(checkpointer=memory) # In memory saver should not be used when deployed using LangGraph
graph = graph_builder.compile()

graph.get_graph().print_ascii()