-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathchat_engine.py
More file actions
42 lines (33 loc) · 1.75 KB
/
chat_engine.py
File metadata and controls
42 lines (33 loc) · 1.75 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
#from langchain_groq import ChatGroq
from langchain_ollama import ChatOllama
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import START, MessagesState, StateGraph
from langchain_core.messages import SystemMessage
class ChatBot:
    """Conversational assistant running a local Ollama model through LangGraph.

    A single-node message graph prepends a system prompt to the running
    conversation and checkpoints history in memory, keyed by ``thread_id``.
    The Groq backend is kept commented out for easy switching; the API key
    is stored but only consumed by that disabled path.
    """

    def __init__(self, groq_api_key: str):
        # Retained solely for the (currently commented-out) ChatGroq backend.
        self.groq_api_key = groq_api_key
        self.model_name = "llama-3.2-3b-preview"
        self.llm = self._setup_llm()
        self.app = self._setup_workflow()

    def _setup_llm(self):
        """Return the chat model used for inference (local Ollama server)."""
        # return ChatGroq(temperature=0.1, groq_api_key=self.groq_api_key, model_name=self.model_name)
        # FIX: original read 'retrun', a SyntaxError that prevented import.
        return ChatOllama(model="mistral:latest", base_url='http://127.0.0.1:11434')

    def _setup_workflow(self, system_prompt="You are a helpful IT and CloudOPS assistant. Respond in French."):
        """Build and compile the single-node LangGraph workflow.

        Parameters
        ----------
        system_prompt : str
            Injected as a SystemMessage ahead of the stored conversation on
            every model call.

        Returns
        -------
        A compiled graph with an in-memory checkpointer, so conversation
        state persists per thread_id for the lifetime of this app object.
        """
        workflow = StateGraph(state_schema=MessagesState)

        def call_model(state: MessagesState):
            # Prepend the system prompt so it governs every turn; the
            # checkpointer only stores user/assistant messages.
            sys_prompt = SystemMessage(content=system_prompt)
            modified_messages = [sys_prompt] + state["messages"]
            response = self.llm.invoke(modified_messages)
            return {"messages": response}

        # Register the node before wiring the entry edge that targets it.
        workflow.add_node("model", call_model)
        workflow.add_edge(START, "model")
        return workflow.compile(checkpointer=MemorySaver())

    def chat(self, message: str, thread_id: str = 'guest', system_prompt=None):
        """Send one user message and return the resulting graph state.

        Parameters
        ----------
        message : str
            The user's message for this turn.
        thread_id : str
            Conversation key; each id gets an independent history.
        system_prompt : str | None
            If given, the workflow is rebuilt with this prompt. NOTE(review):
            rebuilding creates a fresh MemorySaver, so ALL prior conversation
            history (for every thread) is discarded — confirm this is intended.
        """
        if system_prompt:
            self.app = self._setup_workflow(system_prompt)
        return self.app.invoke(
            {"messages": [{"role": "user", "content": message}]},
            {"configurable": {"thread_id": thread_id}}
        )