-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathai_host.py
More file actions
49 lines (42 loc) · 1.35 KB
/
ai_host.py
File metadata and controls
49 lines (42 loc) · 1.35 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
from langchain.prompts import (
ChatPromptTemplate,
HumanMessagePromptTemplate,
MessagesPlaceholder,
)
from langchain.schema import SystemMessage
from langchain_openai import ChatOpenAI
from langchain.chains import LLMChain
from langchain.memory import ConversationBufferMemory, RedisChatMessageHistory
from langchain.prompts import PromptTemplate
from langchain_openai import OpenAI
from dotenv import load_dotenv
# Load environment variables (e.g. OPENAI_API_KEY) from a local .env file
# before any OpenAI client is constructed.
load_dotenv()
# Redis instance used to persist per-session chat history.
REDIS_URL = 'redis://localhost:6379'
# OpenAI chat model name.  NOTE(review): defined here but never passed to
# ChatOpenAI() below, so the library default model is used — confirm intent.
MODEL = 'gpt-3.5-turbo'
def chat(system_prompt, session_id, message):
    """Send *message* to the chat model with Redis-backed conversation memory.

    Args:
        system_prompt: System instruction prepended to every exchange.
        session_id: Redis key under which this conversation's transcript is
            stored, so separate sessions do not share context.
        message: The user's new input for this turn.

    Returns:
        The model's reply as a string.
    """
    # Prompt layout: system instruction first, then the accumulated history,
    # then the latest human input.
    prompt = ChatPromptTemplate.from_messages(
        [
            SystemMessage(content=system_prompt),
            MessagesPlaceholder(variable_name="chat_history"),
            HumanMessagePromptTemplate.from_template("{human_input}"),
        ]
    )
    # Persist the transcript in Redis keyed by session_id; ConversationBufferMemory
    # replays it into the "chat_history" placeholder on each call.
    message_history = RedisChatMessageHistory(session_id, url=REDIS_URL)
    memory = ConversationBufferMemory(
        memory_key="chat_history",
        chat_memory=message_history,
        return_messages=True,
    )
    # Fix: the module-level MODEL constant was defined but never used —
    # pass it explicitly instead of falling back to the library default.
    llm = ChatOpenAI(model=MODEL)
    chat_llm_chain = LLMChain(
        llm=llm,
        prompt=prompt,
        verbose=True,
        memory=memory,
    )
    return chat_llm_chain.predict(human_input=message)