-
Notifications
You must be signed in to change notification settings - Fork 18
Expand file tree
/
Copy path001-simple-chatbot.py
More file actions
104 lines (68 loc) · 2.07 KB
/
001-simple-chatbot.py
File metadata and controls
104 lines (68 loc) · 2.07 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
# Silence LangChain's deprecation warnings so this demo's console output
# stays readable (the script deliberately uses legacy LangChain APIs below).
import warnings
from langchain._api import LangChainDeprecationWarning
warnings.simplefilter("ignore", category=LangChainDeprecationWarning)
# Load environment variables from a .env file located by find_dotenv().
import os
from dotenv import load_dotenv, find_dotenv
_ = load_dotenv(find_dotenv())
# Fail fast with a KeyError if OPENAI_API_KEY is not set.
# NOTE(review): this variable is never referenced again in the file —
# presumably ChatOpenAI reads the env var itself; confirm before removing.
openai_api_key = os.environ["OPENAI_API_KEY"]
from langchain_openai import ChatOpenAI
from langchain_core.messages import HumanMessage

# Stateless chat model; it is also reused further down by the chain demo.
chatbot = ChatOpenAI(model="gpt-3.5-turbo")

# First turn: tell the model a fact. No memory is attached at this point.
first_turn = [HumanMessage(content="My favorite color is blue.")]
response = chatbot.invoke(first_turn)

# Echo the question and the model's reply between separator lines.
for text in ("My favorite color is blue.", response.content):
    print("\n----------\n")
    print(text)
print("\n----------\n")
# Second turn: ask about the fact from the previous call. Because the bare
# chat model keeps no conversation state, it cannot recall the color.
follow_up = HumanMessage(content="What is my favorite color?")
response = chatbot.invoke([follow_up])

for text in ("What is my favorite color?", response.content):
    print("\n----------\n")
    print(text)
print("\n----------\n")
from langchain import LLMChain
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.prompts import HumanMessagePromptTemplate
from langchain_core.prompts import MessagesPlaceholder
from langchain.memory import ConversationBufferMemory
from langchain.memory import FileChatMessageHistory
# Conversation memory persisted to disk: each exchange is written to
# messages.json, so history survives across runs of this script.
# "messages" is the key under which the history is injected into the prompt;
# return_messages=True supplies Message objects rather than a flat string.
memory = ConversationBufferMemory(
    chat_memory=FileChatMessageHistory("messages.json"),
    memory_key="messages",
    return_messages=True
)
# Prompt template: replay the stored history first, then append the new
# user input under the "content" variable.
prompt = ChatPromptTemplate(
    input_variables=["content", "messages"],
    messages=[
        MessagesPlaceholder(variable_name="messages"),
        HumanMessagePromptTemplate.from_template("{content}")
    ]
)
# Legacy LLMChain wiring model + prompt + file-backed memory together
# (its deprecation warnings are suppressed at the top of the file).
chain = LLMChain(
    llm=chatbot,
    prompt=prompt,
    memory=memory
)
def _chat(question):
    """Send *question* through the memory-backed chain, print the question
    and the raw chain response between separator lines, and return the
    chain's response dict."""
    result = chain.invoke(question)
    print("\n----------\n")
    print(question)
    print("\n----------\n")
    print(result)
    print("\n----------\n")
    return result


# Three turns: thanks to the file-backed memory, the last question can
# recall the name given in the second turn. The stanza was previously
# copy-pasted three times; the helper keeps the output identical while
# removing the duplication. `response` stays bound at module level for
# backward compatibility.
response = _chat("hello!")
response = _chat("my name is Julio")
response = _chat("what is my name?")