forked from AI-LLM-Bootcamp/04-simple-agent
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path001-simple-agent.py
More file actions
90 lines (57 loc) · 2.23 KB
/
001-simple-agent.py
File metadata and controls
90 lines (57 loc) · 2.23 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
import os

from dotenv import load_dotenv, find_dotenv

# Load API keys (OPENAI_API_KEY, TAVILY_API_KEY) from a .env file into the
# process environment; ChatOpenAI and TavilySearchResults read them from there.
_ = load_dotenv(find_dotenv())
# Fail fast with a KeyError here if the OpenAI key is missing, rather than
# deep inside the first model call.
openai_api_key = os.environ["OPENAI_API_KEY"]

from langchain_openai import ChatOpenAI

llm = ChatOpenAI(model="gpt-3.5-turbo")

from langchain_community.tools.tavily_search import TavilySearchResults

# Tavily web-search tool, capped at 2 results per query.
search = TavilySearchResults(max_results=2)

# Demo 1: call the search tool directly (no agent involved).
question = "Who are the top stars of the 2024 Eurocup?"
response = search.invoke(question)

print("\n----------\n")
print(question)
print("\n----------\n")
print(response)
print("\n----------\n")
tools = [search]

from langgraph.prebuilt import create_react_agent

# Demo 2: ReAct-style agent — the model decides for itself when to call the
# search tool. Note: create_react_agent binds the tools to the model
# internally, so the earlier `llm.bind_tools(tools)` call (whose result was
# never used) has been removed.
agent_executor = create_react_agent(llm, tools)

from langchain_core.messages import HumanMessage

question = "Where is the soccer Eurocup 2024 played?"
response = agent_executor.invoke({"messages": [HumanMessage(content=question)]})

print("\n----------\n")
print(question + " (agent)")
print("\n----------\n")
# The agent returns the full message history: the human question, any tool
# calls/results, and the final AI answer.
print(response["messages"])
print("\n----------\n")
print("\n----------\n")

# Demo 3: same agent, but streamed — stream() yields one chunk per agent step
# (model decision, tool invocation, tool result, final reply) instead of
# blocking until the complete answer is ready.
question = "When and where will it be the 2024 Eurocup final match?"
print(question + " (agent with streaming)")
print("\n----------\n")
for chunk in agent_executor.stream(
    {"messages": [HumanMessage(content=question)]}
):
    print(chunk)
    print("----")
print("\n----------\n")
from langgraph.checkpoint.memory import MemorySaver

# Demo 4: conversational memory. MemorySaver checkpoints the conversation
# state in RAM, keyed by thread_id, so follow-up questions on the same thread
# can refer back to earlier turns.
memory = MemorySaver()
agent_executor = create_react_agent(llm, tools, checkpointer=memory)


def _stream_question(question, config, label=None):
    # Print a header (the question itself unless a custom label is given),
    # then stream the agent's step-by-step chunks for the given thread.
    print(label if label is not None else question)
    for chunk in agent_executor.stream(
        {"messages": [HumanMessage(content=question)]}, config
    ):
        print(chunk)
        print("----")


# Two questions on thread "001": the second relies on memory of the first.
config = {"configurable": {"thread_id": "001"}}
_stream_question("Who won the 2024 soccer Eurocup?", config)
_stream_question("Who were the top stars of that winner team?", config)

# A different thread_id starts a fresh conversation — the agent has no memory
# of thread "001", so it cannot answer this follow-up.
_stream_question(
    "About what soccer team we were talking?",
    {"configurable": {"thread_id": "002"}},
    label="(With new thread_id) About what soccer team we were talking?",
)