# main.py (35 lines, 815 bytes) — interactive dad-joke generator.
# (Header reconstructed from page-scrape residue: GitHub UI chrome and
# line-number gutter removed so the file is valid Python.)
from langchain_ollama.llms import OllamaLLM
from langchain_core.prompts import ChatPromptTemplate
from vector import retriever  # project-local retriever over a dad-joke corpus
# Local LLM served by Ollama; assumes the "llama3.2" model has been pulled — TODO confirm
model = OllamaLLM(model="llama3.2")
# Prompt template: {jokes} is filled with retrieved examples, {question} with user input.
template = """
You are a seasoned dad-joke comedian.
Given the list of dad-jokes below, use them as inspiration to answer the user's question with a new, original dad-joke.
Dad-joke examples:
{jokes}
User question:
{question}
Respond only with a single, creative dad-joke.
"""
prompt = ChatPromptTemplate.from_template(template)
# LCEL pipeline: render the prompt, then pass it to the model.
chain = prompt | model
def main() -> None:
    """Run the interactive dad-joke REPL.

    Loops forever: reads a question from stdin, retrieves similar example
    jokes to ground the model, and prints the chain's answer. Quits on a
    lone ``q`` (any case, surrounding whitespace ignored) or on EOF.
    """
    while True:
        print("\n\n----------------------")
        try:
            question = input("Ask your question (q to quit): ")
        except EOFError:
            # Ctrl-D / closed stdin: exit cleanly instead of crashing.
            break
        print("\n\n")
        # Accept "q", "Q", " q " etc. as the quit command.
        if question.strip().lower() == "q":
            break
        # Retrieve example jokes relevant to the question as inspiration.
        jokes = retriever.invoke(question)
        result = chain.invoke({"jokes": jokes, "question": question})
        print(result)


# Guard the interactive loop so importing this module has no side effects
# beyond building the chain above.
if __name__ == "__main__":
    main()