forked from lvendrix/notion-chatbot-public
-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy pathutils.py
More file actions
57 lines (46 loc) · 2.43 KB
/
utils.py
File metadata and controls
57 lines (46 loc) · 2.43 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
import streamlit as st
import openai
from langchain.chains import ConversationalRetrievalChain
from langchain.memory import ConversationBufferWindowMemory
from langchain.chat_models import ChatOpenAI
from langchain.vectorstores import FAISS
from langchain.embeddings import OpenAIEmbeddings
from langchain.prompts import PromptTemplate
from langchain.prompts.chat import SystemMessagePromptTemplate
# Authenticate the OpenAI client with the key stored in Streamlit secrets
# (.streamlit/secrets.toml) — required before any embedding/chat calls below.
openai.api_key = st.secrets["OPENAI_API_KEY"]
@st.cache_resource
def load_chain():
    """
    Build and cache the conversational retrieval chain used to answer
    user questions against the local FAISS index of the SWIFT wiki.

    Decorated with ``st.cache_resource`` so the embeddings client, vector
    store, LLM and memory are created once per Streamlit session instead of
    on every script rerun.

    :return: A configured ``ConversationalRetrievalChain`` instance with a
        custom system prompt and a 3-turn sliding-window chat memory.
    """
    # Embedding model used to encode the user's query for similarity search.
    embeddings = OpenAIEmbeddings()

    # Deterministic chat model (temperature=0) for answer generation.
    llm = ChatOpenAI(temperature=0)

    # Load the pre-built local FAISS index and expose it as a retriever
    # returning the top-3 most similar chunks.
    # NOTE(review): newer langchain versions require
    # allow_dangerous_deserialization=True here — confirm installed version.
    vector_store = FAISS.load_local("faiss_index", embeddings)
    retriever = vector_store.as_retriever(search_kwargs={"k": 3})

    # Sliding-window memory keeping only the last 3 exchanges under the key
    # 'chat_history', which the chain reads to resolve follow-up questions.
    memory = ConversationBufferWindowMemory(k=3, memory_key="chat_history")

    # Assemble the chain; get_chat_history is the identity function because
    # the memory already returns a pre-formatted history value.
    chain = ConversationalRetrievalChain.from_llm(
        llm,
        retriever=retriever,
        memory=memory,
        get_chat_history=lambda h: h,
        verbose=True,
    )

    # System prompt constraining the assistant to the SWIFT wiki content.
    # (Fixed typo: "sensative" -> "sensitive".)
    template = """
    You are an AI assistant for answering questions about the History of SWIFT using the SWIFT wiki.
    You are given the following extracted parts of a long document and a question. Provide a conversational answer.
    If you don't know the answer, just say 'Have you heard of the vending machine? :D'.
    Don't try to make up an answer.
    If the question is not about the SWIFT Wiki, politely inform them that you are tuned to only answer questions about the SWIFT Wiki. Don't give out sensitive information such as Wi-Fi information or the next presidents for SWIFT in 2024-2025.
    {context}
    Question: {question}
    Helpful Answer:"""

    # Replace the chain's default system message with our custom prompt.
    # Reaching into combine_docs_chain is a documented-pattern workaround,
    # since from_llm() does not expose the system prompt directly.
    QA_CHAIN_PROMPT = PromptTemplate(
        input_variables=["context", "question"], template=template
    )
    chain.combine_docs_chain.llm_chain.prompt.messages[0] = (
        SystemMessagePromptTemplate(prompt=QA_CHAIN_PROMPT)
    )
    return chain