-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path09_llamaindex_sql.py
More file actions
59 lines (44 loc) · 1.76 KB
/
09_llamaindex_sql.py
File metadata and controls
59 lines (44 loc) · 1.76 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
import os

import faiss
from sqlalchemy.dialects.postgresql.base import ischema_names
from sqlalchemy.types import UserDefinedType

from llama_index.core import StorageContext, VectorStoreIndex
from llama_index.core.settings import Settings
from llama_index.core.storage.docstore import SimpleDocumentStore
from llama_index.core.utilities.sql_wrapper import SQLDatabase
from llama_index.embeddings.openai import OpenAIEmbedding
from llama_index.llms.openai import OpenAI
from llama_index.readers.database import DatabaseReader
from llama_index.vector_stores.faiss.base import FaissVectorStore
class PGVector(UserDefinedType):
    """Minimal SQLAlchemy column type for pgvector ``vector`` columns.

    Values are passed through untouched in both directions; this type exists
    only so that reflecting tables containing ``vector`` columns succeeds.
    """

    def get_col_spec(self):
        # DDL type name SQLAlchemy emits for this column.
        return "vector"

    def bind_processor(self, dialect):
        # Hand parameter values to the driver unchanged.
        def passthrough(value):
            return value

        return passthrough

    def result_processor(self, dialect, coltype):
        # Return fetched values unchanged.
        def passthrough(value):
            return value

        return passthrough


# Register the type so reflection recognizes "vector" columns by name.
ischema_names["vector"] = PGVector
# --- Credentials and database connection --------------------------------
# SECURITY: never commit a real API key. Export OPENAI_API_KEY in the shell
# instead; setdefault() keeps an already-exported real key intact rather than
# clobbering it with the placeholder (the original assignment overwrote it).
os.environ.setdefault("OPENAI_API_KEY", "xxx")

# Local Supabase Postgres instance (default dev credentials/port).
supabase_uri = "postgresql://postgres:postgres@127.0.0.1:54322/postgres"
db = SQLDatabase.from_uri(supabase_uri)

# Load every chat_history row; each row becomes one LlamaIndex Document.
query = "SELECT * FROM chat_history"
sql_reader = DatabaseReader(sql_database=db)
docs = sql_reader.load_data(query=query)
# --- Vector index construction ------------------------------------------
embed_model = OpenAIEmbedding(model="text-embedding-ada-002")
dimension = 1536  # text-embedding-ada-002 output size
faiss_index = faiss.IndexFlatL2(dimension)
vector_store = FaissVectorStore(faiss_index=faiss_index)
doc_store = SimpleDocumentStore()

# BUG FIX: VectorStoreIndex.from_documents() does not take vector_store=
# directly — custom stores must be supplied via a StorageContext. The
# original call silently ignored the FAISS store (and never used doc_store),
# falling back to the default in-memory vector store.
storage_context = StorageContext.from_defaults(
    vector_store=vector_store, docstore=doc_store
)
index = VectorStoreIndex.from_documents(
    docs, embed_model=embed_model, storage_context=storage_context
)
# Persist the index (FAISS data, docstore, index metadata) for later reload.
index.storage_context.persist(persist_dir="./storage")
# --- Interactive query loop ---------------------------------------------
llm = OpenAI(model="gpt-4o", temperature=0.7)
Settings.llm = llm  # make the LLM the global default for query engines
query_engine = index.as_query_engine()

while True:
    try:
        user_query = input("You: ").strip()
    except (EOFError, KeyboardInterrupt):
        # Exit cleanly on Ctrl-D / Ctrl-C instead of dumping a traceback.
        break
    if not user_query:
        # Ignore blank lines rather than sending an empty query to the LLM.
        continue
    if user_query.lower() == "exit":
        break
    response = query_engine.query(user_query)
    print(f"\nResponse: {response}\n")