Spaces: Sleeping
dgutierrez committed
Commit • 004bb63
Parent(s): 57e70e1

removed qdrant

Browse files:
- app.py +4 -25
- requirements.txt +1 -2
app.py CHANGED

@@ -12,8 +12,6 @@ from aimakerspace.vectordatabase import VectorDatabase
 from aimakerspace.openai_utils.chatmodel import ChatOpenAI
 import chainlit as cl
 import fitz  # PyMuPDF for PDF reading
-from qdrant_client import QdrantClient
-from qdrant_client.http.models import PointStruct, VectorParams, Distance
 
 system_template = """\
 Use the following context to answer a user's question. If you cannot find the answer in the context, say you don't know the answer."""
@@ -29,7 +27,7 @@ Question:
 user_role_prompt = UserRolePrompt(user_prompt_template)
 
 class RetrievalAugmentedQAPipeline:
-    def __init__(self, llm: ChatOpenAI(), vector_db_retriever) -> None:
+    def __init__(self, llm: ChatOpenAI(), vector_db_retriever: VectorDatabase) -> None:
         self.llm = llm
         self.vector_db_retriever = vector_db_retriever
 
@@ -87,29 +85,9 @@ def process_text_file(file: AskFileResponse):
     texts = text_splitter.split_texts(documents)
     return texts
 
-async def initialize_vector_db(choice, texts):
-    if choice == "current":
-        vector_db = VectorDatabase()
-        vector_db = await vector_db.abuild_from_list(texts)
-        return vector_db
-    elif choice == "qdrant":
-        client = QdrantClient(":memory:")  # Using an in-memory Qdrant instance for demonstration
-        client.recreate_collection(
-            collection_name="my_collection",
-            vectors_config=VectorParams(size=768, distance=Distance.COSINE)
-        )
-        points = [PointStruct(id=i, vector=[0.0] * 768, payload={"text": text}) for i, text in enumerate(texts)]
-        client.upsert(collection_name="my_collection", points=points)
-        return client
 
 @cl.on_chat_start
 async def on_chat_start():
-    # Prompt the user to select the vector database
-    user_choice = await cl.AskSelectMessage(
-        content="Which vector database would you like to use?",
-        options=["current", "qdrant"],
-    ).send()
-
     files = None
 
     # Wait for the user to upload a file
@@ -133,8 +111,9 @@ async def on_chat_start():
 
     print(f"Processing {len(texts)} text chunks")
 
-    #
-    vector_db =
+    # Create a dict vector store
+    vector_db = VectorDatabase()
+    vector_db = await vector_db.abuild_from_list(texts)
 
     chat_openai = ChatOpenAI()
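Worth noting about the removed Qdrant path: it upserted every chunk as a PointStruct with a placeholder vector of [0.0] * 768, so cosine-similarity search over the collection could never rank results, which makes the removal low-risk. For comparison, a working ingestion would embed each chunk before upserting. The sketch below is illustrative only and not part of this commit; embed() is a hypothetical helper assumed to return a float list whose length matches the collection's VectorParams, while the qdrant-client calls are the same ones the removed code used.

from qdrant_client import QdrantClient
from qdrant_client.http.models import PointStruct, VectorParams, Distance

def build_qdrant_index(texts, embed, collection="my_collection", dim=768):
    # In-memory instance, as in the removed code.
    client = QdrantClient(":memory:")
    client.recreate_collection(
        collection_name=collection,
        vectors_config=VectorParams(size=dim, distance=Distance.COSINE),
    )
    points = [
        # Real embeddings instead of the removed [0.0] * 768 placeholders.
        PointStruct(id=i, vector=embed(text), payload={"text": text})
        for i, text in enumerate(texts)
    ]
    client.upsert(collection_name=collection, points=points)
    return client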
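The replacement keeps retrieval in the aimakerspace in-memory VectorDatabase, built straight from the split texts. Below is a minimal usage sketch: VectorDatabase() and abuild_from_list() appear in the diff, but the search_by_text()-style retrieval call is an assumption about the aimakerspace API, not something this commit touches. Separately, the llm: ChatOpenAI() annotation in __init__ instantiates the class inside the type hint; llm: ChatOpenAI would be the conventional form.

import asyncio
from aimakerspace.vectordatabase import VectorDatabase

async def main():
    texts = ["chunk one", "chunk two"]  # output of the text splitter
    vector_db = VectorDatabase()
    # Embeds and stores the chunks in memory (requires OPENAI_API_KEY).
    vector_db = await vector_db.abuild_from_list(texts)
    # Assumed retrieval call: returns the k most similar chunks with scores.
    results = vector_db.search_by_text("a user question", k=2)
    print(results)

asyncio.run(main())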
requirements.txt CHANGED

@@ -2,7 +2,6 @@ numpy
 chainlit==0.7.700
 openai
 pymupdf
-
-h2==3.2.0
+
 
 