Updated code
Browse files
utils.py
CHANGED
@@ -257,9 +257,8 @@ def load_ensemble_retriver(text_chunks, embeddings, chroma_vectorstore):
|
|
257 |
bm25_retriever.k = 1
|
258 |
chroma_retriever = chroma_vectorstore.as_retriever(search_kwargs={"k": 1})
|
259 |
ensemble_retriever = EnsembleRetriever(retrievers=[bm25_retriever, chroma_retriever], weights=[0.3, 0.7])
|
260 |
-
retriever_from_llm = MultiQueryRetriever.from_llm(retriever=ensemble_retriever, llm=ChatOpenAI())
|
261 |
-
|
262 |
-
return retriever_from_llm
|
263 |
|
264 |
|
265 |
def load_conversational_retrievel_chain(retriever, llm):
|
@@ -311,7 +310,7 @@ def load_conversational_retrievel_chain(retriever, llm):
|
|
311 |
Helpful Answer:"""
|
312 |
|
313 |
prompt = PromptTemplate(input_variables=["history", "context", "question"], template=template)
|
314 |
-
memory = ConversationBufferWindowMemory(input_key="question", memory_key="history", k=
|
315 |
|
316 |
qa = RetrievalQA.from_chain_type(
|
317 |
llm=llm,
|
|
|
257 |
bm25_retriever.k = 1
|
258 |
chroma_retriever = chroma_vectorstore.as_retriever(search_kwargs={"k": 1})
|
259 |
ensemble_retriever = EnsembleRetriever(retrievers=[bm25_retriever, chroma_retriever], weights=[0.3, 0.7])
|
260 |
+
# retriever_from_llm = MultiQueryRetriever.from_llm(retriever=ensemble_retriever, llm=ChatOpenAI())
|
261 |
+
return ensemble_retriever
|
|
|
262 |
|
263 |
|
264 |
def load_conversational_retrievel_chain(retriever, llm):
|
|
|
310 |
Helpful Answer:"""
|
311 |
|
312 |
prompt = PromptTemplate(input_variables=["history", "context", "question"], template=template)
|
313 |
+
memory = ConversationBufferWindowMemory(input_key="question", memory_key="history", k=1)
|
314 |
|
315 |
qa = RetrievalQA.from_chain_type(
|
316 |
llm=llm,
|