Mishab committed
Commit 6f77302
1 Parent(s): b40228a

Updated knowledge base

Database/PDF_HTML_CHROMA_DB/{e9e678e2-95d4-4b02-8a1e-7c3ca59754d7 → afcfdab3-6ee2-42e5-9f7b-284c077ed4f7}/index_metadata.pickle RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a37d72452ff59cb80ed779d0ff9ed91f9d6fe7c12adf909845168311e578c06b
-size 2956679
+oid sha256:58fc99aa696c76842b672da2d335a63ef6bbc8ae19d01e503405ba46468ea2c8
+size 1390301
Database/PDF_HTML_CHROMA_DB/chroma.sqlite3 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2e31d552a7a4981d60910ac3e293b5d53d0ba9503a95933ca21ab3a20b64ebc8
-size 330657792
+oid sha256:486da555d90974b1f3fc0114737cc5d3a1b5057bb42e560f1d9d6dc13d7534be
+size 205963264
Database/PDF_HTML_CHROMA_DB/e9e678e2-95d4-4b02-8a1e-7c3ca59754d7/data_level0.bin DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:8dbd22f72c4c63507f549d3fe1d8350c50ba0bc9d64cc20f1d136119fb9a892e
-size 85476000
Database/PDF_HTML_CHROMA_DB/e9e678e2-95d4-4b02-8a1e-7c3ca59754d7/header.bin DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:3f2080adbc1c9cd3e086e238928ed1f139b21a0ebad87348b410770e6a45b37e
-size 100
Database/PDF_HTML_CHROMA_DB/e9e678e2-95d4-4b02-8a1e-7c3ca59754d7/length.bin DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:a50b6a6ebd0528902d0cbaa4d5d1c60af3a3fdc95a0738162eef134668c4d735
-size 204000
Database/PDF_HTML_CHROMA_DB/e9e678e2-95d4-4b02-8a1e-7c3ca59754d7/link_lists.bin DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:8b00b245cd31e8691cd94191f3afefc59417c252bdabc5ec443aa58cf84328d4
-size 426496
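This commit swaps out the persisted Chroma knowledge base: the index segment directory gets a new UUID, the old segment's binary index files are deleted, and chroma.sqlite3 shrinks from about 331 MB to about 206 MB. As a rough sketch only (not part of this commit), a persisted store like Database/PDF_HTML_CHROMA_DB is typically reopened with LangChain's Chroma wrapper; the `OpenAIEmbeddings` embedding function below is an illustrative assumption, not something this repository necessarily uses:

```python
# Sketch: reopening a persisted Chroma store with LangChain.
# Assumption: OpenAIEmbeddings stands in for whatever embedding
# function was used to build the knowledge base.
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import Chroma

db = Chroma(
    persist_directory="Database/PDF_HTML_CHROMA_DB",
    embedding_function=OpenAIEmbeddings(),
)
retriever = db.as_retriever()  # passed into the chain builder in utils.py
```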
utils.py CHANGED
@@ -322,14 +322,14 @@ def load_conversational_retrievel_chain(retriever, llm):
     # Helpful Answer:"""
 
     # prompt = PromptTemplate(input_variables=["history", "context", "question"], template=template)
-    memory = ConversationBufferMemory(input_key="question", memory_key="history")
+    # memory = ConversationBufferMemory(input_key="question", memory_key="history")
 
     qa = RetrievalQA.from_chain_type(
         llm=llm,
         chain_type="stuff",
         retriever=retriever,
         return_source_documents=True,
-        chain_type_kwargs={"memory": memory},
+        # chain_type_kwargs={"memory": memory},
     )
     return qa
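For context, a minimal, self-contained sketch of what `load_conversational_retrievel_chain` reduces to after this change (the import is added here for completeness; `retriever` and `llm` are supplied by the rest of utils.py, which is not shown):

```python
from langchain.chains import RetrievalQA


def load_conversational_retrievel_chain(retriever, llm):
    # After this commit the chain no longer carries conversation history;
    # each question is answered statelessly from the retrieved documents.
    qa = RetrievalQA.from_chain_type(
        llm=llm,
        chain_type="stuff",
        retriever=retriever,
        return_source_documents=True,
    )
    return qa
```

Passing a `ConversationBufferMemory` through `chain_type_kwargs` was how the previous version threaded chat history into the "stuff" prompt; commenting out both lines removes that per-session memory while leaving the retrieval QA behaviour intact.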