devve1 committed on
Commit 48ffedb
1 Parent(s): 8324d7c

Update app.py

Files changed (1):
  app.py  +5 -5
app.py CHANGED

@@ -161,10 +161,10 @@ def main(query: str, client: QdrantClient, collection_name: str, llm, dense_mode
     ], stop=["</s>"], temperature=0.7, max_tokens=3000)
 
     answer = output['choices'][0]['message']['content']
-    final_response = f"{answer}\n\n\n\nSources :\n\n{result_metadatas}"
+    answer_with_metadatas = f"{answer}\n\n\n\nSources :\n\n{result_metadatas}"
 
     print(f'OUTPUT: {output}')
-    return final_response
+    return answer, answer_with_metadatas
 
 @st.cache_resource
 def load_models_and_documents():
@@ -366,15 +366,15 @@ if __name__ == '__main__':
     st.chat_message("user").markdown(prompt)
     st.session_state.messages.append({"role": "user", "content": prompt})
 
-    ai_response = main(prompt, client, collection_name, llm, dense_model, sparse_model)
+    ai_response, ai_response_with_metadatas = main(prompt, client, collection_name, llm, dense_model, sparse_model)
     with st.chat_message("assistant"):
         message_placeholder = st.empty()
         full_response = ""
-        for chunk in re.split(r'(\s+)', ai_response):
+        for chunk in re.split(r'(\s+)', ai_response_with_metadatas):
             full_response += chunk + " "
             time.sleep(0.01)
             message_placeholder.markdown(full_response + "▌")
-    st.session_state.messages.append({"role": "assistant", "content": full_response})
+    st.session_state.messages.append({"role": "assistant", "content": ai_response})
 
     st.sidebar.title("Upload your file")
     uploaded_files = st.sidebar.file_uploader("Choose a file", accept_multiple_files=True, type=['docx', 'doc', 'odt', 'pptx', 'ppt', 'xlsx', 'csv', 'tsv', 'eml', 'msg', 'rtf', 'epub', 'html', 'xml', 'pdf', 'png', 'jpg', 'heic','txt'])
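
For context, a minimal sketch of the pattern the diff moves to: main() now returns both the bare answer and an answer-with-sources string, the UI streams the longer string word by word, and only the bare answer is stored in the chat history. Names mirror the diff where possible; generate_answer is a hypothetical stand-in for main(query, client, collection_name, llm, dense_model, sparse_model), not the app's actual retrieval code.

    # Sketch only: simulated streaming of an answer plus its sources in Streamlit.
    import re
    import time

    import streamlit as st


    def generate_answer(query: str) -> tuple[str, str]:
        # Hypothetical stub in place of main(); it returns (answer, answer_with_metadatas).
        answer = f"Echo: {query}"
        sources = "doc1.pdf, doc2.docx"
        return answer, f"{answer}\n\n\n\nSources :\n\n{sources}"


    if "messages" not in st.session_state:
        st.session_state.messages = []

    if prompt := st.chat_input("Ask something"):
        st.chat_message("user").markdown(prompt)
        st.session_state.messages.append({"role": "user", "content": prompt})

        answer, answer_with_metadatas = generate_answer(prompt)
        with st.chat_message("assistant"):
            placeholder = st.empty()
            full_response = ""
            # Simulated token streaming: split on whitespace, keeping the separators.
            for chunk in re.split(r'(\s+)', answer_with_metadatas):
                full_response += chunk + " "
                time.sleep(0.01)
                placeholder.markdown(full_response + "▌")
            placeholder.markdown(full_response)

        # Only the bare answer (without the sources block) is persisted in history.
        st.session_state.messages.append({"role": "assistant", "content": answer})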