Update app.py
app.py CHANGED
@@ -5,6 +5,7 @@ from langchain.llms import openai
 from langchain.chains import ConversationalRetrievalChain
 from langchain.chat_models import ChatOpenAI
 from langchain.embeddings import HuggingFaceBgeEmbeddings
+import streamlit as st
 ###########################################################################################
 
 def get_pdf_load():
@@ -39,6 +40,7 @@ def get_conversation(query_user):
     query=query_1+query_user
     result= qa_chain({'question': query, 'chat_history': chat_history})
     st.write('Answer of you question:' +result['answer'] +'\n')
+    return result
 ####################################################################################################################
 def main():
     st.set_page_config(
@@ -59,6 +61,7 @@ def main():
     if st.button("Answer"):
         with st.spinner("Answering"):
             get_conversation(query_user=user_question)
+
 
 
     #if st.button("CLEAR"):
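
Since get_conversation() now returns the chain's output dict, the caller in main() can do more than just display the answer. A minimal usage sketch, assuming the returned dict keeps the 'answer' key shown above and that the history is stored under a "chat_history" key in Streamlit's st.session_state (neither the key name nor this wiring is part of this commit):

    if st.button("Answer"):
        with st.spinner("Answering"):
            result = get_conversation(query_user=user_question)
            # Keep the (question, answer) pair so a later turn could pass a
            # real chat_history to the chain instead of an empty one.
            if "chat_history" not in st.session_state:
                st.session_state["chat_history"] = []
            st.session_state["chat_history"].append((user_question, result["answer"]))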