KrishnaKumar23 committed
Commit • 46c9b40
1 Parent(s): e797f63
Handled session state bug on page refresh
app.py CHANGED
@@ -35,11 +35,10 @@ def index_document(_llm_object, uploaded_file):
 
         with st.spinner("Indexing document... This is a free CPU version and may take a while ⏳"):
             retriever = _llm_object.create_vector_db(file_name)
-
-
-        return file_name
+
+        return file_name, retriever
     else:
-        return None
+        return None, None
 
 
 def load_lottieurl(url: str):
@@ -68,11 +67,12 @@ def init_state() :
     if "history" not in st.session_state:
         st.session_state.history = [SYSTEM_PROMPT]
 
-    if "repetion_penalty" not in st.session_state
+    if "repetion_penalty" not in st.session_state:
         st.session_state.repetion_penalty = 1
 
-    if "chat_bot" not in st.session_state
+    if "chat_bot" not in st.session_state:
         st.session_state.chat_bot = "Mixtral-8x7B-Instruct-v0.1"
+
 
 
 def faq():
@@ -125,7 +125,7 @@ def sidebar():
     st.markdown("---")
     # Upload file through Streamlit
     st.session_state.uploaded_file = st.file_uploader("Upload a file", type=["pdf", "doc", "docx", "txt"])
-    index_document(st.session_state.llm_object, st.session_state.uploaded_file)
+    _, retriever = index_document(st.session_state.llm_object, st.session_state.uploaded_file)
 
     st.markdown("---")
     st.markdown("# About")
@@ -145,6 +145,7 @@ def sidebar():
     )
 
     faq()
+    return retriever
 
 
 def chat_box() :
@@ -153,11 +154,11 @@ def chat_box() :
             st.markdown(message["content"])
 
 
-def generate_chat_stream(prompt) :
+def generate_chat_stream(prompt, retriever) :
 
     with st.spinner("Fetching relevant answers from source document..."):
         response, sources = st.session_state.llm_object.mixtral_chat_inference(prompt, st.session_state.history, st.session_state.temperature,
-                                                                               st.session_state.top_p, st.session_state.repetition_penalty,
+                                                                               st.session_state.top_p, st.session_state.repetition_penalty, retriever)
 
 
     return response, sources
@@ -186,15 +187,15 @@ def load_model():
     return llm_model.LlmModel()
 
 st.set_page_config(page_title="Document QA Bot")
-lottie_book = load_lottieurl("https://assets4.lottiefiles.com/temp/lf20_aKAfIn.json")
-st_lottie(lottie_book, speed=1, height=200, key="initial")
+#lottie_book = load_lottieurl("https://assets4.lottiefiles.com/temp/lf20_aKAfIn.json")
+#st_lottie(lottie_book, speed=1, height=200, key="initial")
 # Place the title below the Lottie animation
 st.title("Document Q&A Bot 🤖")
 
 # initialize session state for streamlit app
 init_state()
 # Left Sidebar
-sidebar()
+retriever = sidebar()
 chat_box()
 
 if prompt := st.chat_input("Ask a question about your document!"):
@@ -202,7 +203,7 @@ if prompt := st.chat_input("Ask a question about your document!"):
     st.session_state.messages.append({"role": "user", "content": prompt})
 
     try:
-        chat_stream, sources = generate_chat_stream(prompt)
+        chat_stream, sources = generate_chat_stream(prompt, retriever)
 
         with st.chat_message("assistant"):
            placeholder = st.empty()