mandrx committed
Commit 8fd8ba0
Parent: 0f8fd9e

Update app.py

Files changed (1):
  1. app.py  +11 -11
app.py CHANGED
@@ -140,11 +140,11 @@ def set_state_if_absent(key, value):
 
 # Adjust to a question that you would like users to see in the search bar when they load the UI:
 DEFAULT_QUESTION_AT_STARTUP = os.getenv(
-    "DEFAULT_QUESTION_AT_STARTUP", "My blog post discusses remote work. Give me statistics."
+    "DEFAULT_QUESTION_AT_STARTUP", "What is Bio-lingzhi?"
 )
 DEFAULT_ANSWER_AT_STARTUP = os.getenv(
     "DEFAULT_ANSWER_AT_STARTUP",
-    "7% more remote workers have been at their current organization for 5 years or fewer",
+    "-",
 )
 
 # Sliders
@@ -171,14 +171,14 @@ def reset_results(*args):
 
 # Title
 st.write("# GPT3 and Langchain Demo")
-st.markdown(
-    """
-This demo takes its data from the documents uploaded to the Pinecone index through this app. \n
-Ask any question from the uploaded documents and Pinecone will retrieve the context for answers and GPT3 will answer them using the retrieved context. \n
-*Note: do not use keywords, but full-fledged questions.* The demo is not optimized to deal with keyword queries and might misunderstand you.
-    """,
-    unsafe_allow_html=True,
-)
+# st.markdown(
+#     """
+# This demo takes its data from the documents uploaded to the Pinecone index through this app. \n
+# Ask any question from the uploaded documents and Pinecone will retrieve the context for answers and GPT3 will answer them using the retrieved context. \n
+# *Note: do not use keywords, but full-fledged questions.* The demo is not optimized to deal with keyword queries and might misunderstand you.
+#     """,
+#     unsafe_allow_html=True,
+# )
 
 # Sidebar
 # st.sidebar.header("Options")
@@ -224,7 +224,7 @@ index_name = "qa_demo"
 # we will use batches of 64
 batch_size = 100
 # docs = docs['documents']
-with st.spinner("🧠    Performing indexing of uplaoded documents... \n "):
+with st.spinner("🧠    Performing indexing of uploaded documents... \n "):
     for i in range(0, len(docs), batch_size):
         # find end of batch
         i_end = min(i + batch_size, len(docs))
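
Since both startup defaults are read with `os.getenv`, they can be overridden at deploy time (for example via the Space's environment settings) without editing app.py. A minimal sketch of that pattern, using the variable names from the diff above; the fallback strings simply mirror the new defaults:

```python
import os

# Set DEFAULT_QUESTION_AT_STARTUP / DEFAULT_ANSWER_AT_STARTUP in the
# environment to override these fallbacks without changing the code.
DEFAULT_QUESTION_AT_STARTUP = os.getenv(
    "DEFAULT_QUESTION_AT_STARTUP", "What is Bio-lingzhi?"
)
DEFAULT_ANSWER_AT_STARTUP = os.getenv("DEFAULT_ANSWER_AT_STARTUP", "-")

print(DEFAULT_QUESTION_AT_STARTUP, DEFAULT_ANSWER_AT_STARTUP)
```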
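
The indexing hunk slices the uploaded documents into fixed-size batches inside a `st.spinner` block (a Streamlit context manager that shows the message while the wrapped work runs). Below is a self-contained sketch of just the slicing pattern, with placeholder documents and a small batch size for illustration; the actual Pinecone upsert call is omitted:

```python
docs = [f"document {n}" for n in range(10)]  # placeholder documents
batch_size = 4  # app.py uses 100

for i in range(0, len(docs), batch_size):
    # find end of batch, clamped to the number of documents
    i_end = min(i + batch_size, len(docs))
    batch = docs[i:i_end]
    print(f"indexing docs {i}..{i_end - 1} ({len(batch)} items)")
```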