pflooky committed on
Commit
d4f5649
β€’
1 Parent(s): 8324134

Ensure README contains reference to gradio

Browse files
Files changed (2) hide show
  1. README.md +2 -2
  2. app.py +12 -6
README.md CHANGED
@@ -3,8 +3,8 @@ title: Document Answering
3
  emoji: πŸ”₯
4
  colorFrom: gray
5
  colorTo: purple
6
- sdk: streamlit
7
- sdk_version: 1.30.0
8
  app_file: app.py
9
  pinned: false
10
  license: apache-2.0
 
3
  emoji: πŸ”₯
4
  colorFrom: gray
5
  colorTo: purple
6
+ sdk: gradio
7
+ sdk_version: 3.1.7
8
  app_file: app.py
9
  pinned: false
10
  license: apache-2.0
app.py CHANGED
@@ -1,3 +1,5 @@
 
 
1
  import gradio as gr
2
  from langchain.docstore.document import Document
3
  from langchain.text_splitter import RecursiveCharacterTextSplitter, Language
@@ -72,10 +74,14 @@ def predict(message, history):
72
  # resp = llm_model.answer_question_inference(message)
73
  # return resp.get("answer")
74
  resp = llm_model.answer_question_inference_text_gen(message)
75
- final_resp = ""
76
- for c in resp:
77
- final_resp += str(c)
78
- yield final_resp
 
 
 
 
79
  # start_time = time.time()
80
  # res = llm_model({"query": message})
81
  # sources = []
@@ -110,8 +116,8 @@ chat_interface_stream = gr.ChatInterface(
110
  description="πŸ“šπŸ”¦ Upload some documents on the side and ask questions!",
111
  textbox=gr.Textbox(container=False, scale=7),
112
  chatbot=chatbot_stream,
113
- examples=["What is Data Caterer?", "Provide a set of potential questions and answers about the README"]
114
- )
115
 
116
  with gr.Blocks() as blocks:
117
  with gr.Row():
 
1
+ import time
2
+
3
  import gradio as gr
4
  from langchain.docstore.document import Document
5
  from langchain.text_splitter import RecursiveCharacterTextSplitter, Language
 
74
  # resp = llm_model.answer_question_inference(message)
75
  # return resp.get("answer")
76
  resp = llm_model.answer_question_inference_text_gen(message)
77
+ for i in range(len(resp)):
78
+ time.sleep(0.005)
79
+ yield resp[:i + 1]
80
+ # final_resp = ""
81
+ # for c in resp:
82
+ # final_resp += str(c)
83
+ # # + "β–Œ"
84
+ # yield final_resp
85
  # start_time = time.time()
86
  # res = llm_model({"query": message})
87
  # sources = []
 
116
  description="πŸ“šπŸ”¦ Upload some documents on the side and ask questions!",
117
  textbox=gr.Textbox(container=False, scale=7),
118
  chatbot=chatbot_stream,
119
+ examples=["What is Data Caterer?"]
120
+ ).queue(default_concurrency_limit=1)
121
 
122
  with gr.Blocks() as blocks:
123
  with gr.Row():