pflooky committed
Commit 6df72ab • 1 Parent(s): d4f5649

Fix likeable button

Files changed (1):
  1. app.py +2 -10
app.py CHANGED
@@ -14,7 +14,7 @@ uploaded_df = gr.Dataframe(headers=["file_name", "content_length"])
 upload_files_section = gr.Files(
     file_types=[".md", ".mdx", ".rst", ".txt"],
 )
-chatbot_stream = gr.Chatbot(bubble_full_width=False, show_copy_button=True)
+chatbot_stream = gr.Chatbot(bubble_full_width=False, show_copy_button=True, likeable=True)
 
 
 def load_docs(files):
@@ -100,13 +100,6 @@ def predict(message, history):
     # return format_answer
 
 
-def vote(data: gr.LikeData):
-    if data.liked:
-        gr.Info("You upvoted this response 😊", )
-    else:
-        gr.Warning("You downvoted this response 👀")
-
-
 vector_db = get_vector_db()
 llm_model = get_llm_model(vector_db)
 
@@ -116,7 +109,7 @@ chat_interface_stream = gr.ChatInterface(
     description="📚🔦 Upload some documents on the side and ask questions!",
     textbox=gr.Textbox(container=False, scale=7),
     chatbot=chatbot_stream,
-    examples=["What is Data Caterer?"]
+    examples=["What is Data Caterer?"],
 ).queue(default_concurrency_limit=1)
 
 with gr.Blocks() as blocks:
@@ -131,7 +124,6 @@ with gr.Blocks() as blocks:
         )
         # upload_files_section.upload(load_docs, inputs=upload_files_section)
         with gr.Column(scale=4, min_width=600) as chat_col:
-            chatbot_stream.like(vote, None, None)
             chat_interface_stream.render()
 
 blocks.queue().launch()
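
For context, a minimal runnable sketch of the wiring this commit settles on, assuming Gradio 4.x semantics where likeable=True on gr.Chatbot renders the built-in thumbs up/down buttons without a separate .like() listener. The echo function, demo variable and row/column layout below are placeholders, not the app's real predict, vector DB or LLM code.

# Minimal sketch, not the full app: a likeable chatbot rendered inside Blocks,
# mirroring the layout above (assumes Gradio 4.x).
import gradio as gr


def echo(message, history):
    # Placeholder for the real predict() that queries the vector DB / LLM.
    return f"You said: {message}"


# likeable=True shows the built-in thumbs up/down buttons on each response.
chatbot = gr.Chatbot(show_copy_button=True, likeable=True)

chat = gr.ChatInterface(
    echo,
    chatbot=chatbot,
    textbox=gr.Textbox(container=False, scale=7),
    examples=["What is Data Caterer?"],
).queue(default_concurrency_limit=1)

with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column(scale=4, min_width=600):
            chat.render()

demo.queue().launch()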