Tuchuanhuhuhu committed
Commit 6b85336 · 1 Parent(s): 4389e9f

Improve the "delete last conversation" feature

Files changed (2)
  1. ChuanhuChatbot.py +1 -1
  2. utils.py +2 -2
ChuanhuChatbot.py CHANGED
@@ -120,7 +120,7 @@ with gr.Blocks(css=customCSS) as demo:
 
     retryBtn.click(retry, [keyTxt, systemPromptTxt, history, chatbot, token_count, top_p, temperature, use_streaming_checkbox, model_select_dropdown], [chatbot, history, status_display, token_count], show_progress=True)
 
-    delLastBtn.click(delete_last_conversation, [chatbot, history, token_count, use_streaming_checkbox], [
+    delLastBtn.click(delete_last_conversation, [chatbot, history, token_count], [
        chatbot, history, token_count, status_display], show_progress=True)
 
     reduceTokenBtn.click(reduce_token_size, [keyTxt, systemPromptTxt, history, chatbot, token_count, top_p, temperature, use_streaming_checkbox, model_select_dropdown], [chatbot, history, status_display, token_count], show_progress=True)
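In Gradio, the first list passed to .click() is the set of components whose values become the handler's positional arguments, and the second list receives the return values in order. Dropping use_streaming_checkbox from the input list therefore has to be paired with dropping the streaming parameter from the function itself, which is what the utils.py hunks below do. A minimal, runnable sketch of the same wiring, with the handler body simplified for illustration (this is not the repository's implementation, and it assumes the Gradio 3.x API the project uses, e.g. show_progress=True):

    import gradio as gr

    def delete_last_conversation(chatbot, history, token_count):
        # Drop the latest exchange from the visible chat log.
        if chatbot:
            chatbot.pop()
        # Assumption: history stores the user message and the assistant
        # reply as two separate entries per exchange.
        if len(history) >= 2:
            history.pop()
            history.pop()
        # Drop the token count recorded for that exchange.
        if token_count:
            token_count.pop()
        status = f"Token count: {sum(token_count)}"
        return chatbot, history, token_count, status

    with gr.Blocks() as demo:
        chatbot = gr.Chatbot()
        history = gr.State([])
        token_count = gr.State([])
        status_display = gr.Markdown()
        delLastBtn = gr.Button("Delete last conversation")

        # First list: components passed as inputs; second list: components
        # updated from the return values, in order. After this commit only
        # three inputs remain.
        delLastBtn.click(
            delete_last_conversation,
            [chatbot, history, token_count],
            [chatbot, history, token_count, status_display],
            show_progress=True,
        )

    if __name__ == "__main__":
        demo.launch()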
utils.py CHANGED
@@ -280,7 +280,7 @@ def reduce_token_size(openai_api_key, system_prompt, history, chatbot, token_cou
     print("减少token数量完毕")
 
 
-def delete_last_conversation(chatbot, history, previous_token_count, streaming):
+def delete_last_conversation(chatbot, history, previous_token_count):
     if len(chatbot) > 0 and standard_error_msg in chatbot[-1][1]:
         print("由于包含报错信息,只删除chatbot记录")
         chatbot.pop()
@@ -295,7 +295,7 @@ def delete_last_conversation(chatbot, history, previous_token_count, streaming):
     if len(previous_token_count) > 0:
         print("删除了一组对话的token计数记录")
         previous_token_count.pop()
-    return chatbot, history, previous_token_count, construct_token_message(sum(previous_token_count), streaming)
+    return chatbot, history, previous_token_count, construct_token_message(sum(previous_token_count))
 
 
 def save_chat_history(filename, system, history, chatbot):
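Since the commit touches only these two lines of utils.py, construct_token_message itself is unchanged; for the new single-argument call to work, the function presumably already gives its streaming parameter a default value. A hedged sketch under that assumption (the real body is not shown in this diff; the message text here is illustrative only):

    def construct_token_message(token_count, streaming=False):
        # Hypothetical body: only the call site appears in the diff above.
        # The relevant detail is the default for `streaming`, which lets
        # delete_last_conversation omit the second argument.
        suffix = " (streaming)" if streaming else ""
        return f"Token count: {token_count}{suffix}"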