# self-chat / app.py
"""
来自 https://github.com/OpenLMLab/MOSS/blob/main/moss_web_demo_gradio.py
# 单卡报错
python moss_web_demo_gradio.py --model_name fnlp/moss-moon-003-sft --gpu 0,1,2,3
# TODO
- 第一句:
- 代码和表格的预览
- 可编辑chatbot:https://github.com/gradio-app/gradio/issues/4444
"""
from transformers.generation.utils import logger
import gradio as gr
import argparse
import warnings
import torch
import os
# from moss_util import generate_query
from models.qwen2_util import bot
# generate_query = None
# gr.ChatInterface
# from gpt35 import build_message_for_gpt35, send_one_query
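
# `bot` (imported from models.qwen2_util above) is expected to expose
# bot.generate_query(history) and bot.generate_response(query, history), where
# history is a list of (query, response) tuples. models/qwen2_util.py is not
# shown in this file; the class below is only an illustrative sketch of that
# interface, assuming a Hugging Face chat model ("Qwen/Qwen2-7B-Instruct" is a
# placeholder choice). It is not instantiated or used by this app.
class _SelfChatBotSketch:
    def __init__(self, model_name="Qwen/Qwen2-7B-Instruct"):
        # Lazy imports so that merely defining this sketch has no side effects.
        from transformers import AutoModelForCausalLM, AutoTokenizer
        self.tokenizer = AutoTokenizer.from_pretrained(model_name)
        self.model = AutoModelForCausalLM.from_pretrained(
            model_name, torch_dtype="auto", device_map="auto"
        )

    def _chat(self, messages):
        # Render the conversation with the model's chat template and decode one reply.
        input_ids = self.tokenizer.apply_chat_template(
            messages, add_generation_prompt=True, return_tensors="pt"
        ).to(self.model.device)
        output_ids = self.model.generate(input_ids, max_new_tokens=512)
        return self.tokenizer.decode(output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True)

    def generate_query(self, history):
        # Let the model play the *user*: earlier queries become assistant turns,
        # earlier responses become user turns, and the next generated turn is a query.
        messages = [{"role": "system", "content": "Play a curious user and ask the next question."}]
        for query, response in history:
            messages.append({"role": "assistant", "content": query})
            if response is not None:
                messages.append({"role": "user", "content": response})
        return self._chat(messages)

    def generate_response(self, query, history):
        # Ordinary assistant turn, conditioned on the completed history plus the new query.
        messages = []
        for q, r in history:
            messages.append({"role": "user", "content": q})
            messages.append({"role": "assistant", "content": r})
        messages.append({"role": "user", "content": query})
        return self._chat(messages)
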
#
# def postprocess(self, y):
#     if y is None:
#         return []
#     for i, (message, response) in enumerate(y):
#         y[i] = (
#             None if message is None else mdtex2html.convert((message)),
#             None if response is None else mdtex2html.convert(response),
#         )
#     return y
#
#
# gr.Chatbot.postprocess = postprocess
def parse_text(text):
    """Convert model output into HTML for the Chatbot component.

    Copied from https://github.com/GaiZhenbiao/ChuanhuChatGPT/
    """
    lines = text.split("\n")
    lines = [line for line in lines if line != ""]
    count = 0
    for i, line in enumerate(lines):
        if "```" in line:
            count += 1
            items = line.split('`')
            if count % 2 == 1:
                # opening fence: start a highlighted code block
                lines[i] = f'<pre><code class="language-{items[-1]}">'
            else:
                # closing fence
                lines[i] = '<br></code></pre>'
        else:
            if i > 0:
                if count % 2 == 1:
                    # inside a code block: escape markdown/HTML special characters
                    line = line.replace("`", "\\`")
                    line = line.replace("<", "&lt;")
                    line = line.replace(">", "&gt;")
                    line = line.replace(" ", "&nbsp;")
                    line = line.replace("*", "&ast;")
                    line = line.replace("_", "&lowbar;")
                    line = line.replace("-", "&#45;")
                    line = line.replace(".", "&#46;")
                    line = line.replace("!", "&#33;")
                    line = line.replace("(", "&#40;")
                    line = line.replace(")", "&#41;")
                    line = line.replace("$", "&#36;")
                lines[i] = "<br>" + line
    text = "".join(lines)
    return text

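# parse_text is currently not called anywhere in this file; it is only needed if
# a Chatbot post-processing hook (like the commented-out one above) is enabled.
# Illustrative input/output (not executed here):
#   parse_text("Example:\n```python\nprint(1)\n```")
#   -> 'Example:<pre><code class="language-python"><br>print&#40;1&#41;<br></code></pre>'
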
def generate_query(chatbot, history):
    """Let the bot play the user: produce the next query and append it as a pending turn."""
    if history and history[-1][1] is None:  # the last query has no response yet; generate that response first
        return None, chatbot, history
    query = bot.generate_query(history)
    # chatbot.append((query, ""))
    chatbot.append((query, None))
    history = history + [(query, None)]
    return query, chatbot, history

def generate_response(query, chatbot, history):
    """
    Auto mode: query is None, or query == history[-1][0].
    Manual mode: query is arbitrary user-provided text.
    :param query:
    :param chatbot:
    :param history:
    :return:
    """
    # messages = build_message_for_gpt35(query, history)
    # response, success = send_one_query(query, messages, model="gpt-35-turbo")
    # response = response["choices"][0]["message"]["content"]
    #
    if history[-1][1] is not None or chatbot[-1][1] is not None:  # last turn already answered
        return chatbot, history
    if query is None:
        query = history[-1][0]
    response = bot.generate_response(query, history[:-1])
    # chatbot.append((query, response))
    history[-1] = (query, response)
    chatbot[-1] = (query, response)
    print(f"chatbot is {chatbot}")
    print(f"history is {history}")
    return chatbot, history

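# Shared state convention used by the two functions above (illustrative values):
#   history == []                                         # after reset_state / on startup
#   after generate_query:    history == [("Q1", None)]    # query asked, response pending
#   after generate_response: history == [("Q1", "A1")]    # turn completed
# `chatbot` mirrors `history` and is what the gr.Chatbot component renders.
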
def reset_user_input():
    return gr.update(value='')


def reset_state():
    return [], []

"""
TODO: 使用说明
avatar_images
"""
with gr.Blocks() as demo:
    gr.HTML("""<h1 align="center">Welcome to the self-chat AI assistant!</h1>""")
    gr.Textbox(value="You are a friendly Chatbot.", label="System message")
    system = gr.Textbox(show_label=False, placeholder="You are a helpful assistant.")
    chatbot = gr.Chatbot(avatar_images=("assets/profile.png", "assets/bot.png"))
    with gr.Row():
        with gr.Column(scale=4):
            user_input = gr.Textbox(show_label=False, placeholder="Input...", lines=10)
            with gr.Row():
                generate_query_btn = gr.Button("Generate Query")
                regen_btn = gr.Button("🤔️ Regenerate (重试)")
                submit_btn = gr.Button("Generate Response", variant="primary")
                stop_btn = gr.Button("Stop Generating", variant="primary")
                empty_btn = gr.Button("🧹 Clear History (清除历史)")
        with gr.Column(scale=1):
            # generate_query_btn = gr.Button("Generate First Query")
            clear_btn = gr.Button("Reset")
            gr.Dropdown(
                ["moss", "chatglm-2", "chatpdf"],
                value="moss",
                label="Query generator",
                # info="Will add more animals later!"
            )
            gr.Dropdown(
                ["moss", "chatglm-2", "gpt3.5-turbo"],
                value="gpt3.5-turbo",
                label="Response generator",
                # info="Will add more animals later!"
            )
    history = gr.State([])  # (message, bot_message)

    submit_btn.click(generate_response, [user_input, chatbot, history], [chatbot, history],
                     show_progress=True)
    # submit_btn.click(reset_user_input, [], [user_input])
    clear_btn.click(reset_state, outputs=[chatbot, history], show_progress=True)
    generate_query_btn.click(generate_query, [chatbot, history],
                             outputs=[user_input, chatbot, history], show_progress=True)
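    # regen_btn, stop_btn and empty_btn are not wired up yet. One possible sketch,
    # assuming gradio's `cancels=` support for aborting a queued event (untested here):
    #   submit_event = submit_btn.click(generate_response, [user_input, chatbot, history],
    #                                   [chatbot, history], show_progress=True)
    #   stop_btn.click(None, None, None, cancels=[submit_event])
    #   empty_btn.click(reset_state, outputs=[chatbot, history], show_progress=True)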
    # Generation hyperparameters (shown in the UI but not yet passed to the bot).
    gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
    gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
    gr.Slider(
        minimum=0.1,
        maximum=1.0,
        value=0.95,
        step=0.05,
        label="Top-p (nucleus sampling)",
    )
demo.queue().launch(share=False)
# demo.queue().launch(share=True)