import os

from openai import OpenAI
import gradio as gr

# Read the API key from the environment and pass it to the client;
# the OpenAI v1 SDK takes the key as a constructor argument.
api_key = os.getenv("OPENAI_API_KEY")
client = OpenAI(api_key=api_key)


def predict(message, history):
    history_openai_format = []
    # System prompt (in Chinese): "You are a professional Chinese psychological
    # counselor and companion. Always answer in Chinese, be patient with the user,
    # and talk to them as a friend, which means answering in a colloquial tone,
    # without long-winded replies and without bullet-point answers. Occasionally
    # ask follow-up questions about what the user says, and give the necessary
    # advice and guidance."
    history_openai_format.append({
        "role": "system",
        "content": "你是一个专业的中国心理咨询师与心理陪伴师,你的所有内容都需要用【中文】回答,你必须对你的患者耐心,你需要以【朋友】的身份和患者交流,这意味着你需要用更加【口语化】的文字回答,并且【不要长篇大论】,更【不要分点作答】。可以偶尔针对用户的回答进行【提问】,并给予必要的【建议和引导】。",
    })
    # Replay the previous turns in the format expected by the Chat Completions API.
    for human, assistant in history:
        history_openai_format.append({"role": "user", "content": human})
        history_openai_format.append({"role": "assistant", "content": assistant})
    history_openai_format.append({"role": "user", "content": message})

    response = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=history_openai_format,
        # Earlier single-turn variant that ignored the conversation history:
        # messages=[
        #     {
        #         "role": "system",
        #         "content": "你是一个专业的中国心理医生,你的所有内容都需要用【中文】回答,你必须对你的患者耐心,你需要以【朋友】的身份和患者交流,这意味着你需要用更加【口语化】的文字回答,并且【不要长篇大论】,更【不要分点作答】。",
        #     },
        #     {
        #         "role": "user",
        #         "content": message,
        #     },
        # ],
        temperature=1.0,
        stream=True,
    )

    # Stream the reply back to the Gradio UI chunk by chunk.
    partial_message = ""
    for chunk in response:
        if chunk.choices[0].delta.content is not None:
            partial_message = partial_message + chunk.choices[0].delta.content
            yield partial_message


chat = gr.ChatInterface(predict, fill_height=True)
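
# The original file ends after defining the interface without starting it.
# A minimal launch sketch, assuming app.py is run directly (e.g. as the entry
# point of a Hugging Face Space or with `python app.py`):
if __name__ == "__main__":
    chat.launch()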