Update app.py
app.py CHANGED
@@ -33,6 +33,13 @@ tokenizer = transformers.AutoTokenizer.from_pretrained(
     #use_auth_token=HF_AUTH
 )
 
+DESCRIPTION = """
+# StableBeluga2 70B Chat
+This is a streaming Chat Interface implementation of [StableBeluga2](https://huggingface.co/stabilityai/StableBeluga2)
+You can modify the system prompt, which can be quite fun. For example, you can try something like "You are a mean AI. Phrase all replies as insults" for a good laugh.
+
+Sometimes the model doesn't appropriately hit its stop token. Feel free to hit "stop" and "retry" if this happens to you. Or PR a fix to stop the stream if the tokens for User: get hit or something.
+"""
 
 def prompt_build(system_prompt, user_inp, hist):
     prompt = f"""### System:\n{system_prompt}\n\n"""
@@ -70,6 +77,7 @@ def chat(user_input, history, system_prompt):
 
 
 with gr.Blocks() as demo:
+    gr.Markdown(DESCRIPTION)
     system_prompt = gr.Textbox("You are helpful AI.", label="System Prompt")
     chatbot = gr.ChatInterface(fn=chat, additional_inputs=[system_prompt])
 
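The DESCRIPTION added in this commit notes that the model sometimes runs past its stop token and invites a PR to cut the stream when the "User:" marker shows up. A minimal sketch of one way to do that with transformers' StoppingCriteria follows; the "### User:" marker, the class name, and the wiring into chat() are assumptions, since the diff only shows fragments of app.py and not the generate() call.

# Sketch only: assumes app.py generates with transformers (the diff shows
# transformers.AutoTokenizer) and that generation should halt once the model
# starts emitting the next user turn instead of a proper stop token.
from transformers import StoppingCriteria, StoppingCriteriaList

class StopOnUserTurn(StoppingCriteria):
    def __init__(self, tokenizer, marker="### User:", window=12):
        self.tokenizer = tokenizer
        self.marker = marker
        self.window = window  # only decode the last few tokens on each step

    def __call__(self, input_ids, scores, **kwargs):
        # Decode just the tail of the sequence and stop if the marker appears.
        tail = self.tokenizer.decode(input_ids[0, -self.window:], skip_special_tokens=True)
        return self.marker in tail

# Hypothetical wiring inside chat(); the actual generate() call is not in this diff:
# model.generate(**inputs, streamer=streamer,
#                stopping_criteria=StoppingCriteriaList([StopOnUserTurn(tokenizer)]))

Even with such a criterion, the streamed text would still need the trailing marker trimmed before yielding, since part of it may already have been sent to the ChatInterface.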