Commit · 711470d
Parent: 127ad2f
Update app.py

app.py CHANGED
@@ -33,7 +33,7 @@ prompt_template = Template("""\
 """)
 
 
-system_prompt = "### Assistant: I am StableVicuna, a large language model created by CarperAI
+system_prompt = "### Assistant: I am StableVicuna, a large language model created by CarperAI. I am here to chat!"
 system_prompt_tokens = tokenizer([f"{system_prompt}\n\n"], return_tensors="pt")
 max_sys_tokens = system_prompt_tokens['input_ids'].size(-1)
 
@@ -107,7 +107,7 @@ def user(user_message, history):
 
 
 with gr.Blocks() as demo:
-    gr.Markdown("#StableVicuna by CarperAI
+    gr.Markdown("#StableVicuna by CarperAI")
     gr.HTML("<a href='https://huggingface.co/CarperAI/stable-vicuna-13b-delta'><code>CarperAI/stable-vicuna-13b-delta</a>")
     gr.HTML('''<center><a href="https://huggingface.co/spaces/CarperAI/StableVicuna?duplicate=true"><img src="https://bit.ly/3gLdBN6" alt="Duplicate Space"></a>Duplicate the Space to skip the queue and run in a private space</center>''')
 
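Both changed lines terminate string literals that were previously left open, which would keep app.py from being parsed. As a minimal sketch of what the first hunk's surrounding code does (tokenizing the system prompt and counting its tokens so the chat loop can budget around them), assuming the Space loads a Hugging Face tokenizer; the checkpoint name below is an assumption, not taken from the diff:

```python
# Minimal sketch, not the Space's full app.py: reproduce how the corrected
# system_prompt is tokenized and its length measured.
# Assumption: a transformers tokenizer is used; the checkpoint name is
# illustrative and not confirmed by the diff.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("CarperAI/stable-vicuna-13b-delta")  # assumed checkpoint

# Fixed line from the commit: the string is now properly terminated.
system_prompt = "### Assistant: I am StableVicuna, a large language model created by CarperAI. I am here to chat!"

# Same calls as the diff's context lines: tokenize the prompt (plus trailing
# newlines) and count how many tokens it occupies.
system_prompt_tokens = tokenizer([f"{system_prompt}\n\n"], return_tensors="pt")
max_sys_tokens = system_prompt_tokens['input_ids'].size(-1)
print(max_sys_tokens)
```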
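The second hunk touches the Gradio layout. A minimal runnable sketch of just that block, under the assumption that only the lines visible in the diff are reproduced (the full app defines more components than shown here):

```python
# Illustrative stub of the UI block from the second hunk, not the Space's
# complete interface. The gr.Markdown call reflects the fixed line from the
# commit, with its closing quote and parenthesis restored.
import gradio as gr

with gr.Blocks() as demo:
    gr.Markdown("#StableVicuna by CarperAI")
    gr.HTML("<a href='https://huggingface.co/CarperAI/stable-vicuna-13b-delta'>"
            "<code>CarperAI/stable-vicuna-13b-delta</code></a>")

if __name__ == "__main__":
    demo.launch()
```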