Update app.py
app.py CHANGED

@@ -7,12 +7,12 @@ client = InferenceClient(
 
 
 def format_prompt(message, history):
-
-
-
-
-
-
+    prompt = "<s>"
+    for user_prompt, bot_response in history:
+        prompt += f"[INST] {user_prompt} [/INST]"
+        prompt += f" {bot_response}</s> "
+    prompt += f"[INST] {message} [/INST]"
+    return prompt
 
 def generate(
     prompt, history, temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
@@ -88,7 +88,7 @@ css = """
     border: 0.5px solid #ccc;
 }
 """
-article = "Inspired by [Skier8402](
+article = "Inspired by [Skier8402](https://huggingface.co/spaces/Skier8402/mistral-super-fast)"
 
 
 with gr.Blocks(css=css, article=article) as demo:
@@ -98,5 +98,4 @@ with gr.Blocks(css=css, article=article) as demo:
         additional_inputs=additional_inputs,
         examples=[["Joke on darth vader and gandalf"], ["Write me a recipe for Chicken Tikka Masala."]]
     )
-
-demo.queue().launch(debug=True)
+demo.queue().launch(debug=True)
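
For reference, a minimal sketch of what the new format_prompt produces, using the function body copied from the diff above; the sample history and message are hypothetical values for illustration only:

# format_prompt as added in this commit (copied from the diff above).
def format_prompt(message, history):
    prompt = "<s>"
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST]"
        prompt += f" {bot_response}</s> "
    prompt += f"[INST] {message} [/INST]"
    return prompt

# Hypothetical usage: one earlier exchange plus a new user message.
history = [("Hello", "Hi there!")]
print(format_prompt("Tell me a joke.", history))
# Prints: <s>[INST] Hello [/INST] Hi there!</s> [INST] Tell me a joke. [/INST]

This builds a Mistral-style instruct prompt, wrapping each user turn in [INST] ... [/INST] and closing each bot response with </s> before appending the new message.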