TogetherAI committed
Commit 35b1737
Parent(s): 39c60f0
Create app.py
app.py
ADDED
@@ -0,0 +1,103 @@
+from huggingface_hub import InferenceClient
+import gradio as gr
+
+client = InferenceClient(
+    "mistralai/Mistral-7B-Instruct-v0.1"
+)
+
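+# Build a Mistral-Instruct-style prompt. The [IDENTITY] block is a custom system
+# preamble used by this Space (not an official Mistral control token); the
+# [INST]...[/INST] pairs follow the model's expected chat format.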
+def format_prompt(message, history):
+    prompt = "<s>"
+    prompt += "[IDENTITY] You are Ailex, a clone and close collaborator of Einfach.Alex. As part of the EinfachChat team, you assist your mentor Alex in a multitude of projects and initiatives. Your expertise is broad and encompasses sales, customer consulting, AI, prompt engineering, web design and media design. Your life motto is 'Simply.Do!'. You write only in German. [/IDENTITY]"
+    for user_prompt, bot_response in history:
+        prompt += f"[INST] {user_prompt} [/INST]"
+        prompt += f" {bot_response}</s> "
+    prompt += f"[INST] {message} [/INST]"
+    return prompt
+
+
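+# Illustrative only: with history=[("Hallo", "Hallo! Wie kann ich helfen?")] and
+# message="Wer bist du?", format_prompt() returns roughly:
+#   "<s>[IDENTITY] ... [/IDENTITY][INST] Hallo [/INST] Hallo! Wie kann ich helfen?</s> [INST] Wer bist du? [/INST]"
+
+# Stream the completion token by token and yield the accumulated text so the
+# Gradio ChatInterface can render the reply incrementally.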
+def generate(
+    prompt, history, temperature=0.9, max_new_tokens=512, top_p=0.95, repetition_penalty=1.0,
+):
+    temperature = float(temperature)
+    if temperature < 1e-2:
+        temperature = 1e-2
+    top_p = float(top_p)
+
+    generate_kwargs = dict(
+        temperature=temperature,
+        max_new_tokens=max_new_tokens,
+        top_p=top_p,
+        repetition_penalty=repetition_penalty,
+        do_sample=True,
+        seed=42,
+    )
+
+    formatted_prompt = format_prompt(prompt, history)
+
+    stream = client.text_generation(
+        formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False
+    )
+    output = ""
+
+    for response in stream:
+        output += response.token.text
+        yield output
+
+
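+# Sliders shown in the ChatInterface "Additional Inputs" accordion; their values
+# are passed to generate() after (prompt, history), in this order.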
+additional_inputs = [
+    gr.Slider(
+        label="Temperature",
+        value=0.9,
+        minimum=0.0,
+        maximum=1.0,
+        step=0.05,
+        interactive=True,
+        info="Higher values produce more diverse outputs",
+    ),
+    gr.Slider(
+        label="Max new tokens",
+        value=256,
+        minimum=0,
+        maximum=1024,
+        step=64,
+        interactive=True,
+        info="The maximum number of new tokens",
+    ),
+    gr.Slider(
+        label="Top-p (nucleus sampling)",
+        value=0.90,
+        minimum=0.0,
+        maximum=1.0,
+        step=0.05,
+        interactive=True,
+        info="Higher values sample more low-probability tokens",
+    ),
+    gr.Slider(
+        label="Repetition penalty",
+        value=1.2,
+        minimum=1.0,
+        maximum=2.0,
+        step=0.05,
+        interactive=True,
+        info="Penalize repeated tokens",
+    ),
+]
+
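+# Basic CSS; note that no component in this app sets elem_id="mkd", so this
+# rule only takes effect if such an element is added later.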
+css = """
+#mkd {
+    height: 500px;
+    overflow: auto;
+    border: 1px solid #ccc;
+}
+"""
+
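+# Wrap the ChatInterface in gr.Blocks so the custom HTML headers, CSS and theme
+# can be applied around it.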
+with gr.Blocks(css=css, theme="NoCrypt/[email protected]") as demo:  # theme and CSS applied here
+    gr.HTML("<h1><center>Chat with (Mistrailex 7B)</center></h1>")
+    gr.HTML("<h3><center>Einfach.Fragen 💬</center></h3>")
+    gr.HTML("<h3><center>Learn more about the model <a href='https://huggingface.co/docs/transformers/main/model_doc/mistral'>here</a>. 📚</center></h3>")
+    gr.ChatInterface(
+        generate,
+        additional_inputs=additional_inputs,
+        examples=[["What is the secret to life?"], ["Write me a recipe for pancakes."]],
+    )
+
+demo.queue().launch(debug=True)