Tonic committed
Commit f72bc8b · verified · Parent: cfe7f69

Update app.py

Files changed (1): app.py (+34, -15)
app.py CHANGED
@@ -5,14 +5,29 @@ import torch
 from datetime import datetime
 import os
 
-# Model description
+
+Title = """# Welcome to 🌟Tonic's 🌠Lucie-7B-Instruct Demo"""
+
 description = """
-# 🇫🇷 Lucie-7B-Instruct
+🌠Lucie-7B-Instruct is a fine-tuned version of [Lucie-7B](https://huggingface.co/OpenLLM-France/Lucie-7B), an open-source, multilingual causal language model created by OpenLLM-France.
 
-Lucie is a French language model based on Mistral-7B, fine-tuned on French data and instructions.
-This demo allows you to interact with the model and adjust various generation parameters.
+🌠Lucie-7B-Instruct is fine-tuned on synthetic instructions produced by ChatGPT and Gemma and a small set of customized prompts about OpenLLM and Lucie.
 """
 
+training = """
+## Training details
+
+### Training data
+
+Lucie-7B-Instruct is trained on the following datasets:
+* [Alpaca-cleaned](https://huggingface.co/datasets/yahma/alpaca-cleaned) (English; 51604 samples)
+* [Alpaca-cleaned-fr](https://huggingface.co/datasets/cmh/alpaca_data_cleaned_fr_52k) (French; 51655 samples)
+* [Magpie-Gemma](https://huggingface.co/datasets/Magpie-Align/Magpie-Gemma2-Pro-200K-Filtered) (English; 195167 samples)
+* [Wildchat](https://huggingface.co/datasets/allenai/WildChat-1M) (French subset; 26436 samples)
+* Hard-coded prompts concerning OpenLLM and Lucie (based on [allenai/tulu-3-hard-coded-10x](https://huggingface.co/datasets/allenai/tulu-3-hard-coded-10x))
+  * French: openllm_french.jsonl (24x10 samples)
+  * English: openllm_english.jsonl (24x10 samples)"""
+
 join_us = """
 ## Join us:
 🌟TeamTonic🌟 is always making cool demos! Join our active builder's 🛠️community 👻
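For reference, the datasets named in the new `training` string can be pulled straight from the Hub for inspection. A minimal sketch, assuming the `datasets` library is installed and that each repository exposes a `train` split; split names and gating requirements are not part of this diff:

```python
from datasets import load_dataset

# Repository IDs taken from the training-data list above; split names are assumed.
alpaca_en = load_dataset("yahma/alpaca-cleaned", split="train")
alpaca_fr = load_dataset("cmh/alpaca_data_cleaned_fr_52k", split="train")
magpie_en = load_dataset("Magpie-Align/Magpie-Gemma2-Pro-200K-Filtered", split="train")
wildchat = load_dataset("allenai/WildChat-1M", split="train")  # may be gated; accept the terms on the Hub first

for name, ds in [("alpaca_en", alpaca_en), ("alpaca_fr", alpaca_fr),
                 ("magpie_en", magpie_en), ("wildchat", wildchat)]:
    print(name, len(ds), list(ds[0].keys()))
```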
@@ -102,7 +117,11 @@ with gr.Blocks() as demo:
 **Padding Token:** {tokenizer.pad_token}
 **EOS Token:** {tokenizer.eos_token}
 """)
-with gr.Row():
+
+with gr.Column():
+    gr.Markdown(training)
+
+with gr.Row():
     gr.Markdown(join_us)
 
 with gr.Row():
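For context, a self-contained sketch of the layout this hunk introduces, with the new training notes in their own column above the community section; the markdown strings are stubbed here, the full texts are defined in the first hunk:

```python
import gradio as gr

# Stubs standing in for the markdown strings defined earlier in app.py.
training = "## Training details\n(see the `training` string in the first hunk)"
join_us = "## Join us:\n(see the `join_us` string in app.py)"

with gr.Blocks() as demo:
    with gr.Column():   # training details block added by this commit
        gr.Markdown(training)
    with gr.Row():      # community section, content unchanged
        gr.Markdown(join_us)

if __name__ == "__main__":
    demo.launch()
```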
@@ -116,54 +135,54 @@ with gr.Blocks() as demo:
 
 # User prompt
 user_prompt = gr.Textbox(
-    label="Votre message",
+    label="🗣️Votre message",
     placeholder="Entrez votre texte ici...",
     lines=5
 )
 
-with gr.Accordion("Paramètres avancés", open=False):
+with gr.Accordion("🧪Paramètres avancés", open=False):
     temperature = gr.Slider(
         minimum=0.1,
         maximum=2.0,
         value=0.7,
         step=0.1,
-        label="Temperature"
+        label="🌡️Temperature"
     )
     max_new_tokens = gr.Slider(
         minimum=1,
         maximum=2048,
         value=512,
         step=1,
-        label="Longueur maximale"
+        label="💶Longueur maximale"
     )
     top_p = gr.Slider(
         minimum=0.1,
         maximum=1.0,
         value=0.9,
         step=0.1,
-        label="Top-p"
+        label="🏅Top-p"
     )
     top_k = gr.Slider(
         minimum=1,
         maximum=100,
         value=50,
         step=1,
-        label="Top-k"
+        label="🏆Top-k"
     )
     repetition_penalty = gr.Slider(
         minimum=1.0,
         maximum=2.0,
         value=1.2,
         step=0.1,
-        label="Pénalité de répétition"
+        label="🦜Pénalité de répétition"
     )
 
-generate_btn = gr.Button("Générer")
+generate_btn = gr.Button("🌠Générer")
 
 with gr.Column():
     # Output component
     output = gr.Textbox(
-        label="Réponse de Lucie",
+        label="🌠Lucie",
         lines=10
     )
 
@@ -227,7 +246,7 @@ with gr.Blocks() as demo:
         top_k
     ],
     outputs=output,
-    label="Exemples de prompts avec paramètres optimisés"
+    label="Exemples"
 )
 
 # Set up the generation event
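The relabeled sliders above and the "generation event" noted in the trailing context line feed a text-generation callback that this diff does not show. A hedged sketch of how such a callback is typically written for a Space like this one; the model ID, the chat-template call, and the decoding details are assumptions, while the component names and sampling parameters come from the diff:

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Assumption: the Space serves the checkpoint described in the header strings.
model_id = "OpenLLM-France/Lucie-7B-Instruct"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.bfloat16, device_map="auto")

def generate(prompt, temperature, max_new_tokens, top_p, top_k, repetition_penalty):
    # Build a single-turn chat prompt and sample with the slider values.
    input_ids = tokenizer.apply_chat_template(
        [{"role": "user", "content": prompt}],
        add_generation_prompt=True,
        return_tensors="pt",
    ).to(model.device)
    output_ids = model.generate(
        input_ids,
        do_sample=True,
        temperature=temperature,
        max_new_tokens=int(max_new_tokens),
        top_p=top_p,
        top_k=int(top_k),
        repetition_penalty=repetition_penalty,
        pad_token_id=(tokenizer.pad_token_id
                      if tokenizer.pad_token_id is not None
                      else tokenizer.eos_token_id),
    )
    # Return only the newly generated tokens.
    return tokenizer.decode(output_ids[0, input_ids.shape[-1]:], skip_special_tokens=True)
```

The components would then be wired inside the `with gr.Blocks() as demo:` context; the input ordering and the example row below are assumptions, only the tail of the inputs list (`top_k`) and the `label="Exemples"` value appear in the diff:

```python
# Inside `with gr.Blocks() as demo:`, after the components defined in the hunks above.
generate_btn.click(
    fn=generate,
    inputs=[user_prompt, temperature, max_new_tokens, top_p, repetition_penalty, top_k],
    outputs=output,
)

gr.Examples(
    examples=[
        # Hypothetical example row: prompt followed by sampling parameters.
        ["Explique la photosynthèse simplement.", 0.7, 512, 0.9, 1.2, 50],
    ],
    inputs=[user_prompt, temperature, max_new_tokens, top_p, repetition_penalty, top_k],
    outputs=output,
    fn=generate,
    label="Exemples",
)
```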
 