hayas committed on
Commit
1e56efd
1 Parent(s): 6a11c2e

gradio==5.1.0

Browse files
Files changed (7) hide show
  1. .python-version +1 -0
  2. README.md +1 -1
  3. app.py +8 -13
  4. pyproject.toml +15 -0
  5. requirements.txt +241 -8
  6. style.css +0 -6
  7. uv.lock +0 -0
.python-version ADDED
@@ -0,0 +1 @@
 
 
1
+ 3.10
README.md CHANGED
@@ -4,7 +4,7 @@ emoji: ⚡
4
  colorFrom: red
5
  colorTo: purple
6
  sdk: gradio
7
- sdk_version: 4.27.0
8
  app_file: app.py
9
  pinned: false
10
  license: mit
 
4
  colorFrom: red
5
  colorTo: purple
6
  sdk: gradio
7
+ sdk_version: 5.1.0
8
  app_file: app.py
9
  pinned: false
10
  license: mit
app.py CHANGED
@@ -65,6 +65,7 @@ def generate(
65
  temperature=temperature,
66
  num_beams=1,
67
  repetition_penalty=repetition_penalty,
 
68
  )
69
  t = Thread(target=model.generate, kwargs=generate_kwargs)
70
  t.start()
@@ -75,10 +76,10 @@ def generate(
75
  yield "".join(outputs)
76
 
77
 
78
- chat_interface = gr.ChatInterface(
79
  fn=generate,
80
- chatbot=gr.Chatbot(show_label=False, layout="panel", height=600),
81
- additional_inputs_accordion_name="詳細設定",
82
  additional_inputs=[
83
  gr.Slider(
84
  label="Max new tokens",
@@ -123,16 +124,10 @@ chat_interface = gr.ChatInterface(
123
  ["暴れん坊将軍って誰のこと?"],
124
  ["人がヘリを食べるのにかかる時間は?"],
125
  ],
 
 
 
126
  )
127
 
128
- with gr.Blocks(css="style.css") as demo:
129
- gr.Markdown(DESCRIPTION)
130
- gr.DuplicateButton(
131
- value="Duplicate Space for private use",
132
- elem_id="duplicate-button",
133
- visible=os.getenv("SHOW_DUPLICATE_BUTTON") == "1",
134
- )
135
- chat_interface.render()
136
-
137
  if __name__ == "__main__":
138
- demo.queue(max_size=20).launch()
 
65
  temperature=temperature,
66
  num_beams=1,
67
  repetition_penalty=repetition_penalty,
68
+ pad_token_id=tokenizer.eos_token_id,
69
  )
70
  t = Thread(target=model.generate, kwargs=generate_kwargs)
71
  t.start()
 
76
  yield "".join(outputs)
77
 
78
 
79
+ demo = gr.ChatInterface(
80
  fn=generate,
81
+ type="tuples",
82
+ additional_inputs_accordion=gr.Accordion(label="詳細設定", open=False),
83
  additional_inputs=[
84
  gr.Slider(
85
  label="Max new tokens",
 
124
  ["暴れん坊将軍って誰のこと?"],
125
  ["人がヘリを食べるのにかかる時間は?"],
126
  ],
127
+ description=DESCRIPTION,
128
+ css_paths="style.css",
129
+ fill_height=True,
130
  )
131
 
 
 
 
 
 
 
 
 
 
132
  if __name__ == "__main__":
133
+ demo.launch()
pyproject.toml ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [project]
2
+ name = "rakutenai-7b-chat"
3
+ version = "0.1.0"
4
+ description = ""
5
+ readme = "README.md"
6
+ requires-python = ">=3.10"
7
+ dependencies = [
8
+ "accelerate>=1.0.1",
9
+ "bitsandbytes>=0.44.1",
10
+ "gradio>=5.1.0",
11
+ "hf-transfer>=0.1.8",
12
+ "spaces>=0.30.4",
13
+ "torch==2.4.0",
14
+ "transformers>=4.45.2",
15
+ ]
requirements.txt CHANGED
@@ -1,8 +1,241 @@
1
- accelerate==0.29.3
2
- bitsandbytes==0.43.1
3
- gradio==4.27.0
4
- scipy==1.13.0
5
- sentencepiece==0.1.99
6
- spaces==0.26.1
7
- torch==2.0.0
8
- transformers==4.40.0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file was autogenerated by uv via the following command:
2
+ # uv pip compile pyproject.toml -o requirements.txt
3
+ accelerate==1.0.1
4
+ # via rakutenai-7b-chat (pyproject.toml)
5
+ aiofiles==23.2.1
6
+ # via gradio
7
+ annotated-types==0.7.0
8
+ # via pydantic
9
+ anyio==4.6.2.post1
10
+ # via
11
+ # gradio
12
+ # httpx
13
+ # starlette
14
+ bitsandbytes==0.44.1
15
+ # via rakutenai-7b-chat (pyproject.toml)
16
+ certifi==2024.8.30
17
+ # via
18
+ # httpcore
19
+ # httpx
20
+ # requests
21
+ charset-normalizer==3.4.0
22
+ # via requests
23
+ click==8.1.7
24
+ # via
25
+ # typer
26
+ # uvicorn
27
+ exceptiongroup==1.2.2
28
+ # via anyio
29
+ fastapi==0.115.2
30
+ # via gradio
31
+ ffmpy==0.4.0
32
+ # via gradio
33
+ filelock==3.16.1
34
+ # via
35
+ # huggingface-hub
36
+ # torch
37
+ # transformers
38
+ # triton
39
+ fsspec==2024.9.0
40
+ # via
41
+ # gradio-client
42
+ # huggingface-hub
43
+ # torch
44
+ gradio==5.1.0
45
+ # via
46
+ # rakutenai-7b-chat (pyproject.toml)
47
+ # spaces
48
+ gradio-client==1.4.0
49
+ # via gradio
50
+ h11==0.14.0
51
+ # via
52
+ # httpcore
53
+ # uvicorn
54
+ hf-transfer==0.1.8
55
+ # via rakutenai-7b-chat (pyproject.toml)
56
+ httpcore==1.0.6
57
+ # via httpx
58
+ httpx==0.27.2
59
+ # via
60
+ # gradio
61
+ # gradio-client
62
+ # spaces
63
+ huggingface-hub==0.26.0
64
+ # via
65
+ # accelerate
66
+ # gradio
67
+ # gradio-client
68
+ # tokenizers
69
+ # transformers
70
+ idna==3.10
71
+ # via
72
+ # anyio
73
+ # httpx
74
+ # requests
75
+ jinja2==3.1.4
76
+ # via
77
+ # gradio
78
+ # torch
79
+ markdown-it-py==3.0.0
80
+ # via rich
81
+ markupsafe==2.1.5
82
+ # via
83
+ # gradio
84
+ # jinja2
85
+ mdurl==0.1.2
86
+ # via markdown-it-py
87
+ mpmath==1.3.0
88
+ # via sympy
89
+ networkx==3.4.1
90
+ # via torch
91
+ numpy==2.1.2
92
+ # via
93
+ # accelerate
94
+ # bitsandbytes
95
+ # gradio
96
+ # pandas
97
+ # transformers
98
+ nvidia-cublas-cu12==12.1.3.1
99
+ # via
100
+ # nvidia-cudnn-cu12
101
+ # nvidia-cusolver-cu12
102
+ # torch
103
+ nvidia-cuda-cupti-cu12==12.1.105
104
+ # via torch
105
+ nvidia-cuda-nvrtc-cu12==12.1.105
106
+ # via torch
107
+ nvidia-cuda-runtime-cu12==12.1.105
108
+ # via torch
109
+ nvidia-cudnn-cu12==9.1.0.70
110
+ # via torch
111
+ nvidia-cufft-cu12==11.0.2.54
112
+ # via torch
113
+ nvidia-curand-cu12==10.3.2.106
114
+ # via torch
115
+ nvidia-cusolver-cu12==11.4.5.107
116
+ # via torch
117
+ nvidia-cusparse-cu12==12.1.0.106
118
+ # via
119
+ # nvidia-cusolver-cu12
120
+ # torch
121
+ nvidia-nccl-cu12==2.20.5
122
+ # via torch
123
+ nvidia-nvjitlink-cu12==12.6.77
124
+ # via
125
+ # nvidia-cusolver-cu12
126
+ # nvidia-cusparse-cu12
127
+ nvidia-nvtx-cu12==12.1.105
128
+ # via torch
129
+ orjson==3.10.9
130
+ # via gradio
131
+ packaging==24.1
132
+ # via
133
+ # accelerate
134
+ # gradio
135
+ # gradio-client
136
+ # huggingface-hub
137
+ # spaces
138
+ # transformers
139
+ pandas==2.2.3
140
+ # via gradio
141
+ pillow==10.4.0
142
+ # via gradio
143
+ psutil==5.9.8
144
+ # via
145
+ # accelerate
146
+ # spaces
147
+ pydantic==2.9.2
148
+ # via
149
+ # fastapi
150
+ # gradio
151
+ # spaces
152
+ pydantic-core==2.23.4
153
+ # via pydantic
154
+ pydub==0.25.1
155
+ # via gradio
156
+ pygments==2.18.0
157
+ # via rich
158
+ python-dateutil==2.9.0.post0
159
+ # via pandas
160
+ python-multipart==0.0.12
161
+ # via gradio
162
+ pytz==2024.2
163
+ # via pandas
164
+ pyyaml==6.0.2
165
+ # via
166
+ # accelerate
167
+ # gradio
168
+ # huggingface-hub
169
+ # transformers
170
+ regex==2024.9.11
171
+ # via transformers
172
+ requests==2.32.3
173
+ # via
174
+ # huggingface-hub
175
+ # spaces
176
+ # transformers
177
+ rich==13.9.2
178
+ # via typer
179
+ ruff==0.7.0
180
+ # via gradio
181
+ safetensors==0.4.5
182
+ # via
183
+ # accelerate
184
+ # transformers
185
+ semantic-version==2.10.0
186
+ # via gradio
187
+ shellingham==1.5.4
188
+ # via typer
189
+ six==1.16.0
190
+ # via python-dateutil
191
+ sniffio==1.3.1
192
+ # via
193
+ # anyio
194
+ # httpx
195
+ spaces==0.30.4
196
+ # via rakutenai-7b-chat (pyproject.toml)
197
+ starlette==0.40.0
198
+ # via fastapi
199
+ sympy==1.13.3
200
+ # via torch
201
+ tokenizers==0.20.1
202
+ # via transformers
203
+ tomlkit==0.12.0
204
+ # via gradio
205
+ torch==2.4.0
206
+ # via
207
+ # rakutenai-7b-chat (pyproject.toml)
208
+ # accelerate
209
+ # bitsandbytes
210
+ tqdm==4.66.5
211
+ # via
212
+ # huggingface-hub
213
+ # transformers
214
+ transformers==4.45.2
215
+ # via rakutenai-7b-chat (pyproject.toml)
216
+ triton==3.0.0
217
+ # via torch
218
+ typer==0.12.5
219
+ # via gradio
220
+ typing-extensions==4.12.2
221
+ # via
222
+ # anyio
223
+ # fastapi
224
+ # gradio
225
+ # gradio-client
226
+ # huggingface-hub
227
+ # pydantic
228
+ # pydantic-core
229
+ # rich
230
+ # spaces
231
+ # torch
232
+ # typer
233
+ # uvicorn
234
+ tzdata==2024.2
235
+ # via pandas
236
+ urllib3==2.2.3
237
+ # via requests
238
+ uvicorn==0.32.0
239
+ # via gradio
240
+ websockets==12.0
241
+ # via gradio-client
style.css CHANGED
@@ -9,9 +9,3 @@ h1 {
9
  background: #1565c0;
10
  border-radius: 100vh;
11
  }
12
-
13
- .contain {
14
- max-width: 900px;
15
- margin: auto;
16
- padding-top: 1.5rem;
17
- }
 
9
  background: #1565c0;
10
  border-radius: 100vh;
11
  }
 
 
 
 
 
 
uv.lock ADDED
The diff for this file is too large to render. See raw diff