Spaces:
Running
on
Zero
Running
on
Zero
Update for ZeroGPU
Browse files
- README.md +1 -1
- app.py +2 -0
- requirements.txt +3 -2
README.md
CHANGED
@@ -4,7 +4,7 @@ emoji: π
|
|
4 |
colorFrom: purple
|
5 |
colorTo: gray
|
6 |
sdk: gradio
|
7 |
-
sdk_version: 3.47.
|
8 |
app_file: app.py
|
9 |
pinned: false
|
10 |
suggested_hardware: a10g-small
|
|
|
4 |
colorFrom: purple
|
5 |
colorTo: gray
|
6 |
sdk: gradio
|
7 |
+
sdk_version: 3.47.1
|
8 |
app_file: app.py
|
9 |
pinned: false
|
10 |
suggested_hardware: a10g-small
|
app.py
CHANGED
@@ -2,6 +2,7 @@ from threading import Thread
|
|
2 |
from typing import Iterator
|
3 |
|
4 |
import gradio as gr
|
|
|
5 |
import torch
|
6 |
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
|
7 |
|
@@ -39,6 +40,7 @@ if torch.cuda.is_available():
|
|
39 |
tokenizer.use_default_system_prompt = False
|
40 |
|
41 |
|
|
|
42 |
def generate(
|
43 |
message: str,
|
44 |
chat_history: list[tuple[str, str]],
|
|
|
2 |
from typing import Iterator
|
3 |
|
4 |
import gradio as gr
|
5 |
+
import spaces
|
6 |
import torch
|
7 |
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
|
8 |
|
|
|
40 |
tokenizer.use_default_system_prompt = False
|
41 |
|
42 |
|
43 |
+
@spaces.GPU
|
44 |
def generate(
|
45 |
message: str,
|
46 |
chat_history: list[tuple[str, str]],
|
requirements.txt
CHANGED
@@ -1,8 +1,9 @@
|
|
1 |
accelerate==0.23.0
|
2 |
bitsandbytes==0.41.1
|
3 |
-
gradio==3.47.
|
4 |
protobuf==3.20.3
|
5 |
scipy==1.11.2
|
6 |
sentencepiece==0.1.99
|
7 |
-
|
|
|
8 |
transformers==4.34.0
|
|
|
1 |
accelerate==0.23.0
|
2 |
bitsandbytes==0.41.1
|
3 |
+
gradio==3.47.1
|
4 |
protobuf==3.20.3
|
5 |
scipy==1.11.2
|
6 |
sentencepiece==0.1.99
|
7 |
+
spaces==0.16.1
|
8 |
+
torch==2.0.0
|
9 |
transformers==4.34.0
|