Spaces: Running on Zero
BenBranyon committed
Commit • fd0e110
Parent(s): 50111f9
Update app.py
app.py
CHANGED
@@ -13,7 +13,7 @@ DEFAULT_MAX_NEW_TOKENS = 512
 MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))
 
 #Inference API Code
-#client = InferenceClient("
+#client = InferenceClient("Qwen/Qwen2.5-7B-Instruct")
 
 #Transformers Code
 if torch.cuda.is_available():
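For context, a minimal sketch of how the lines touched by this commit might sit in app.py, assuming the huggingface_hub InferenceClient and Transformers APIs the file appears to use. The MODEL_ID used in the Transformers path is an assumption for illustration; only the commented-out InferenceClient target is visible in this diff.

import os

import torch
from huggingface_hub import InferenceClient
from transformers import AutoModelForCausalLM, AutoTokenizer

DEFAULT_MAX_NEW_TOKENS = 512
MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))

# Inference API path (left commented out in the Space, kept as a reference):
# client = InferenceClient("Qwen/Qwen2.5-7B-Instruct")

# Transformers path: load the model locally when a GPU is available.
MODEL_ID = "Qwen/Qwen2.5-7B-Instruct"  # assumption: same model as the client line
if torch.cuda.is_available():
    tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
    model = AutoModelForCausalLM.from_pretrained(
        MODEL_ID,
        torch_dtype=torch.float16,
        device_map="auto",
    )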