hugging2021 committed
Commit: 120c05f
1 Parent(s): f9d9a78

Update requirements.txt

Changed files: requirements.txt (+26 -6)
requirements.txt CHANGED
@@ -1,18 +1,38 @@
-accelerate==0.
+accelerate==0.30.*
 colorama
 datasets
 einops
 gradio==4.26.*
 hqq==0.1.7.post3
-jinja2==3.1.
+jinja2==3.1.2
 lm_eval==0.3.0
 markdown
 numba==0.59.*
+numpy==1.26.*
+optimum==1.17.*
+pandas
+peft==0.8.*
+Pillow>=9.5.0
+psutil
+pyyaml
+requests
+rich
+safetensors==0.4.*
+scipy
+sentencepiece
+tensorboard
+transformers==4.41.*
+tqdm
+wandb
+
+# API
+SpeechRecognition==3.10.0
+flask_cloudflared==0.0.14
 sse-starlette==1.6.5
 tiktoken
 
 # llama-cpp-python (CPU only, no AVX2)
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.79+cpuavx-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.79+cpuavx-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.79+cpuavx-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.79+cpuavx-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"