Spaces:
Runtime error
Runtime error
dipankardas011
committed on
removed the torch_dtype
Browse files
Signed-off-by: Dipankar Das <[email protected]>
- app.py +2 -2
- logger.log +43 -0
app.py
CHANGED
@@ -4,7 +4,7 @@ from fastapi.responses import RedirectResponse
|
|
4 |
|
5 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
6 |
import transformers
|
7 |
-
import torch
|
8 |
|
9 |
model = "tiiuae/falcon-7b"
|
10 |
|
@@ -13,11 +13,11 @@ pipeline = transformers.pipeline(
|
|
13 |
"text-generation",
|
14 |
model=model,
|
15 |
tokenizer=tokenizer,
|
16 |
-
torch_dtype=torch.bfloat16,
|
17 |
trust_remote_code=True,
|
18 |
device_map="auto",
|
19 |
)
|
20 |
|
|
|
21 |
|
22 |
app = FastAPI()
|
23 |
|
|
|
4 |
|
5 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
6 |
import transformers
|
7 |
+
# import torch
|
8 |
|
9 |
model = "tiiuae/falcon-7b"
|
10 |
|
|
|
13 |
"text-generation",
|
14 |
model=model,
|
15 |
tokenizer=tokenizer,
|
|
|
16 |
trust_remote_code=True,
|
17 |
device_map="auto",
|
18 |
)
|
19 |
|
20 |
+
# torch_dtype=torch.bfloat16,
|
21 |
|
22 |
app = FastAPI()
|
23 |
|
logger.log
ADDED
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Downloading shards: 100%|██████████| 2/2 [03:33<00:00, 99.67s/it]
|
2 |
+
Downloading shards: 100%|██████████| 2/2 [03:33<00:00, 106.81s/it]
|
3 |
+
Traceback (most recent call last):
|
4 |
+
File "/usr/local/bin/uvicorn", line 8, in <module>
|
5 |
+
sys.exit(main())
|
6 |
+
File "/usr/local/lib/python3.9/site-packages/click/core.py", line 1130, in __call__
|
7 |
+
return self.main(*args, **kwargs)
|
8 |
+
File "/usr/local/lib/python3.9/site-packages/click/core.py", line 1055, in main
|
9 |
+
rv = self.invoke(ctx)
|
10 |
+
File "/usr/local/lib/python3.9/site-packages/click/core.py", line 1404, in invoke
|
11 |
+
return ctx.invoke(self.callback, **ctx.params)
|
12 |
+
File "/usr/local/lib/python3.9/site-packages/click/core.py", line 760, in invoke
|
13 |
+
return __callback(*args, **kwargs)
|
14 |
+
File "/usr/local/lib/python3.9/site-packages/uvicorn/main.py", line 437, in main
|
15 |
+
run(app, **kwargs)
|
16 |
+
File "/usr/local/lib/python3.9/site-packages/uvicorn/main.py", line 463, in run
|
17 |
+
server.run()
|
18 |
+
File "/usr/local/lib/python3.9/site-packages/uvicorn/server.py", line 60, in run
|
19 |
+
return asyncio.run(self.serve(sockets=sockets))
|
20 |
+
File "/usr/local/lib/python3.9/asyncio/runners.py", line 44, in run
|
21 |
+
return loop.run_until_complete(main)
|
22 |
+
File "uvloop/loop.pyx", line 1517, in uvloop.loop.Loop.run_until_complete
|
23 |
+
File "/usr/local/lib/python3.9/site-packages/uvicorn/server.py", line 67, in serve
|
24 |
+
config.load()
|
25 |
+
File "/usr/local/lib/python3.9/site-packages/uvicorn/config.py", line 458, in load
|
26 |
+
self.loaded_app = import_from_string(self.app)
|
27 |
+
File "/usr/local/lib/python3.9/site-packages/uvicorn/importer.py", line 21, in import_from_string
|
28 |
+
module = importlib.import_module(module_str)
|
29 |
+
File "/usr/local/lib/python3.9/importlib/__init__.py", line 127, in import_module
|
30 |
+
return _bootstrap._gcd_import(name[level:], package, level)
|
31 |
+
File "<frozen importlib._bootstrap>", line 1030, in _gcd_import
|
32 |
+
File "<frozen importlib._bootstrap>", line 1007, in _find_and_load
|
33 |
+
File "<frozen importlib._bootstrap>", line 986, in _find_and_load_unlocked
|
34 |
+
File "<frozen importlib._bootstrap>", line 680, in _load_unlocked
|
35 |
+
File "<frozen importlib._bootstrap_external>", line 850, in exec_module
|
36 |
+
File "<frozen importlib._bootstrap>", line 228, in _call_with_frames_removed
|
37 |
+
File "/home/user/app/./app.py", line 12, in <module>
|
38 |
+
pipeline = transformers.pipeline(
|
39 |
+
File "/usr/local/lib/python3.9/site-packages/transformers/pipelines/__init__.py", line 788, in pipeline
|
40 |
+
framework, model = infer_framework_load_model(
|
41 |
+
File "/usr/local/lib/python3.9/site-packages/transformers/pipelines/base.py", line 278, in infer_framework_load_model
|
42 |
+
raise ValueError(f"Could not load model {model} with any of the following classes: {class_tuple}.")
|
43 |
+
ValueError: Could not load model tiiuae/falcon-7b with any of the following classes: (<class 'transformers.models.auto.modeling_auto.AutoModelForCausalLM'>,).
|