Spaces: Sleeping

Commit: "Preparing submission" — Browse files
- requirements.txt (+1 −1)
- tasks/image.py (+2 −2)

requirements.txt — CHANGED
@@ -14,4 +14,4 @@ torch==2.5.1
 torchvision==0.20.1
 onnx==1.17.0
 onnxslim==0.1.48
-onnxruntime
+onnxruntime
tasks/image.py — CHANGED
@@ -112,7 +112,7 @@ async def evaluate_image(request: ImageEvaluationRequest):
     # print("CUDA available, loading FP16 pytorch model")
     model_name = "pruned_fp16.pt"
     model = YOLO(Path(model_path, model_name), task="detect")
-    #
+    # device_name = device("cuda")
     device_name = device("cuda" if is_available() else "cpu")
     IMGSIZE = 1280
     # # If not, load FP16 ONNX model
@@ -120,7 +120,7 @@ async def evaluate_image(request: ImageEvaluationRequest):
     # print("CUDA not, available, loading ONNX model")
     # model_name = "640_fp16_cpu.onnx"
     # model = YOLO(Path(model_path, model_name), task="detect")
-    #
+    # device_name = device("cpu")
     # IMGSIZE = 640 # required to make CPU inference a bit fast

     predictions = []