Spaces:
Runtime error
Create app.py
app.py
ADDED
@@ -0,0 +1,50 @@
import json
import gradio as gr
import yolov5
from PIL import Image
from huggingface_hub import hf_hub_download

app_title = "Detect defects in bird nest jar"
models_ids = ['linhcuem/defects_nest_jar_yolov5']

current_model_id = models_ids[-1]
model = yolov5.load(current_model_id)

examples = [
    ['test_images/16823291638707408-a2A2448-23gmBAS_40174045.jpg', 0.25, 'linhcuem/defects_nest_jar_yolov5'],
    ['test_images/16823292102253310-a2A2448-23gmBAS_40174046.jpg', 0.25, 'linhcuem/defects_nest_jar_yolov5'],
    ['test_images/16823291808953550-a2A2448-23gmBAS_40174048.jpg', 0.25, 'linhcuem/defects_nest_jar_yolov5'],
    ['test_images/16823291801532480-a2A2448-23gmBAS_40174048.jpg', 0.25, 'linhcuem/defects_nest_jar_yolov5'],
]


def predict(image, threshold=0.25, model_id=None):
    # Reload the model if a different checkpoint was selected in the dropdown.
    global current_model_id
    global model
    if model_id != current_model_id:
        model = yolov5.load(model_id)
        current_model_id = model_id

    # Read the model's expected input size from its config on the Hub.
    config_path = hf_hub_download(repo_id=model_id, filename="config.json")
    with open(config_path, "r") as f:
        config = json.load(f)
    input_size = config["input_size"]

    # Run inference and render the detections onto the image.
    model.conf = threshold
    results = model(image, size=input_size)
    numpy_image = results.render()[0]
    output_image = Image.fromarray(numpy_image)
    return output_image


gr.Interface(
    title=app_title,
    description="By Mr. Dat",
    fn=predict,
    inputs=[
        gr.Image(type="pil"),
        gr.Slider(maximum=1, step=0.01, value=0.25),
        gr.Dropdown(models_ids, value=models_ids[-1]),
    ],
    outputs=gr.Image(type="pil"),
    examples=examples,
    cache_examples=True if examples else False,
).launch(enable_queue=True)
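
For a quick local sanity check outside the Gradio UI, predict can also be called directly on one of the bundled example images. A minimal sketch, assuming the test_images folder from this Space is available locally and the snippet runs before the blocking launch() call; the output filename is illustrative:

from PIL import Image

# Hypothetical local test: open one of the example images listed above.
img = Image.open('test_images/16823291638707408-a2A2448-23gmBAS_40174045.jpg')
# Pass the model id explicitly, since predict downloads config.json for that repo.
out = predict(img, threshold=0.25, model_id='linhcuem/defects_nest_jar_yolov5')
out.save('prediction.jpg')  # rendered detections written to an illustrative path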