Spaces: Runtime error
Upload app.py
app.py
ADDED
@@ -0,0 +1,68 @@
# app.py : Gradio demo that classifies MedNIST images with a MONAI DenseNet121.
import torch
import gradio as gr

from monai.networks.nets import DenseNet121
from monai.transforms import (
    Compose,
    EnsureChannelFirst,
    LoadImage,
    ScaleIntensity,
)

# from PIL import Image   # not needed: LoadImage handles file reading

# 2D DenseNet121 with a single grayscale input channel and six output classes,
# restored from the trained MedNIST checkpoint on CPU.
model = DenseNet121(spatial_dims=2, in_channels=1, out_channels=6)
model.load_state_dict(
    torch.load('weights/mednist_model.pth', map_location=torch.device('cpu'))
)

# Inference preprocessing: read the file, make the layout channel-first,
# and scale intensities to [0, 1].
test_transforms = Compose(
    [LoadImage(image_only=True), EnsureChannelFirst(), ScaleIntensity()]
)

# MedNIST class names, in the order the model was trained on.
class_names = ['AbdomenCT', 'BreastMRI', 'CXR', 'ChestCT', 'Hand', 'HeadCT']

# Left over from an earlier approach that globbed the example images:
# import os, glob
# examples_dir = './samples'
# example_files = glob.glob(os.path.join(examples_dir, '*.jpg'))


def classify_image(image_filepath):
    """Return a {class name: probability} dict for a single image file."""
    img = test_transforms(image_filepath)

    model.eval()
    with torch.no_grad():
        pred = model(img.unsqueeze(dim=0))

    prob = torch.nn.functional.softmax(pred[0], dim=0)

    confidences = {class_names[i]: float(prob[i]) for i in range(len(class_names))}
    print(confidences)  # logged to the Space console

    return confidences


with gr.Blocks(title="Medical Image Classification - ClassCat",
               css=".gradio-container {background:mintcream;}"
               ) as demo:
    gr.HTML("""<div style="font-family:'Times New Roman', 'Serif'; font-size:16pt; font-weight:bold; text-align:center; color:royalblue;">Medical Image Classification with MONAI</div>""")

    with gr.Row():
        # shape= follows the Gradio 3.x Image API (the parameter was removed in Gradio 4).
        input_image = gr.Image(type="filepath", image_mode="L", shape=(64, 64))
        output_label = gr.Label(label="Probabilities", num_top_classes=3)

    send_btn = gr.Button("Infer")
    send_btn.click(fn=classify_image, inputs=input_image, outputs=output_label)

    with gr.Row():
        gr.Examples(['./samples/mednist_AbdomenCT00.png'], label='Sample images : AbdomenCT', inputs=input_image)
        gr.Examples(['./samples/mednist_CXR02.png'], label='CXR', inputs=input_image)
        gr.Examples(['./samples/mednist_ChestCT08.png'], label='ChestCT', inputs=input_image)
        gr.Examples(['./samples/mednist_Hand01.png'], label='Hand', inputs=input_image)
        gr.Examples(['./samples/mednist_HeadCT07.png'], label='HeadCT', inputs=input_image)

    # demo.queue(concurrency_count=3)   # Gradio 3.x queue API
    demo.launch(debug=True)
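
For local debugging it can help to exercise the same preprocessing and model without launching the Gradio interface. The sketch below is not part of the commit; it assumes the weights/mednist_model.pth checkpoint and the bundled ./samples images are available in the working directory, and it rebuilds the pipeline rather than importing from app.py (which calls demo.launch() at import time).

# check_model.py -- hypothetical local smoke test, not part of the uploaded Space.
import torch
from monai.networks.nets import DenseNet121
from monai.transforms import Compose, EnsureChannelFirst, LoadImage, ScaleIntensity

transforms = Compose([LoadImage(image_only=True), EnsureChannelFirst(), ScaleIntensity()])
img = transforms('./samples/mednist_HeadCT07.png')        # tensor of shape (1, H, W)

model = DenseNet121(spatial_dims=2, in_channels=1, out_channels=6)
model.load_state_dict(torch.load('weights/mednist_model.pth', map_location='cpu'))
model.eval()

with torch.no_grad():
    logits = model(img.unsqueeze(0))                      # batch of one -> (1, 6)
probs = torch.softmax(logits[0], dim=0)

class_names = ['AbdomenCT', 'BreastMRI', 'CXR', 'ChestCT', 'Hand', 'HeadCT']
for name, p in sorted(zip(class_names, probs.tolist()), key=lambda t: t[1], reverse=True):
    print(f'{name}: {p:.3f}')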
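
Two of the arguments above, shape= on gr.Image and concurrency_count= on queue(), belong to the Gradio 3.x API and were removed in Gradio 4, so running this file against a newer Gradio is one plausible source of the Space's runtime error. A minimal sketch of a dependency pin for running the app locally (the exact version numbers are assumptions, not taken from the repository):

# requirements.txt (sketch)
torch
monai
gradio==3.50.2

On Spaces itself, the Gradio version comes from the sdk_version field in the README metadata rather than from requirements.txt, so that field would need the same pin.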