alibidaran committed on
Commit
5fc0e99
1 Parent(s): 40cc132

Update app.py

Files changed (1)
  1. app.py +13 -5
app.py CHANGED
@@ -3,7 +3,7 @@ import gradio as gr
 import numpy as np
 from PIL import Image
 import cv2
-from torchvision import models,transforms
+#from torchvision import models,transforms
 core = Core()
 
 # Read model to OpenVINO Runtime
@@ -14,14 +14,22 @@ tfms = transforms.Compose([
     transforms.ToTensor(),
     transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) # imagenet
 ])
+def normalize(img):
+    img=img.astype(np.float32)
+    mean=(0.485, 0.456, 0.406)
+    std=(0.229, 0.224, 0.225)
+    img/=255.0
+    img-=mean
+    img/=std
+    return img
 def segment_image(filepath):
     image=cv2.imread(filepath)
     image=cv2.cvtColor(image,cv2.COLOR_BGR2RGB)
     image = cv2.resize(image, (512,512))
-    x=tfms(image.copy()/255.)
-    #ort_input={ort_session.get_inputs()[0].name:x.cpu().unsqueeze(0).float().numpy()}
-    #out=ort_session.run(None,ort_input)
-    out = compiled_model_onnx([x.unsqueeze(0).float().cpu().numpy()])
+    x=normalize(image.copy())
+    #ort_input={ort_session.get_inputs()[0].name:x.cpu().unsqueeze(0).float().numpy()}
+    #out=ort_session.run(None,ort_input)
+    out = compiled_model_onnx(np.expand_dims(x,axis=0).transpose(0,3,1,2))
     pred_mask=np.squeeze(np.argmax(out[0],1)).astype(np.uint8)
     color_mask=cv2.applyColorMap(pred_mask,cv2.COLORMAP_MAGMA)*10
     masked_image=cv2.addWeighted(image,0.6,color_mask,0.4,0.1)
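For context, a minimal standalone sketch of the preprocessing this commit switches to: plain NumPy reproducing what torchvision's ToTensor + Normalize did (scale to [0, 1], subtract the ImageNet mean, divide by the std), followed by the HWC-to-NCHW reshape done before calling compiled_model_onnx in app.py. The dummy input and the shape check are illustrative only and are not part of app.py.

import numpy as np

# ImageNet statistics, matching the values used in app.py
MEAN = np.array([0.485, 0.456, 0.406], dtype=np.float32)
STD = np.array([0.229, 0.224, 0.225], dtype=np.float32)

def normalize(img):
    # uint8 HWC RGB -> float32 in [0, 1], then channel-wise standardization
    img = img.astype(np.float32) / 255.0
    img -= MEAN
    img /= STD
    return img

# Illustrative input only: a random 512x512 RGB "image"
dummy = np.random.randint(0, 256, (512, 512, 3), dtype=np.uint8)
x = normalize(dummy)

# The model exported from PyTorch expects NCHW, hence the
# expand_dims + transpose used in app.py before inference
batch = np.expand_dims(x, axis=0).transpose(0, 3, 1, 2)
print(batch.shape)  # (1, 3, 512, 512)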