OmerFarooq committed
Commit 2f37c1e
1 Parent(s): 03b3f8d

Update app.py

Files changed (1)
  1. app.py +50 -22
app.py CHANGED
@@ -24,30 +24,46 @@ def img_pros(img):
 
 #function for creating model
 #returns model, its inputs, Xception's last conv output, the whole model's outputs
-def create_model_mod():
+def create_model_mod(classes, activation):
     inputs = keras.Input(shape = (160,160,3))
     #normalizing pixel values
     r = Rescaling(scale = 1./255)(inputs)
     x = base_model(r, training = False)
     gap = keras.layers.GlobalAveragePooling2D()(x)
-    outputs = keras.layers.Dense(1,activation = 'linear')(gap)
+    outputs = keras.layers.Dense(classes ,activation = activation)(gap)
     model = keras.Model(inputs, outputs)
 
+    if activation == "linear":
+        loss_s = keras.losses.BinaryCrossentropy(from_logits = True)
+    else:
+        loss_s = keras.losses.BinaryCrossentropy()
+
     model.compile(
-        loss = keras.losses.BinaryCrossentropy(from_logits = True),
+        loss = loss_s,
         optimizer = keras.optimizers.Adam(0.001),
         metrics = ["accuracy"]
     )
 
     return model, inputs, x, outputs
 
-def create_heatmap(model, imgs):
-    #predicting the images and getting the conv outputs and predictions
+
+#create heatmaps of the given images
+#returns the heatmaps and the raw score of predicted class of each image
+def create_heatmap(model, imgs, class_index):
+    model.layers[-1].activation = None
+
+    #predicting the images and getting the conv outputs and predictions from the gradcam model
     with tf.GradientTape() as tape:
-        maps, preds = model(imgs);
+        maps, preds = model(imgs);
+
+        # class_channel = tf.expand_dims(preds[:,class_index],axis = 1)
+        class_channel = preds[:, class_index]
 
     #computing gradients of predictions w.r.t the feature maps
-    grads = tape.gradient(preds, maps)
+    if class_index == -1:
+        grads = tape.gradient(preds, maps)
+    else:
+        grads = tape.gradient(class_channel, maps)
 
     # global average pooling of each feature map
     gap_grads = tf.reduce_mean(grads, axis=(0, 1, 2))
@@ -64,6 +80,8 @@ def create_heatmap(model, imgs):
 
     return heatmap, preds.numpy()
 
+
+#superimpose function buth for a single input image
 def superimpose_single(heatmap, img, alpha = 0.4):
     heatmap = np.uint8(255 * heatmap)
 
@@ -85,37 +103,47 @@ def superimpose_single(heatmap, img, alpha = 0.4):
 
     return superimposed_img
 
-def gen_grad_img_single(weights, img, alpha = 0.4):
-    model_mod, input, x, output = create_model_mod()
-    model_mod.load_weights(weights)
-    grad_model = Model(input, [x, output])
-    heatmaps, y_pred = create_heatmap(grad_model, img)
+#for generating single gradcam image
+def gen_grad_img_single(grad_model, img, class_index, alpha = 0.4):
+    heatmaps, y_pred = create_heatmap(grad_model, img, class_index)
 
     # for i in range(len(y_pred)):
-    #     if y_pred[i] > 0.5: y_pred[i] = 1
-    #     else: y_pred[i] = 0
+    #     if y_pred[i] > 0.5: y_pred[i] = 1
+    #     else: y_pred[i] = 0
 
     img = superimpose_single(heatmaps, img[0])
     return np.array(img).astype('uint8'), y_pred
 
 
+def gen_grad_both(grad_model, imgs, y_true, size, cols, font_size):
+    img_c, y_pred_c = gen_grad_img_single(grad_model, imgs, 0)
+    img_d, y_pred_d = gen_grad_img_single(grad_model, imgs, 1)
+    y_pred_c = np.around(y_pred_c,3)
+    y_pred_d = np.around(y_pred_d,3)
+    # show_imgs([img_c, img_d], [y_true, y_true], [size[0], size[1]], cols, [y_pred_c, y_pred_d], font_size = font_size)
+
+    infer = ""
+    if y_pred_c[0] > y_pred_c[1]: infer = "cat"
+    else: infer = "dog"
+
+    return [img_c, img_d], y_pred_c, infer
+
 weights = "weights.h5"
-# img, y_pred = gen_grad_img_single(weights, img)
 
 def get_grad(img):
     img = img_pros(img)
-    grad_img, y_pred = gen_grad_img_single(weights, img)
-    pred_class = ""
-    if y_pred[0] > 0.5: pred_class = "cat"
-    else: pred_class = "dog"
+    grad_imgs, y_pred, infer = gen_grad_img_single(weights, img)
+    # pred_class = ""
+    # if y_pred[0] > 0.5: pred_class = "cat"
+    # else: pred_class = "dog"
 
-    text = "Raw Score: " + str(y_pred[0]) + "\nClassification: " + pred_class
-    return grad_img, text
+    text = "Raw Score: " + str(y_pred[0]) + "\nClassification: " + infer
+    return grad_imgs, text
 
 demo = gr.Interface(
     fn = get_grad,
     inputs = gr.Image(type = "pil", shape = (224,224)),
-    outputs = [gr.Image(type = "numpy", width = 320, height = 320), gr.Textbox(label = 'Prediction', info = '(threshold: 0.5)')],
+    outputs = [gr.Image(type = "numpy", width = 320, height = 320), gr.Image(type = "numpy", width = 320, height = 320), gr.Textbox(label = 'Prediction', info = '(threshold: 0.5)')],
     description = "Visual Explanations from Deep Networks",
     title = "Gradient-Weighted Class Activation Mapping (Grad-CAM)"
 )
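
Note on wiring the new pieces together: after this commit, get_grad still calls gen_grad_img_single(weights, img) and unpacks three values, while the new gen_grad_img_single expects a prebuilt Grad-CAM model plus a class index and returns two; gen_grad_both is the function that produces two heatmaps and an inferred label. The following is a minimal sketch, not part of the commit, of one way the new functions could be combined. It assumes a two-unit linear (logit) head, that gen_grad_both is the intended entry point, and that the definitions and imports from app.py above are in scope; the None placeholders stand in for y_true and size, which the committed body only uses in the commented-out show_imgs call.

# Sketch only (assumptions noted above): build the modified classifier with a
# two-logit head, load the committed weights, and wrap it as a Grad-CAM model
# that returns the last conv feature maps alongside the predictions.
model_mod, inputs, conv_maps, outputs = create_model_mod(2, "linear")
model_mod.load_weights("weights.h5")
grad_model = keras.Model(inputs, [conv_maps, outputs])

def get_grad(img):
    img = img_pros(img)
    # class index 0 -> "cat" heatmap, 1 -> "dog" heatmap; cols/font_size only
    # matter for the commented-out show_imgs call inside gen_grad_both
    grad_imgs, y_pred, infer = gen_grad_both(grad_model, img, None, None, 1, 12)
    text = "Raw Score: " + str(y_pred[0]) + "\nClassification: " + infer
    # two heatmap images plus one textbox, matching the three Gradio outputs
    return grad_imgs[0], grad_imgs[1], text

If only a single heatmap were wanted, gen_grad_img_single(grad_model, img, 0) could be called directly with the same prebuilt grad_model.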