FantasticGNU committed on
Commit
4f24f82
·
1 Parent(s): b5bac54

Update model/openllama.py

Browse files
Files changed (1) hide show
  1. model/openllama.py +4 -0
model/openllama.py CHANGED
@@ -632,8 +632,10 @@ class OpenLLAMAPEFTModel(nn.Module):
632
  anomaly_map = (100.0 * patch_tokens[layer] @ feats_text_tensor.transpose(-2,-1))
633
  B, L, C = anomaly_map.shape
634
  H = int(np.sqrt(L))
 
635
  anomaly_map = F.interpolate(anomaly_map.permute(0, 2, 1).view(B, 2, H, H),
636
  size=224, mode='bilinear', align_corners=True)
 
637
  anomaly_map = torch.softmax(anomaly_map, dim=1)
638
  anomaly_maps.append(anomaly_map[:,1,:,:])
639
 
@@ -657,7 +659,9 @@ class OpenLLAMAPEFTModel(nn.Module):
657
  sims.append(sim_max)
658
 
659
  sim = torch.mean(torch.stack(sims,dim=0), dim=0).reshape(1,1,16,16)
 
660
  sim = F.interpolate(sim,size=224, mode='bilinear', align_corners=True)
 
661
  anomaly_map_ret = 1 - sim # (anomaly_map_ret + 1 - sim) / 2
662
 
663
 
 
632
  anomaly_map = (100.0 * patch_tokens[layer] @ feats_text_tensor.transpose(-2,-1))
633
  B, L, C = anomaly_map.shape
634
  H = int(np.sqrt(L))
635
+ anomaly_map = anomaly_map.to(torch.float16)
636
  anomaly_map = F.interpolate(anomaly_map.permute(0, 2, 1).view(B, 2, H, H),
637
  size=224, mode='bilinear', align_corners=True)
638
+ anomaly_map = anomaly_map.to(torch.bfloat16)
639
  anomaly_map = torch.softmax(anomaly_map, dim=1)
640
  anomaly_maps.append(anomaly_map[:,1,:,:])
641
 
 
659
  sims.append(sim_max)
660
 
661
  sim = torch.mean(torch.stack(sims,dim=0), dim=0).reshape(1,1,16,16)
662
+ anomaly_map = anomaly_map.to(torch.float16)
663
  sim = F.interpolate(sim,size=224, mode='bilinear', align_corners=True)
664
+ anomaly_map = anomaly_map.to(torch.bfloat16)
665
  anomaly_map_ret = 1 - sim # (anomaly_map_ret + 1 - sim) / 2
666
 
667