import torch.nn as nn
import torch.nn.functional as F

import cliport.utils.utils as utils
from cliport.models.resnet import IdentityBlock, ConvBlock
from cliport.models.core.unet import Up
from cliport.models.clip_lingunet_lat import CLIPLingUNetLat

from cliport.models.core import fusion
from cliport.models.core.fusion import FusionConvLat
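
# Illustration (hedged, not part of the original file): the 'film' entry in
# fusion.names applies FiLM (Feature-wise Linear Modulation), i.e. a
# language-conditioned per-channel scale and shift of the visual features.
# A minimal sketch of that idea, assuming gamma/beta are nn.Linear heads over
# a (B, D) sentence embedding; the real module lives in
# cliport.models.core.fusion and may differ in detail.
def _film_modulate_sketch(x, l_emb, gamma, beta):
    """x: (B, C, H, W) features; l_emb: (B, D) embedding; gamma/beta: nn.Linear."""
    g = gamma(l_emb).unsqueeze(-1).unsqueeze(-1)  # (B, C, 1, 1) scale
    b = beta(l_emb).unsqueeze(-1).unsqueeze(-1)   # (B, C, 1, 1) shift
    return g * x + b
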
class CLIPFilmLingUNet(CLIPLingUNetLat):
    """ CLIP RN50 with U-Net skip connections and FiLM language fusion """

    def __init__(self, input_shape, output_dim, cfg, device, preprocess):
        super().__init__(input_shape, output_dim, cfg, device, preprocess)
    def _build_decoder(self):
        # language: FiLM fusion at three decoder resolutions
        # (input_dim is the encoder channel width; the fusers operate at
        # 1/2, 1/4, and 1/8 of it)
        self.lang_fusion_type = 'film'
        self.lang_fuser1 = fusion.names[self.lang_fusion_type](input_dim=self.input_dim // 2)
        self.lang_fuser2 = fusion.names[self.lang_fusion_type](input_dim=self.input_dim // 4)
        self.lang_fuser3 = fusion.names[self.lang_fusion_type](input_dim=self.input_dim // 8)

        # project the 1024-dim CLIP sentence encoding to per-channel
        # gamma (scale) and beta (shift) parameters for each fusion stage
        self.proj_input_dim = 1024
        self.lang_gamma1 = nn.Linear(self.proj_input_dim, 1024)
        self.lang_gamma2 = nn.Linear(self.proj_input_dim, 512)
        self.lang_gamma3 = nn.Linear(self.proj_input_dim, 256)

        self.lang_beta1 = nn.Linear(self.proj_input_dim, 1024)
        self.lang_beta2 = nn.Linear(self.proj_input_dim, 512)
        self.lang_beta3 = nn.Linear(self.proj_input_dim, 256)

        # vision: upsampling decoder with lateral fusion at every scale
        self.conv1 = nn.Sequential(
            nn.Conv2d(self.input_dim, 1024, kernel_size=3, stride=1, padding=1, bias=False),
            nn.ReLU(True)
        )

        self.up1 = Up(2048, 1024 // self.up_factor, self.bilinear)
        self.lat_fusion1 = FusionConvLat(input_dim=1024+512, output_dim=512)

        self.up2 = Up(1024, 512 // self.up_factor, self.bilinear)
        self.lat_fusion2 = FusionConvLat(input_dim=512+256, output_dim=256)

        self.up3 = Up(512, 256 // self.up_factor, self.bilinear)
        self.lat_fusion3 = FusionConvLat(input_dim=256+128, output_dim=128)

        self.layer1 = nn.Sequential(
            ConvBlock(128, [64, 64, 64], kernel_size=3, stride=1, batchnorm=self.batchnorm),
            IdentityBlock(64, [64, 64, 64], kernel_size=3, stride=1, batchnorm=self.batchnorm),
            nn.UpsamplingBilinear2d(scale_factor=2),
        )
        self.lat_fusion4 = FusionConvLat(input_dim=128+64, output_dim=64)

        self.layer2 = nn.Sequential(
            ConvBlock(64, [32, 32, 32], kernel_size=3, stride=1, batchnorm=self.batchnorm),
            IdentityBlock(32, [32, 32, 32], kernel_size=3, stride=1, batchnorm=self.batchnorm),
            nn.UpsamplingBilinear2d(scale_factor=2),
        )
        self.lat_fusion5 = FusionConvLat(input_dim=64+32, output_dim=32)

        self.layer3 = nn.Sequential(
            ConvBlock(32, [16, 16, 16], kernel_size=3, stride=1, batchnorm=self.batchnorm),
            IdentityBlock(16, [16, 16, 16], kernel_size=3, stride=1, batchnorm=self.batchnorm),
            nn.UpsamplingBilinear2d(scale_factor=2),
        )
        self.lat_fusion6 = FusionConvLat(input_dim=32+16, output_dim=16)

        # final 1x1 conv to the requested number of output channels
        self.conv2 = nn.Sequential(
            nn.Conv2d(16, self.output_dim, kernel_size=1)
        )
    def forward(self, x, lat, l):
        x = self.preprocess(x, dist='clip')

        in_type = x.dtype
        in_shape = x.shape
        x = x[:, :3]  # select RGB, drop depth channels
        x, im = self.encode_image(x)
        x = x.to(in_type)

        # encode the language goal with CLIP's text encoder
        l_enc, l_emb, l_mask = self.encode_text(l)
        l_input = l_enc
        l_input = l_input.to(dtype=x.dtype)

        assert x.shape[1] == self.input_dim
        x = self.conv1(x)

        # decoder: FiLM language fusion -> upsample with CLIP skip
        # connection -> lateral fusion with the parallel stream
        x = self.lang_fuser1(x, l_input, gamma=self.lang_gamma1, beta=self.lang_beta1)
        x = self.up1(x, im[-2])
        x = self.lat_fusion1(x, lat[-6])

        x = self.lang_fuser2(x, l_input, gamma=self.lang_gamma2, beta=self.lang_beta2)
        x = self.up2(x, im[-3])
        x = self.lat_fusion2(x, lat[-5])

        x = self.lang_fuser3(x, l_input, gamma=self.lang_gamma3, beta=self.lang_beta3)
        x = self.up3(x, im[-4])
        x = self.lat_fusion3(x, lat[-4])

        x = self.layer1(x)
        x = self.lat_fusion4(x, lat[-3])

        x = self.layer2(x)
        x = self.lat_fusion5(x, lat[-2])

        x = self.layer3(x)
        x = self.lat_fusion6(x, lat[-1])

        x = self.conv2(x)

        # resize back to the input spatial resolution
        x = F.interpolate(x, size=(in_shape[-2], in_shape[-1]), mode='bilinear')
        return x
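
# Hypothetical usage sketch (not from the original file). The exact cfg
# dictionary, preprocess function, and lateral-feature list come from the
# CLIPort training pipeline; the names and shapes below are assumptions:
#
#   model = CLIPFilmLingUNet(input_shape=(320, 160, 6), output_dim=1,
#                            cfg=cfg, device=device,
#                            preprocess=utils.preprocess)
#   # x: (B, 6, H, W) RGB-D tensor; lat: list of lateral feature maps from
#   # the parallel stream; l: language goal string
#   out = model(x, lat, "pack the red block into the brown box")  # (B, 1, H, W)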