Example 1
    def __init__(self,
                 im_size,
                 num_classes,
                 attention=True,
                 normalize_attn=True,
                 init='xavierUniform',
                 interpolate=False):
        super(AttnVGG_after, self).__init__()
        if interpolate:
            self.interpolate = Interpolate(size=(im_size, im_size),
                                           mode='nearest')
        else:
            self.interpolate = None
        self.attention = attention
        # conv blocks
        self.conv_block1 = ConvBlock(3, 64, 2)
        self.conv_block2 = ConvBlock(64, 128, 2)
        self.conv_block3 = ConvBlock(128, 256, 3)
        self.conv_block4 = ConvBlock(256, 512, 3)
        self.conv_block5 = ConvBlock(512, 512, 3)
        self.conv_block6 = ConvBlock(512, 512, 2, pool=True)
        self.dense = nn.Conv2d(in_channels=512,
                               out_channels=512,
                               kernel_size=int(im_size / 32),
                               padding=0,
                               bias=True)
        # Projectors & Compatibility functions
        if self.attention:
            self.projector = ProjectorBlock(256, 512)
            self.attn1 = LinearAttentionBlock(in_features=512,
                                              normalize_attn=normalize_attn)
            self.attn2 = LinearAttentionBlock(in_features=512,
                                              normalize_attn=normalize_attn)
            self.attn3 = LinearAttentionBlock(in_features=512,
                                              normalize_attn=normalize_attn)
        # final classification layer
        if self.attention:
            self.classify = nn.Linear(in_features=512 * 3,
                                      out_features=num_classes,
                                      bias=True)
        else:
            self.classify = nn.Linear(in_features=512,
                                      out_features=num_classes,
                                      bias=True)
        # initialize
        if init == 'kaimingNormal':
            weights_init_kaimingNormal(self)
        elif init == 'kaimingUniform':
            weights_init_kaimingUniform(self)
        elif init == 'xavierNormal':
            weights_init_xavierNormal(self)
        elif init == 'xavierUniform':
            weights_init_xavierUniform(self)
        else:
            raise NotImplementedError("Invalid type of initialization!")
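This snippet is only the constructor of `AttnVGG_after`; `ConvBlock`, `ProjectorBlock`, `LinearAttentionBlock`, `Interpolate`, and the `weights_init_*` helpers are defined elsewhere in the source repository. Note that `kernel_size=int(im_size / 32)` makes the final `dense` convolution collapse the last feature map to 1x1, so `im_size` should be a multiple of 32, and the `512 * 3` classifier input corresponds to concatenating the three attention-pooled 512-d feature vectors. A minimal usage sketch, assuming the complete class (including its forward method) is importable:

# Usage sketch (hypothetical): assumes AttnVGG_after and its helper
# blocks are imported from the surrounding repository.
import torch

model = AttnVGG_after(im_size=224, num_classes=10)
x = torch.randn(8, 3, 224, 224)  # batch of RGB images; im_size divisible by 32
# out = model(x)  # the forward pass is not shown here; models in this family
#                 # typically return logits plus the attention maps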
Example 2
    def __init__(self, num_classes, attention=True, normalize_attn=True, init='default'):
        super(AttnVGG_before, self).__init__()
        self.attention = attention
        # conv blocks
        self.conv_block1 = ConvBlock(3, 64, 2)
        self.conv_block2 = ConvBlock(64, 128, 2)
        self.conv_block3 = ConvBlock(128, 256, 3)
        self.conv_block4 = ConvBlock(256, 512, 3)
        self.conv_block5 = ConvBlock(512, 512, 3)
        #         self.conv_block6 = ConvBlock(512, 512, 2, pool=True)
        #         self.dense = nn.Conv2d(in_channels=512, out_channels=512, kernel_size=int(im_size/32), padding=0, bias=True)
        self.dense1 = nn.Conv2d(in_channels=512, out_channels=4096, kernel_size=7, padding=0, bias=True)
        self.dense2 = nn.Conv2d(in_channels=4096, out_channels=4096, kernel_size=1, padding=0, bias=True)
        self.dense3 = nn.Conv2d(in_channels=4096, out_channels=512, kernel_size=1, padding=0, bias=True)

        # Projectors & Compatibility functions
        if self.attention:
            self.projector = ProjectorBlock(256, 512)
            self.attn1 = LinearAttentionBlock(in_features=512, normalize_attn=normalize_attn)
            self.attn2 = LinearAttentionBlock(in_features=512, normalize_attn=normalize_attn)
            self.attn3 = LinearAttentionBlock(in_features=512, normalize_attn=normalize_attn)
        # final classification layer
        if self.attention:
            self.classify = nn.Linear(in_features=512 * 3, out_features=num_classes, bias=True)
        else:
            self.classify = nn.Linear(in_features=512, out_features=num_classes, bias=True)
        # initialize
        if init == 'kaimingNormal':
            weights_init_kaimingNormal(self)
        elif init == 'kaimingUniform':
            weights_init_kaimingUniform(self)
        elif init == 'xavierNormal':
            weights_init_xavierNormal(self)
        elif init == 'xavierUniform':
            weights_init_xavierUniform(self)
        else:
            print("Initializing Default weights")
            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
                    if m.bias is not None:
                        nn.init.constant_(m.bias, 0)
                elif isinstance(m, nn.BatchNorm2d):
                    nn.init.constant_(m.weight, 1)
                    nn.init.constant_(m.bias, 0)
                elif isinstance(m, nn.Linear):
                    nn.init.normal_(m.weight, 0, 0.01)
                    nn.init.constant_(m.bias, 0)
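Unlike Example 1, this variant takes no `im_size` argument: `dense1` hard-codes a 7x7 kernel, which only collapses the final feature map to 1x1 when the input is 224x224 (224 / 32 = 7). The three `dense` convolutions reproduce VGG's fully connected head in convolutional form. A small standalone shape check of that head (the 7x7 input size is an assumption implied by the kernel choice):

import torch
import torch.nn as nn

# The three "dense" convolutions act like fc6 -> fc7 -> a 512-d projection.
dense1 = nn.Conv2d(512, 4096, kernel_size=7, padding=0, bias=True)
dense2 = nn.Conv2d(4096, 4096, kernel_size=1, padding=0, bias=True)
dense3 = nn.Conv2d(4096, 512, kernel_size=1, padding=0, bias=True)

feat = torch.randn(1, 512, 7, 7)  # final VGG feature map for a 224x224 input
g = dense3(dense2(dense1(feat)))
print(g.shape)                    # torch.Size([1, 512, 1, 1])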
Example 3
    def __init__(self,
                 im_size,
                 num_classes,
                 attention=True,
                 normalize_attn=True,
                 init='xavierUniform',
                 _base_features=64,
                 dropout=0.0):
        super(AttnVGG_before, self).__init__()

        # self.base_features = 64
        self.base_features = _base_features

        self.dropout = nn.Dropout(p=dropout)

        self.attention = attention
        # conv blocks
        self.conv_block1 = ConvBlock(1, self.base_features, 2, dropout=dropout)
        self.conv_block2 = ConvBlock(self.base_features,
                                     self.base_features * 2,
                                     2,
                                     dropout=dropout)
        self.conv_block3 = ConvBlock(self.base_features * 2,
                                     self.base_features * 4,
                                     3,
                                     dropout=dropout)
        self.conv_block4 = ConvBlock(self.base_features * 4,
                                     self.base_features * 8,
                                     3,
                                     dropout=dropout)
        self.conv_block5 = ConvBlock(self.base_features * 8,
                                     self.base_features * 8,
                                     3,
                                     dropout=dropout)
        self.conv_block6 = ConvBlock(self.base_features * 8,
                                     self.base_features * 8,
                                     2,
                                     pool=True,
                                     dropout=dropout)
        self.dense = nn.Conv2d(in_channels=self.base_features * 8,
                               out_channels=self.base_features * 8,
                               kernel_size=int(im_size / 32),
                               padding=0,
                               bias=True)
        # Projectors & Compatibility functions
        if self.attention:
            self.projector = ProjectorBlock(self.base_features * 4,
                                            self.base_features * 8)
            self.attn1 = LinearAttentionBlock(
                in_features=self.base_features * 8,
                normalize_attn=normalize_attn)
            self.attn2 = LinearAttentionBlock(
                in_features=self.base_features * 8,
                normalize_attn=normalize_attn)
            self.attn3 = LinearAttentionBlock(
                in_features=self.base_features * 8,
                normalize_attn=normalize_attn)
        # final classification layer

        if self.attention:
            self.classify = nn.Linear(in_features=(self.base_features * 8) * 3,
                                      out_features=num_classes,
                                      bias=True)
        else:
            self.classify = nn.Linear(in_features=self.base_features * 8,
                                      out_features=num_classes,
                                      bias=True)
        # initialize
        if init == 'kaimingNormal':
            weights_init_kaimingNormal(self)
        elif init == 'kaimingUniform':
            weights_init_kaimingUniform(self)
        elif init == 'xavierNormal':
            weights_init_xavierNormal(self)
        elif init == 'xavierUniform':
            weights_init_xavierUniform(self)
        else:
            raise NotImplementedError("Invalid type of initialization!")
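This version takes single-channel input (the first `ConvBlock` has 1 input channel), scales every layer width by `_base_features`, and threads a `dropout` probability through the blocks. As in Example 1, `kernel_size=int(im_size / 32)` requires `im_size` to be a multiple of 32. A hedged instantiation sketch, assuming the full class and its helper blocks are in scope:

# Usage sketch (hypothetical): a narrower grayscale model.
model = AttnVGG_before(im_size=128,
                       num_classes=2,
                       attention=True,
                       normalize_attn=True,
                       init='xavierUniform',
                       _base_features=32,  # halves every channel width
                       dropout=0.1)
# With im_size=128 the dense layer uses a 4x4 kernel (128 / 32),
# collapsing the final 4x4 feature map to a 1x1 feature vector.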