Example #1
def init_weights(self, pretrained=None):
        if isinstance(pretrained, str):
            logger = logging.getLogger()
            load_checkpoint(self, pretrained, strict=False, logger=logger)
        elif pretrained is None:
            for m in self.features.modules():
                if isinstance(m, nn.Conv2d):
                    kaiming_init(m)
                elif isinstance(m, nn.BatchNorm2d):
                    constant_init(m, 1)
        else:
            raise TypeError('pretrained must be a str or None')

        for f in [
                self.Norm1, self.Norm2, self.Norm3, self.Norm4, self.icn1,
                self.icn2, self.icn3, self.dsc1, self.dsc2, self.dsc3,
                self.agent1, self.agent2, self.agent3, self.proj1, self.proj2,
                self.proj3, self.convert1, self.convert2, self.convert3,
                self.merge1, self.merge2, self.merge3, self.ibn1, self.ibn2,
                self.extra
        ]:
            for m in f.modules():
                if isinstance(m, nn.Conv2d):
                    xavier_init(m, distribution='uniform')
                elif isinstance(m, nn.BatchNorm2d):
                    constant_init(m, 1)
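All of these snippets lean on the mmcv.cnn initializer helpers (xavier_init, kaiming_init, constant_init, normal_init, uniform_init). As a rough sketch of what they do in plain PyTorch, assuming mmcv's documented signatures (simplified; the real helpers also guard against modules without a weight attribute):

import torch.nn as nn

def constant_init(module, val, bias=0):
    # Fill the weight with a constant and the bias with `bias`.
    nn.init.constant_(module.weight, val)
    if getattr(module, 'bias', None) is not None:
        nn.init.constant_(module.bias, bias)

def xavier_init(module, gain=1, bias=0, distribution='normal'):
    # Xavier/Glorot initialization, uniform or normal variant.
    if distribution == 'uniform':
        nn.init.xavier_uniform_(module.weight, gain=gain)
    else:
        nn.init.xavier_normal_(module.weight, gain=gain)
    if getattr(module, 'bias', None) is not None:
        nn.init.constant_(module.bias, bias)

def kaiming_init(module, a=0, mode='fan_out', nonlinearity='relu',
                 bias=0, distribution='normal'):
    # He initialization, suited to ReLU-family activations.
    if distribution == 'uniform':
        nn.init.kaiming_uniform_(module.weight, a=a, mode=mode,
                                 nonlinearity=nonlinearity)
    else:
        nn.init.kaiming_normal_(module.weight, a=a, mode=mode,
                                nonlinearity=nonlinearity)
    if getattr(module, 'bias', None) is not None:
        nn.init.constant_(module.bias, bias)

def normal_init(module, mean=0, std=1, bias=0):
    # Gaussian initialization of the weight.
    nn.init.normal_(module.weight, mean, std)
    if getattr(module, 'bias', None) is not None:
        nn.init.constant_(module.bias, bias)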
Example #2
    def init_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                xavier_init(m, distribution='uniform')

        if hasattr(self, 'attention') and self.attention is not None:
            self.attention.init_weights()
Example #3
    def init_weights(self, pretrained=None):
        """Initialize the weights in backbone.

        Args:
            pretrained (str, optional): Path to pre-trained weights.
                Defaults to None.
        """
        if isinstance(pretrained, str):
            logger = get_root_logger()
            load_checkpoint(self, pretrained, strict=False, logger=logger)
        elif pretrained is None:
            for m in self.features.modules():
                if isinstance(m, nn.Conv2d):
                    kaiming_init(m)
                elif isinstance(m, nn.BatchNorm2d):
                    constant_init(m, 1)
                elif isinstance(m, nn.Linear):
                    normal_init(m, std=0.01)
        else:
            raise TypeError('pretrained must be a str or None')

        for m in self.extra.modules():
            if isinstance(m, nn.Conv2d):
                xavier_init(m, distribution='uniform')

        constant_init(self.l2_norm, self.l2_norm.scale)
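The last line assumes an SSD-style L2Norm layer: its learnable per-channel weight is filled with the configured scale, so features are rescaled after channel-wise L2 normalization. A minimal sketch of such a layer (simplified from the SSD design; the parameter starts uninitialized, which is exactly why init_weights fills it):

import torch
import torch.nn as nn

class L2Norm(nn.Module):
    # Channel-wise L2 normalization with a learnable per-channel scale.
    def __init__(self, n_dims, scale=20.0, eps=1e-10):
        super().__init__()
        self.weight = nn.Parameter(torch.Tensor(n_dims))
        self.scale = scale  # initial value, applied by constant_init above
        self.eps = eps

    def forward(self, x):
        norm = x.pow(2).sum(dim=1, keepdim=True).sqrt() + self.eps
        return self.weight[None, :, None, None].expand_as(x) * x / norm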
Example #4
    def init_weights(self):
        # initialize weight and bias
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                xavier_init(m)
            elif isinstance(m, nn.BatchNorm2d):
                uniform_init(m)
Example #5
    def init_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                xavier_init(m, distribution='uniform')
        nn.init.constant_(self.conv_final[-1].bias, 0.0)
        nn.init.constant_(self.atten_conv_final[-1].bias, 0.0)
        self.atten.init_weights()
Example #6
    def init_weights(self):
        for g in self.grabs:
            g.init_weights()
        self.grab_extra.init_weights()
        for m in self.conv_extra.modules():
            if isinstance(m, nn.Conv2d):
                xavier_init(m, distribution='uniform')
Example #7
    def init_weights(self):
        for m in self.modules():
            if isinstance(m, (nn.Conv2d, nn.ConvTranspose2d)):
                xavier_init(m, distribution='uniform')
        for m in self.modules():
            if isinstance(m, CARAFEPack):
                m.init_weights()
Example #8
    def init_weights(self):
        if self.num_lstms > 0:
            for lstm_cell in self.lstm_cells:
                lstm_cell.init_weights()
        if self.num_extra_convs > 0:
            for conv in self.extra_convs:
                xavier_init(conv)
Example #9
    def init_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                xavier_init(m, distribution='uniform')

        normal_init(self.r_conv, std=0.01)

        # Disabled experiments: normal_init(std=0.01) of scale_conv_k_{2..6}
        # and scale_conv_v_{2..6}; the matching scale_conv_q_{2..6} lines
        # were commented out even before the whole block was disabled.

        normal_init(self.batch_, std=0.01)
        normal_init(self.batch_11, std=0.01)
Example #10
    def init_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                xavier_init(m, distribution='uniform')
        # normal_init(self.content_encoder, std=0.001)
        kaiming_init(self.channel_compressor)
        kaiming_init(self.content_encoder)
Example #11
def init_method(m):
    if isinstance(m, (nn.Conv2d, nn.ConvTranspose2d)):
        xavier_init(m, distribution='uniform', bias=0)
    elif isinstance(m, nn.BatchNorm2d):
        constant_init(m, 1)
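A standalone function like init_method is meant to be applied recursively with Module.apply; for instance (hypothetical model, for illustration only):

import torch.nn as nn

model = nn.Sequential(
    nn.Conv2d(3, 16, 3, padding=1),
    nn.BatchNorm2d(16),
    nn.ConvTranspose2d(16, 3, 2, stride=2),
)
model.apply(init_method)  # visits every submodule once, depth-first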
Example #12
    def init_weights(self):
        normal_init(self.fc_cls, std=0.01)
        normal_init(self.fc_reg, std=0.001)

        for m in self.fc_branch.modules():
            if isinstance(m, nn.Linear):
                xavier_init(m, distribution='uniform')
Example #13
    def init_weights(self):
        if self.num_rnns > 0:
            for rnn_cell in self.rnn_cells:
                rnn_cell.init_weights()
        if self.num_extra_convs > 0:
            for conv in self.extra_convs:
                xavier_init(conv)
Example #14
    def init_weights(self):
        # if isinstance(pretrained, str):
        #     logger = logging.getLogger()
        #     load_checkpoint(self, pretrained, strict=False, logger=logger)
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                xavier_init(m, distribution='uniform', bias=0)
Example #15
    def init_weights(self):
        """Initialize weights of the bbox head."""
        for m in self.modules():
            if isinstance(m, (nn.Conv2d, nn.Conv1d)):
                xavier_init(m, distribution='uniform')

        normal_init(self.conv_reg[-1].conv, mean=0, std=0.001)
Example #16
    def init_func(m):
        """Initialization function.

        Args:
            m (nn.Module): Module to be initialized.
        """
        classname = m.__class__.__name__
        if hasattr(m, 'weight') and (classname.find('Conv') != -1
                                     or classname.find('Linear') != -1):
            if init_type == 'normal':
                normal_init(m, 0.0, init_gain)
            elif init_type == 'xavier':
                xavier_init(m, gain=init_gain, distribution='normal')
            elif init_type == 'kaiming':
                kaiming_init(m,
                             a=0,
                             mode='fan_in',
                             nonlinearity='leaky_relu',
                             distribution='normal')
            elif init_type == 'orthogonal':
                init.orthogonal_(m.weight, gain=init_gain)
                init.constant_(m.bias.data, 0.0)
            else:
                raise NotImplementedError(
                    f"Initialization method '{init_type}' is not implemented")
        elif classname.find('BatchNorm2d') != -1:
            # BatchNorm Layer's weight is not a matrix;
            # only normal distribution applies.
            normal_init(m, 1.0, init_gain)
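Note that init_func reads init_type and init_gain as free variables, so it must be nested inside a wrapper that binds them and then walks the network with Module.apply. A hedged sketch of such a wrapper (the name and defaults here are assumptions):

def init_model_weights(net, init_type='normal', init_gain=0.02):
    # init_func (as above) would be defined here, nested in this scope so
    # the closure can see init_type and init_gain, then applied to every
    # submodule of net.
    net.apply(init_func)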
Example #17
    def init_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                xavier_init(m, distribution='uniform')
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
Example #18
    def init_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                xavier_init(m, distribution='uniform')
        for m in self.high_lateral_conv_attention.modules():
            if isinstance(m, nn.Conv2d):
                xavier_init(m, distribution='uniform')
Example #19
    def init_weights(self, pretrained=None):
        if isinstance(pretrained, str):
            #self.encoder.conv1.weight.data[:, 3:, :, :] = 0

            logger = get_root_logger()
            load_checkpoint(self, pretrained, strict=False, logger=logger)

            if self.in_channels > 3:
                print(
                    f'modifying input layer to accept {self.in_channels} channels'
                )
                net_encoder_sd = self.encoder.state_dict()
                conv1_weights = net_encoder_sd['conv1.weight']

                c_out, c_in, h, w = conv1_weights.size()
                conv1_mod = torch.zeros(c_out, self.in_channels, h, w)
                conv1_mod[:, :3, :, :] = conv1_weights

                conv1 = self.encoder.conv1
                conv1.in_channels = self.in_channels
                conv1.weight = torch.nn.Parameter(conv1_mod)

                self.encoder.conv1 = conv1

                net_encoder_sd['conv1.weight'] = conv1_mod

                self.encoder.load_state_dict(net_encoder_sd)

        elif pretrained is None:

            if self.in_channels > 3:
                print(
                    f'modifying input layer to accept {self.in_channels} channels'
                )
                net_encoder_sd = self.encoder.state_dict()
                conv1_weights = net_encoder_sd['conv1.weight']

                c_out, c_in, h, w = conv1_weights.size()
                conv1_mod = torch.zeros(c_out, self.in_channels, h, w)
                conv1_mod[:, :3, :, :] = conv1_weights

                conv1 = self.encoder.conv1
                conv1.in_channels = self.in_channels
                conv1.weight = torch.nn.Parameter(conv1_mod)

                self.encoder.conv1 = conv1

                net_encoder_sd['conv1.weight'] = conv1_mod

                self.encoder.load_state_dict(net_encoder_sd)

            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    xavier_init(m)
                elif isinstance(m, nn.BatchNorm2d):
                    constant_init(m, 1)
        else:
            raise TypeError('"pretrained" must be a str or None, '
                            f'but received {type(pretrained)}')
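The conv1 surgery above can be factored into a standalone helper. A sketch under the same assumptions (the RGB weights are kept in the first three input channels and the new channels start at zero, so the pretrained behavior is preserved at initialization; the helper name is hypothetical):

import torch
import torch.nn as nn

def expand_conv_in_channels(conv, in_channels):
    # Widen a pretrained conv from 3 to `in_channels` input channels,
    # copying the RGB weights and zero-filling the new channels so the
    # extra inputs start inert.
    c_out, _, h, w = conv.weight.shape
    new_weight = torch.zeros(c_out, in_channels, h, w)
    new_weight[:, :3] = conv.weight.data
    conv.in_channels = in_channels
    conv.weight = nn.Parameter(new_weight)
    return conv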
Example #20
    def init_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                xavier_init(m)
            elif isinstance(m, nn.BatchNorm2d):
                constant_init(m, 1)
            elif isinstance(m, nn.Linear):
                normal_init(m, std=0.01)
Example #21
    def init_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                xavier_init(m, distribution='uniform')

        for m in self.modules():
            if isinstance(m, ModulatedDeformConvPack):
                constant_init(m.conv_offset, 0)
Example #22
    def init_weights(self):
        """Initialize model weights."""
        for m in self.conv_blocks:
            if isinstance(m, nn.Conv2d):
                normal_init(m, std=0.001, bias=0)
        for _, m in self.fc_layer.named_modules():
            if isinstance(m, nn.Linear):
                xavier_init(m, gain=0.01)
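Note the pitfall in the fc_layer loop: named_modules() yields (name, module) pairs, so the tuple must be unpacked as above. Iterating the pairs directly makes the isinstance check silently match nothing, leaving the Linear layers at their default initialization. A quick check:

import torch.nn as nn

fc = nn.Sequential(nn.Linear(8, 8))
print([type(m) for m in fc.named_modules()])     # [tuple, tuple]: never nn.Linear
print([type(m) for _, m in fc.named_modules()])  # [Sequential, Linear]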
Example #23
    def init_weights(self):
        """Initialize the weights of FPN module."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                if self.init_type == 'uniform':
                    xavier_init(m, distribution='uniform')
                elif self.init_type == 'normal':
                    xavier_init(m, distribution='normal')
Example #24
    def init_weights(self):
        # conv layers are already initialized by ConvModule
        normal_init(self.fc_cls, std=0.01)
        normal_init(self.fc_reg, std=0.001)

        for m in self.fc_branch.modules():
            if isinstance(m, nn.Linear):
                xavier_init(m, distribution='uniform')
Example #25
    def init_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                xavier_init(m, distribution='uniform')
        for m in [self.proto_logits, self.segm_logits]:
            nn.init.kaiming_normal_(
                m.weight, mode='fan_out', nonlinearity='relu')
            nn.init.constant_(m.bias, 0)
Example #26
    def init_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Linear):
                normal_init(m, std=0.01)
            elif isinstance(m, nn.Conv3d):
                xavier_init(m, distribution='uniform')
            elif isinstance(m, nn.BatchNorm3d):
                constant_init(m, 1)
Example #27
    def init_weights(self):
        """Initialize the weights of module."""
        for m in self.modules():
            if isinstance(m, (nn.Conv2d, nn.ConvTranspose2d)):
                xavier_init(m, distribution='uniform')
        for m in self.modules():
            if isinstance(m, CARAFE_se_3):
                m.init_weights()
Example #28
    def init_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                xavier_init(m, distribution='uniform')

        for _, m in self.deconv_layers.named_modules():
            if isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
Example #29
    def init_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                xavier_init(m, distribution='uniform')
        if self.use_skip:
            nn.init.constant_(self.conv_final[-1].bias, 0.0)
        else:
            nn.init.constant_(self.conv_final[-1].bias[0], 1.0)
            nn.init.constant_(self.conv_final[-1].bias[1], 0.0)
Example #30
    def init_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Conv3d):
                xavier_init(m, distribution='uniform')
            elif isinstance(m, nn.BatchNorm3d):
                constant_init(m, 1)

        if self.aux_head is not None:
            self.aux_head.init_weights()
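Common to all of these examples: init_weights is typically invoked explicitly after the model is built, rather than from __init__. A minimal hypothetical module showing the calling pattern (assumes mmcv is installed, as in the snippets above):

import torch.nn as nn
from mmcv.cnn import xavier_init

class TinyNeck(nn.Module):
    # Hypothetical minimal module following the pattern above.
    def __init__(self):
        super().__init__()
        self.conv = nn.Conv2d(3, 8, 3, padding=1)

    def init_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                xavier_init(m, distribution='uniform')

model = TinyNeck()
model.init_weights()  # called once, after construction and before training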