Example #1
 def init_weights(self):
     for conv in self.convs:
         kaiming_init(conv)
     for fc in self.fcs:
         kaiming_init(fc,
                      a=1,
                      mode='fan_in',
                      nonlinearity='leaky_relu',
                      distribution='uniform')
     normal_init(self.fc_mask_iou, std=0.01)
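All of these snippets lean on the weight-initialization helpers from mmcv.cnn (kaiming_init, normal_init, constant_init, xavier_init). As a minimal sketch of what the most common one does, assuming the usual mmcv.cnn signature (defaults a=0, mode='fan_out', nonlinearity='relu', distribution='normal'):

    import torch.nn as nn

    def kaiming_init(module, a=0, mode='fan_out', nonlinearity='relu',
                     bias=0, distribution='normal'):
        # Kaiming-init the weight (uniform or normal variant), then fill
        # the bias with a constant; modules lacking weight/bias are skipped.
        assert distribution in ['uniform', 'normal']
        if hasattr(module, 'weight') and module.weight is not None:
            if distribution == 'uniform':
                nn.init.kaiming_uniform_(
                    module.weight, a=a, mode=mode, nonlinearity=nonlinearity)
            else:
                nn.init.kaiming_normal_(
                    module.weight, a=a, mode=mode, nonlinearity=nonlinearity)
        if hasattr(module, 'bias') and module.bias is not None:
            nn.init.constant_(module.bias, bias)

constant_init and normal_init follow the same pattern around nn.init.constant_ and nn.init.normal_. The a=1 / 'leaky_relu' / 'uniform' combination used for the fc layers above gives a gain of sqrt(2 / (1 + a^2)) = 1, i.e. a plain fan-in-scaled uniform initialization.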
Example #2
 def init_weights(self):
     """Initiate the parameters either from existing checkpoint or from
     scratch."""
     # Override the init_weights of i3d
     super().init_weights()
     for module_name in self.lateral_connections:
         layer = getattr(self, module_name)
         for m in layer.modules():
             if isinstance(m, (nn.Conv3d, nn.Conv2d)):
                 kaiming_init(m)
Example #3
 def init_weights(self):
     for m in self.modules():
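         # only modules that set a truthy 'kaiming_init' attribute opt in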
         if hasattr(m, 'kaiming_init') and m.kaiming_init:
             kaiming_init(
                 m,
                 mode='fan_in',
                 nonlinearity='leaky_relu',
                 bias=0,
                 distribution='uniform',
                 a=1)
Example #4
 def init_weights(self, pretrained=None):
     if isinstance(pretrained, str):
         logger = logging.getLogger()
         load_checkpoint(self, pretrained, strict=False, logger=logger)
     elif pretrained is None:
         for m in self.modules():
             if isinstance(m, nn.Conv2d):
                 kaiming_init(m)
     else:
         raise TypeError('pretrained must be a str or None')
Example #5
 def init_weights(self, pretrained=None):
     super(VGG, self).init_weights(pretrained)
     if pretrained is None:
         for m in self.modules():
             if isinstance(m, nn.Conv2d):
                 kaiming_init(m)
             elif isinstance(m, _BatchNorm):
                 constant_init(m, 1)
             elif isinstance(m, nn.Linear):
                 normal_init(m, std=0.01)
Example #6
    def init_weights(self):
        """Initialize model weights."""

        for m in self.predict_layers.modules():
            if isinstance(m, nn.Conv2d):
                kaiming_init(m)
            elif isinstance(m, nn.BatchNorm2d):
                constant_init(m, 1)
            elif isinstance(m, nn.Linear):
                normal_init(m, std=0.01)
Example #7
 def init_weights(self):
     if isinstance(self.pretrained, str):
         logger = logging.getLogger()
         load_checkpoint(self, self.pretrained, strict=False, logger=logger)
     elif self.pretrained is None:
         for m in self.modules():
             if isinstance(m, nn.Conv3d):
                 kaiming_init(m)
             elif isinstance(m, nn.BatchNorm3d):
                 constant_init(m, 1)
Example #8
 def init_weights(self, pretrained=None):
     if isinstance(pretrained, str):
         logger = logging.getLogger()
         load_checkpoint(self, pretrained, strict=False, logger=logger)
     elif pretrained is None:
         for m in self.modules():
             if isinstance(m, nn.Conv2d):
                 kaiming_init(m)
             elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
                 constant_init(m, 1)
Example #9
    def __init__(self,
                 depth,
                 num_stages=4,
                 strides=(1, 2, 2, 2),
                 dilations=(1, 1, 1, 1),
                 out_indices=(0, 1, 2, 3),
                 style='pytorch',
                 frozen_stages=-1,
                 norm_cfg=dict(type='BN', requires_grad=True),  # diff with YY: normalize=dict(type='BN', frozen=False)
                 norm_eval=True,
                 dcn=None,
                 stage_with_dcn=(False, False, False, False),
                 with_cp=False,
                 zero_init_residual=True):
        super(MulCatResnet, self).__init__()
        # ResNet used for processing RGB images
        self.resnet_rgb = ResNet(
            depth=depth,
            num_stages=num_stages,
            strides=strides,
            dilations=dilations,
            out_indices=out_indices,
            style=style,
            frozen_stages=frozen_stages,
            norm_cfg=norm_cfg,
            norm_eval=norm_eval,
            dcn=dcn,
            stage_with_dcn=stage_with_dcn,
            with_cp=with_cp,
            zero_init_residual=zero_init_residual
        )

        # ResNet used for processing thermal images (thermal images should
        # be expanded to three channels)
        self.resnet_thermal = ResNet(
            depth=depth,
            num_stages=num_stages,
            strides=strides,
            dilations=dilations,
            out_indices=out_indices,
            style=style,
            frozen_stages=frozen_stages,
            norm_cfg=norm_cfg,
            norm_eval=norm_eval,
            dcn=dcn,
            stage_with_dcn=stage_with_dcn,
            with_cp=with_cp,
            zero_init_residual=zero_init_residual
        )
        for i in out_indices:
            conv_name = "conv{}".format(i)
            self.add_module(conv_name,
                            nn.Conv2d(int(512 * 2 ** i), int(256 * 2 ** i), 1))
            kaiming_init(getattr(self, conv_name))
            # relu_name = "relu{}".format(i)
            # self.add_module(nn.ReLU)
        self.out_indices = out_indices
Example #10
    def init_weights(self):
        if isinstance(self.pretrained, str):
            logger = get_root_logger()
            checkpoint = _load_checkpoint(self.pretrained,
                                          logger=logger,
                                          map_location='cpu')
            if 'state_dict' in checkpoint:
                state_dict = checkpoint['state_dict']
            elif 'model' in checkpoint:
                state_dict = checkpoint['model']
            else:
                state_dict = checkpoint

            if self.pretrain_style == 'timm':
                # The ViT refactor is blocked on mmcls, so for now we use
                # timm pretrained weights to train the downstream model.
                state_dict = vit_convert(state_dict)

            if 'pos_embed' in state_dict.keys():
                if self.pos_embed.shape != state_dict['pos_embed'].shape:
                    logger.info(msg=f'Resize the pos_embed shape from '
                                f'{state_dict["pos_embed"].shape} to '
                                f'{self.pos_embed.shape}')
                    h, w = self.img_size
                    pos_size = int(
                        math.sqrt(state_dict['pos_embed'].shape[1] - 1))
                    state_dict['pos_embed'] = self.resize_pos_embed(
                        state_dict['pos_embed'],
                        (h // self.patch_size, w // self.patch_size),
                        (pos_size, pos_size), self.interpolate_mode)

            self.load_state_dict(state_dict, strict=False)

        elif self.pretrained is None:
            super(VisionTransformer, self).init_weights()
            # We only implement the 'jax_impl' initialization implemented at
            # https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/vision_transformer.py#L353  # noqa: E501
            trunc_normal_init(self.pos_embed, std=.02)
            trunc_normal_init(self.cls_token, std=.02)
            for n, m in self.named_modules():
                if isinstance(m, nn.Linear):
                    trunc_normal_init(m.weight, std=.02)
                    if m.bias is not None:
                        if 'ffn' in n:
                            normal_init(m.bias, std=1e-6)
                        else:
                            constant_init(m.bias, 0)
                elif isinstance(m, nn.Conv2d):
                    kaiming_init(m.weight, mode='fan_in')
                    if m.bias is not None:
                        constant_init(m.bias, 0)
                elif isinstance(m, (_BatchNorm, nn.GroupNorm, nn.LayerNorm)):
                    constant_init(m.bias, 0)
                    constant_init(m.weight, 1.0)
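Example #10 resizes a pretrained position embedding when the fine-tuning patch grid differs from the pretraining grid. resize_pos_embed there is a method of the model class; a rough standalone sketch of the usual recipe (split off the cls token, interpolate the patch grid, re-concatenate) follows, where the function and argument names are illustrative, not the mmseg API:

    import torch
    import torch.nn.functional as F

    def resize_pos_embed(pos_embed, new_grid, old_grid, mode='bicubic'):
        # pos_embed: (1, 1 + old_h * old_w, C); the leading entry is the
        # cls token, which is not spatial and must not be interpolated.
        cls_token, patch_pos = pos_embed[:, :1], pos_embed[:, 1:]
        old_h, old_w = old_grid
        new_h, new_w = new_grid
        # (1, N, C) -> (1, C, old_h, old_w) so F.interpolate sees a 2D grid
        patch_pos = patch_pos.reshape(1, old_h, old_w, -1).permute(0, 3, 1, 2)
        patch_pos = F.interpolate(
            patch_pos, size=(new_h, new_w), mode=mode, align_corners=False)
        # flatten back to (1, new_h * new_w, C) and re-attach the cls token
        patch_pos = patch_pos.permute(0, 2, 3, 1).reshape(1, new_h * new_w, -1)
        return torch.cat([cls_token, patch_pos], dim=1)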
Example #11
 def init_weights(self):
     for m in self.modules():
         if hasattr(m, "kaiming_init") and m.kaiming_init:
             kaiming_init(
                 m,
                 mode="fan_in",
                 nonlinearity="leaky_relu",
                 bias=0,
                 distribution="uniform",
                 a=1,
             )
Example #12
    def _init_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Conv3d):
                kaiming_init(m)
            elif isinstance(m, _BatchNorm):
                constant_init(m, 1)

        if self.zero_init_residual:
            for m in self.modules():
                if isinstance(m, BlockX3D):
                    constant_init(m.conv3.bn, 0)
Example #13
 def init_weights(self):
     if self.init_method == 'xavier':
         xavier_init(self.conv[0])
         xavier_init(self.conv[1])
     elif self.init_method == 'kaiming':
         kaiming_init(self.conv[0], nonlinearity='relu')
         kaiming_init(self.conv[1], nonlinearity='relu')
     else:
         raise ValueError(f"Unkonw init method {self.init_method}")
     if self.with_norm:
         constant_init(self.norm, 1, bias=0)
Example #14
 def init_weights(self):
     if self.with_activatation and \
        self.act_cfg.get('type').lower() == 'leakyrelu':
         nonlinearity = 'leaky_relu'
         a = self.act_cfg.get('negative_slope', 0.01)
     else:
         nonlinearity = 'relu'
         a = 0
     kaiming_init(self.conv, a=a, nonlinearity=nonlinearity)
     if self.with_norm:
         constant_init(self.norm, 1, bias=0)
Example #15
 def init_weights(self, pretrained=None):
     # init for backbone
     self.backbone.init_weights(pretrained=pretrained)
     # init for classifier
     for m in self.classifier.modules():
         if isinstance(m, nn.Conv2d):
             kaiming_init(m)
         elif isinstance(m, nn.BatchNorm2d):
             constant_init(m, 1)
         elif isinstance(m, nn.Linear):
             normal_init(m, std=0.01)
Example #16
    def init_weights(self, pretrained=None):
        if isinstance(pretrained, str):
            state_dict = torch.load(pretrained)
            if 'state_dict' in state_dict:
                state_dict = state_dict['state_dict']

            if self.width_mult < 1.0:
                patched_dict = {}
                for k, v in state_dict.items():
                    if 'backbone.' in k:
                        k = k[len('backbone.'):]
                    if 'features.17.conv' in k and self.single_scale:
                        continue
                    if 'conv' in k:  # process convs in inverted residuals
                        if len(v.shape) == 1:
                            v = v[:int(v.shape[0] * self.width_mult)]
                        elif len(v.shape) == 4 and v.shape[1] == 1:
                            assert v.shape[2] == v.shape[3] and v.shape[2] == 3
                            v = v[:int(v.shape[0] * self.width_mult), ]
                        elif len(v.shape) == 4 and v.shape[2] == 1:
                            assert v.shape[2] == v.shape[3] and v.shape[2] == 1
                            v = v[:int(v.shape[0] * self.width_mult),
                                  :int(v.shape[1] * self.width_mult)]
                    elif 'features.0.' in k:  # process the first conv
                        if len(v.shape):
                            v = v[:int(v.shape[0] * self.width_mult), ]

                    patched_dict[k] = v

                self.load_state_dict(patched_dict, strict=False)
            elif self.width_mult == 1.0:
                patched_dict = {}
                if self.single_scale:
                    for k, v in state_dict.items():
                        if 'features.17.conv' in k:
                            continue
                        patched_dict[k] = v
                else:
                    patched_dict = state_dict

                self.load_state_dict(patched_dict, strict=False)
            else:
                print('Warning: loading of pre-trained weights is not '
                      'supported for MobileNetV2 with width_mult > 1')
        elif pretrained is None:
            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    kaiming_init(m)
                elif isinstance(m, nn.BatchNorm2d):
                    constant_init(m, 1)
                elif isinstance(m, nn.Linear):
                    normal_init(m, std=0.01)
        else:
            raise TypeError('pretrained must be a str or None')
Example #17
 def init_weights(self):
     nonlinearity = 'relu' if self.activation is None else self.activation
     if not self.is_mink:
         kaiming_init(self.conv, nonlinearity=nonlinearity)
         if self.with_norm:
             constant_init(self.norm, 1, bias=0)
     else:
         # kaiming_init(self.conv.conv, nonlinearity=nonlinearity)
         if self.with_norm:
             nn.init.constant_(self.norm.bn.weight, 1)
             nn.init.constant_(self.norm.bn.bias, 0)
Example #18
 def init_weights(self, pretrained=None):
     if isinstance(pretrained, str):
         load_checkpoint(self, pretrained, strict=False)
     elif pretrained is None:
         for m in self.modules():
             if isinstance(m, nn.Conv2d):
                 kaiming_init(m)
             elif isinstance(m, (_BatchNorm, nn.GroupNorm)):
                 constant_init(m, 1)
     else:
         raise TypeError('pretrained must be a str or None')
Example #19
    def init_weights(self, pretrained=None):
        if isinstance(pretrained, str):
            logger = get_root_logger()
            load_checkpoint(self, pretrained, strict=False, logger=logger)
            # load_pretrained_weights(self, self._model_name, load_fc=False)
        else:
            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    kaiming_init(m)
                elif isinstance(m, (_BatchNorm, nn.GroupNorm)):
                    constant_init(m, 1)
Example #20
 def init_weights(self):
     if self.with_embedding:
         for m in self.modules():
             if isinstance(m, (nn.Conv2d, nn.Conv3d)):
                 kaiming_init(m)
             elif isinstance(m, (nn.BatchNorm2d, nn.BatchNorm3d)):
                 constant_init(m, 1.0, 0.0)
     else:
         nn.init.normal_(self.fc_cls_out.weight, 0, self.init_std)
         nn.init.constant_(self.fc_cls_out.bias, 0)
Example #21
    def init_weights(self):
        if isinstance(self.pretrained, str):
            logger = logging.getLogger()
            if self.pretrained2d:
                resnet2d = ResNet(self.depth)
                load_checkpoint(resnet2d,
                                self.pretrained,
                                strict=False,
                                logger=logger)

                for name, module in self.named_modules():
                    if isinstance(module, NonLocalModule):
                        module.init_weights()
                    elif isinstance(module, nn.Conv3d) and rhasattr(
                            resnet2d, name):
                        w2d = rgetattr(resnet2d, name).weight.data
                        new_weight = (w2d.unsqueeze(2).expand_as(module.weight)
                                      / module.weight.data.shape[2])
                        module.weight.data.copy_(new_weight)
                        logger.info('{}.weight loaded from weights file into '
                                    '{}'.format(name, new_weight.shape))
                        if hasattr(module, 'bias') and module.bias is not None:
                            new_bias = rgetattr(resnet2d, name).bias.data
                            module.bias.data.copy_(new_bias)
                            logger.info('{}.bias loaded from weights file '
                                        'into {}'.format(name, new_bias.shape))
                    elif isinstance(module, nn.BatchNorm3d) and rhasattr(
                            resnet2d, name):
                        for attr in [
                                'weight', 'bias', 'running_mean', 'running_var'
                        ]:
                            logger.info(
                                '{}.{} loaded from weights file into {}'.format(
                                    name, attr,
                                    getattr(rgetattr(resnet2d, name),
                                            attr).shape))
                            setattr(module, attr,
                                    getattr(rgetattr(resnet2d, name), attr))
            else:
                load_checkpoint(self,
                                self.pretrained,
                                strict=False,
                                logger=logger)
        elif self.pretrained is None:
            for m in self.modules():
                if isinstance(m, nn.Conv3d):
                    kaiming_init(m)
                elif isinstance(m, nn.BatchNorm3d):
                    constant_init(m, 1)
        else:
            raise TypeError('pretrained must be a str or None')
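The pretrained2d branch in Example #21 "inflates" 2D ResNet kernels into 3D, the I3D recipe: each Conv2d kernel is repeated along the temporal axis and divided by the temporal kernel size, so a static clip produces the same activations the 2D network produced on a single frame. A self-contained sketch (the function name is illustrative):

    import torch.nn as nn

    def inflate_conv2d_to_conv3d(conv2d, conv3d):
        # Repeat the (out, in, kh, kw) kernel t times along a new temporal
        # axis, then divide by t to preserve the activation scale.
        t = conv3d.weight.data.shape[2]
        inflated = conv2d.weight.data.unsqueeze(2).expand_as(conv3d.weight) / t
        conv3d.weight.data.copy_(inflated)
        if conv2d.bias is not None and conv3d.bias is not None:
            conv3d.bias.data.copy_(conv2d.bias.data)

    # e.g. inflate_conv2d_to_conv3d(nn.Conv2d(64, 64, 3), nn.Conv3d(64, 64, 3))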
Example #22
    def init_weights(self, pretrained=None):
        if isinstance(pretrained, str):
            logger = logging.getLogger()
            if 'pytorch-classification' in pretrained: # load my model
                print('loading lx pretrained model...', pretrained)
                checkpoint = torch.load(pretrained)
                own_state = self.state_dict()
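                # k[7:] drops the 'module.' prefix (presumably left by nn.DataParallel)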
                state_dict = {k[7:]: v for k, v in checkpoint['state_dict'].items()}

                print('best acc = ', checkpoint['best_acc'])
                unexpected_keys = []
                for name, param in state_dict.items():
                    if name not in own_state:
                        unexpected_keys.append(name)
                        continue
                    if isinstance(param, torch.nn.Parameter):
                        param = param.data
                        print(param)

                    try:
                        own_state[name].copy_(param)
                    except Exception:
                        raise RuntimeError(
                            'While copying parameter named {}, whose dimensions '
                            'in the model are {} and whose dimensions in the '
                            'checkpoint are {}.'.format(
                                name, own_state[name].size(), param.size()))
                missing_keys = set(own_state.keys()) - set(state_dict.keys())
                err_msg = []
                if unexpected_keys:
                    err_msg.append('unexpected key in source state_dict: {}\n\n'.format(', '.join(unexpected_keys)))
                if missing_keys:
                    err_msg.append('missing keys in source state_dict: {}\n\n'.format(', '.join(missing_keys)))
                if err_msg:
                    logger.warning(''.join(err_msg))
            else:
                load_checkpoint(self, pretrained, strict=False, logger=logger)
        elif pretrained is None:
            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    kaiming_init(m)
                elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
                    constant_init(m, 1)

            if self.dcn is not None:
                for m in self.modules():
                    if isinstance(m, Bottleneck) and hasattr(
                            m, 'conv2_offset'):
                        constant_init(m.conv2_offset, 0)

            if self.zero_init_residual:
                for m in self.modules():
                    if isinstance(m, Bottleneck):
                        constant_init(m.norm3, 0)
                    elif isinstance(m, BasicBlock):
                        constant_init(m.norm2, 0)
        else:
            raise TypeError('pretrained must be a str or None')
Example #23
 def init_weights(self, pretrained=None):
     if isinstance(pretrained, str):
         logger = get_root_logger()
         load_checkpoint(self, pretrained, strict=False, logger=logger)
     elif pretrained is None:
         kaiming_init(self.st_feat_conv)
         kaiming_init(self.lt_feat_conv)
         for layer_name in self.non_local_layers:
             non_local_layer = getattr(self, layer_name)
             non_local_layer.init_weights(pretrained=pretrained)
     else:
         raise TypeError('pretrained must be a str or None')
Example #24
 def init_weights(self, pretrained=None):
     if isinstance(pretrained, str):
         logger = get_root_logger()
         load_checkpoint(self, pretrained, strict=False, logger=logger)
     elif pretrained is None:
         for m in self.modules():
             if isinstance(m, nn.Conv2d):
                 kaiming_init(m)
             elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
                 constant_init(m, 1)
     else:
         raise TypeError('pretrained must be a str or None')
Example #25
 def init_weights(self, pretrained=None):
     super().init_weights(pretrained)
     if pretrained is None:
         for m in self.modules():
             if isinstance(m, nn.Conv2d):
                 kaiming_init(m)
             elif isinstance(m, _BatchNorm):
                 constant_init(m, 1)
             elif isinstance(m, nn.Linear):
                 normal_init(m, std=0.01)
     elif not isinstance(pretrained, str):
         raise TypeError('pretrained must be a str or None')
Example #26
    def init_weights(self):
        for m in self.features.modules():
            if isinstance(m, nn.Conv2d):
                kaiming_init(m)
            elif isinstance(m, nn.BatchNorm2d):
                constant_init(m, 1)
            elif isinstance(m, nn.Linear):
                normal_init(m, std=0.01)

        for m in self.extra.modules():
            if isinstance(m, nn.Conv2d):
                xavier_init(m, distribution='uniform')
Example #27
 def init_weights(self, pretrained=None):
     for m in self.modules():
         if isinstance(m, nn.Conv2d):
             kaiming_init(m)
         elif isinstance(m, (_BatchNorm, nn.GroupNorm)):
             constant_init(m, 1)
     if self.zero_init_residual:
         for m in self.modules():
             if isinstance(m, Bottleneck):
                 constant_init(m.norm3, 0)
             elif isinstance(m, BasicBlock):
                 constant_init(m.norm2, 0)
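Several of these snippets (e.g. #12, #22, #27) also zero-init the last norm layer of each residual block (zero_init_residual). With that, every block computes out = x + F(x) with F(x) = 0 at initialization, i.e. starts as an identity mapping, a trick commonly used to stabilize early training of deep ResNets.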
Example #28
 def init_weights(self):
     for conv in self.convs:
         kaiming_init(conv)
     for fc in self.fcs:
         kaiming_init(
             fc,
             a=1,
             mode="fan_in",
             nonlinearity="leaky_relu",
             distribution="uniform",
         )
     normal_init(self.fc_mask_iou, std=0.01)
Example #29
 def init_weights(self, pretrained=None):
     for m in self.modules():
         if isinstance(m, nn.Conv2d):
             kaiming_init(m)
         elif isinstance(m, nn.BatchNorm2d):
             if m.weight is not None and m.bias is not None:
                 m.weight.data.fill_(1)
                 m.bias.data.zero_()
     
     if pretrained is not None and pretrained.use_load:
         model_dict = remap_for_archadapt(pretrained.load_path,
                                          self.state_dict(),
                                          pretrained.seed_num_layers)
         self.load_state_dict(model_dict)
Example #30
 def init_weights(self, pretrained=None):
     if isinstance(pretrained, str):
         logger = get_root_logger()
         load_checkpoint(self, pretrained, strict=False, logger=logger)
     elif pretrained is None:
         for m in self.res_blocks:
             for n in m.conv_block:
                 if isinstance(n, nn.Conv2d):
                     kaiming_init(n)
                 elif isinstance(n, nn.BatchNorm2d):
                     constant_init(n, 1)
     else:
         raise TypeError('pretrained must be a str or None')