Example No. 1
    def init_weights(self, pretrained=None):
        """Initiate the parameters either from existing checkpoint or from
        scratch."""
        if isinstance(pretrained, str):

            if self.pretrained2d:
                # Inflate 2D model into 3D model.
                self.inflate_weights(pretrained)

            else:
                # Directly load 3D model.
                load_checkpoint(self, pretrained, strict=False)

        elif pretrained is None:
            for m in self.modules():
                if isinstance(m, nn.Conv3d):
                    kaiming_init(m)
                elif isinstance(m, nn.BatchNorm3d):
                    constant_init(m, 1)

            if self.zero_init_residual:
                for m in self.modules():
                    if isinstance(m, Bottleneck3d):
                        constant_init(m.bn3, 0)
                    elif isinstance(m, BasicBlock3d):
                        constant_init(m.bn2, 0)
        else:
            raise TypeError('pretrained must be a str or None')
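
A minimal self-contained sketch of the same checkpoint-or-scratch branching using only plain PyTorch (the TinyBackbone3d class, its layers, and torch.load standing in for mmcv's load_checkpoint are assumptions for illustration, not part of the example above):

import torch
import torch.nn as nn


class TinyBackbone3d(nn.Module):
    """Toy 3D backbone used only to illustrate the init pattern."""

    def __init__(self):
        super().__init__()
        self.conv = nn.Conv3d(3, 8, kernel_size=3, padding=1)
        self.bn = nn.BatchNorm3d(8)

    def init_weights(self, pretrained=None):
        if isinstance(pretrained, str):
            # Load a 3D state dict directly; strict=False tolerates
            # missing or unexpected keys, as in the example above.
            state = torch.load(pretrained, map_location='cpu')
            self.load_state_dict(state, strict=False)
        elif pretrained is None:
            for m in self.modules():
                if isinstance(m, nn.Conv3d):
                    # Plain-PyTorch stand-in for mmcv's kaiming_init.
                    nn.init.kaiming_normal_(m.weight, mode='fan_out',
                                            nonlinearity='relu')
                    if m.bias is not None:
                        nn.init.zeros_(m.bias)
                elif isinstance(m, nn.BatchNorm3d):
                    # Stand-in for mmcv's constant_init(m, 1).
                    nn.init.ones_(m.weight)
                    nn.init.zeros_(m.bias)
        else:
            raise TypeError('pretrained must be a str or None')


model = TinyBackbone3d()
model.init_weights(pretrained=None)  # random (Kaiming) initialization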
Example No. 2
    def init_weights(self):
        if isinstance(self.pretrained, str):
            load_checkpoint(self, self.pretrained)
        elif self.pretrained is None:
            self.fast_path.init_weights(self.pretrained)
            self.slow_path.init_weights(self.pretrained)
        else:
            raise TypeError('pretrained must be a str or None')
Example No. 3
    def init_weights(self, pretrained=None):
        if isinstance(pretrained, str):
            load_checkpoint(self, pretrained, strict=False)
        elif pretrained is None:
            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    normal_init(m, std=self.init_gain)
                elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
                    normal_init(m, mean=1, std=self.init_gain)
        else:
            raise TypeError('pretrained must be a str or None')
Example No. 4
    def init_weights(self, pretrained=None):
        if isinstance(pretrained, str):
            load_checkpoint(self, pretrained, strict=False)

        elif pretrained is None:
            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    kaiming_init(m)
                elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
                    constant_init(m, 1)

        else:
            raise ValueError('pretrained should be str or None.')
Example No. 5
    def init_weights(self, pretrained=None):
        if isinstance(pretrained, str):
            load_checkpoint(self, pretrained, strict=False)

        elif pretrained is None:
            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                    normal_init(m, mean=0, std=math.sqrt(2. / n))
                elif isinstance(m, nn.BatchNorm2d):
                    constant_init(m, 1)
        else:
            raise TypeError('pretrained must be str or None.')
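
The manual std here, sqrt(2. / n) with n = kernel_h * kernel_w * out_channels, is He (Kaiming) initialization computed over the fan-out. A quick sanity check against PyTorch's built-in initializer (a sketch; the layer sizes below are arbitrary):

import math

import torch.nn as nn

conv = nn.Conv2d(16, 32, kernel_size=3)
n = conv.kernel_size[0] * conv.kernel_size[1] * conv.out_channels
manual_std = math.sqrt(2. / n)                    # same formula as above

nn.init.kaiming_normal_(conv.weight, mode='fan_out', nonlinearity='relu')
print(manual_std, conv.weight.std().item())       # both are close to 0.083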
Example No. 6
    def init_weights(self, pretrained=None):
        if isinstance(pretrained, str):
            load_checkpoint(self, pretrained, strict=False)

        elif pretrained is None:
            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    kaiming_init(m)
                elif isinstance(m, nn.BatchNorm2d):
                    constant_init(m, 1)

        else:
            raise TypeError('pretrained must be a str or None')
Example No. 7
    def init_weights(self, pretrained=None):
        if isinstance(pretrained, str):
            load_checkpoint(self, pretrained, strict=False)

        elif pretrained is None:
            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    kaiming_init(m)
                elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
                    constant_init(m, 1)
            
            if self.zero_init_residual:
                for m in self.modules():
                    if isinstance(m, Bottleneck):
                        constant_init(m.bn3, 0)
                    elif isinstance(m, BasicBlock):
                        constant_init(m.bn2, 0)
        
        else:
            raise TypeError('pretrained must be a str or None')
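
With zero_init_residual, the last BatchNorm of every residual block (bn3 in Bottleneck, bn2 in BasicBlock) gets zero weight and bias, so the residual branch contributes nothing at initialization and each block starts out as an identity mapping (the trick described by Goyal et al., 2017). A toy demonstration (a sketch; the small branch below stands in for the real Bottleneck/BasicBlock):

import torch
import torch.nn as nn

branch = nn.Sequential(nn.Conv2d(8, 8, 3, padding=1), nn.BatchNorm2d(8))
nn.init.zeros_(branch[1].weight)   # equivalent of constant_init(m.bn2, 0)
nn.init.zeros_(branch[1].bias)

x = torch.randn(2, 8, 16, 16)
out = x + branch(x)                # residual connection
print(torch.allclose(out, x))      # True: the block behaves as an identity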
Example No. 8
    def load_checkpoint(self, filename, map_location='cpu', strict=False):
        self.log.info('load checkpoint from %s', filename)
        if filename.endswith('npy'):
            dtype = torch.cuda.FloatTensor
            if hasattr(self.model, 'module'):
                state_dict = load_npy_weights(self.model.module, filename,
                                              dtype)
                load_state_dict(self.model.module, state_dict, strict)
            else:
                state_dict = load_npy_weights(self.model, filename, dtype)
                load_state_dict(self.model, state_dict, strict)
        else:
            return load_checkpoint(self.model, filename, map_location, strict)
Example No. 9
    def init_weights(self, pretrained=None):
        if isinstance(pretrained, str):
            load_checkpoint(self, pretrained, strict=False)
        elif pretrained is not None:
            raise TypeError('pretrained must be str or None.')
Example No. 10
    def load_checkpoint(self, filename, map_location='cpu', strict=False):
        self.log.info('load checkpoint from %s', filename)
        return load_checkpoint(self.model, filename, map_location, strict)
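
A minimal usage sketch for this kind of wrapper, assuming mmcv 1.x (the model and checkpoint file below are placeholders; mmcv's load_checkpoint accepts plain state dicts as well as checkpoints with a 'state_dict' key, and returns the loaded checkpoint):

import torch
import torch.nn as nn
from mmcv.runner import load_checkpoint

model = nn.Sequential(nn.Conv2d(3, 8, 3), nn.BatchNorm2d(8))
torch.save(model.state_dict(), 'demo_checkpoint.pth')  # stand-in pretrained file

checkpoint = load_checkpoint(model, 'demo_checkpoint.pth',
                             map_location='cpu', strict=False)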