Example #1
    def init_weights(self, pretrained=None):
        # self.fc may be a single linear layer or an nn.Sequential (e.g.
        # dropout followed by linear); only sub-layers that actually carry
        # weights are initialised.
        if isinstance(self.fc, nn.Sequential):
            for layer in self.fc:
                if not hasattr(layer, 'weight'):
                    continue
                normal_init(layer, std=self.init_std)
        else:
            normal_init(self.fc, std=self.init_std)
Example #2
    def init_weights(self, pretrained=None):
        # Load weights from a checkpoint path when one is given; otherwise
        # initialise conv and norm layers from a normal distribution.
        if isinstance(pretrained, str):
            load_checkpoint(self, pretrained, strict=False)
        elif pretrained is None:
            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    normal_init(m, std=self.init_gain)
                elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
                    # Norm scale (weight) is drawn around 1 rather than 0.
                    normal_init(m, mean=1, std=self.init_gain)
        else:
            raise TypeError('pretrained must be a str or None')
Example #3
    def init_weights(self, pretrained=None):
        # Load from a checkpoint when a path is given; otherwise apply He
        # ("fan_out") initialisation to convs and set BatchNorm scales to 1.
        if isinstance(pretrained, str):
            load_checkpoint(self, pretrained, strict=False)
        elif pretrained is None:
            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    # std = sqrt(2 / n) with n = k_h * k_w * out_channels.
                    n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                    normal_init(m, mean=0, std=math.sqrt(2. / n))
                elif isinstance(m, nn.BatchNorm2d):
                    constant_init(m, 1)
        else:
            raise TypeError('pretrained must be str or None.')
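
The std computed in Example #3 is the classic He ("fan_out") initialisation: with n = k_h * k_w * out_channels, sqrt(2 / n) equals sqrt(2 / fan_out). The sketch below, which assumes only PyTorch and uses hypothetical channel sizes, checks that equivalence numerically.

    # Sketch (plain PyTorch, hypothetical channel sizes): the std used in
    # Example #3 equals the He "fan_out" std, gain / sqrt(fan_out) with
    # gain = sqrt(2) for ReLU.
    import math

    import torch.nn as nn

    conv = nn.Conv2d(16, 32, kernel_size=3)
    n = conv.kernel_size[0] * conv.kernel_size[1] * conv.out_channels
    manual_std = math.sqrt(2. / n)

    # fan_out of a Conv2d weight is out_channels * k_h * k_w.
    fan_out = conv.weight.shape[0] * conv.weight.shape[2] * conv.weight.shape[3]
    he_std = math.sqrt(2.0 / fan_out)

    assert math.isclose(manual_std, he_std)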
Example #4
    def init_weights(self, pretrained=None):
        # No std is passed, so normal_init falls back to its defaults
        # (mean=0, std=1 in mmcv) for all three RPN layers.
        normal_init(self.rpn_conv)
        normal_init(self.rpn_cls)
        normal_init(self.rpn_reg)
Example #5
    def init_weights(self, pretrained=None):
        # Single fully connected head: one normal initialisation call.
        normal_init(self.fc, std=self.init_std)
Example #6
    def init_weights(self, pretrained=None):
        # Final segmentation conv gets a small-std normal initialisation.
        normal_init(self.conv_seg, mean=0, std=0.01)
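
All of the snippets above rely on names imported elsewhere in their source files: torch.nn as nn, math (Example #3), and the helpers normal_init, constant_init and load_checkpoint, which in pre-2.0 mmcv live in mmcv.cnn and mmcv.runner. The sketch below is a rough, plain-PyTorch stand-in for the two init helpers, written here only for illustration, followed by a minimal usage in the spirit of Example #5 with hypothetical layer sizes.

    # Rough stand-ins for the init helpers used above, assuming only PyTorch.
    import torch.nn as nn


    def normal_init(module, mean=0, std=1, bias=0):
        # Draw the weight from N(mean, std**2) and set the bias to a constant.
        nn.init.normal_(module.weight, mean, std)
        if hasattr(module, 'bias') and module.bias is not None:
            nn.init.constant_(module.bias, bias)


    def constant_init(module, val, bias=0):
        # Fill the weight with a constant value (e.g. 1 for norm scales).
        nn.init.constant_(module.weight, val)
        if hasattr(module, 'bias') and module.bias is not None:
            nn.init.constant_(module.bias, bias)


    # Minimal usage, in the spirit of Example #5 (hypothetical sizes).
    fc = nn.Linear(256, 10)
    normal_init(fc, std=0.01)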