Example #1
    def init_weights(self):
        normal_init(self.fc_cls, std=0.01)
        normal_init(self.fc_reg, std=0.001)

        for m in self.fc_branch.modules():
            if isinstance(m, nn.Linear):
                xavier_init(m, distribution='uniform')
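
This snippet presumes a bbox head that already defines self.fc_cls, self.fc_reg, and a shared self.fc_branch of fully connected layers, with normal_init and xavier_init coming from mmcv.cnn (an assumption; the original import lines are not shown). The small Gaussian std values keep the initial classification logits and box deltas close to zero, while the shared Linear layers get Xavier-uniform init. A minimal, self-contained sketch of such a head, with all layer sizes made up for illustration:

import torch.nn as nn
from mmcv.cnn import normal_init, xavier_init  # assumed import path (mmcv 1.x)

class SketchHead(nn.Module):  # hypothetical head, for illustration only
    def __init__(self, feat_dim=1024, num_classes=81):
        super().__init__()
        # shared fully connected branch: its Linear layers get Xavier-uniform init
        self.fc_branch = nn.Sequential(
            nn.Linear(256 * 7 * 7, feat_dim),
            nn.ReLU(inplace=True),
        )
        # output layers: small Gaussian init keeps initial predictions near zero
        self.fc_cls = nn.Linear(feat_dim, num_classes)
        self.fc_reg = nn.Linear(feat_dim, 4 * num_classes)

    def init_weights(self):
        normal_init(self.fc_cls, std=0.01)   # classification logits
        normal_init(self.fc_reg, std=0.001)  # box regression deltas
        for m in self.fc_branch.modules():
            if isinstance(m, nn.Linear):
                xavier_init(m, distribution='uniform')

head = SketchHead()
head.init_weights()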
Example #2
    def init_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                kaiming_init(m)
            elif isinstance(m, nn.BatchNorm2d):
                constant_init(m, 1)
            elif isinstance(m, nn.Linear):
                normal_init(m, std=0.01)
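
Example #2 is the stock backbone-style initialization: Kaiming (He) init for convolutions, constant 1 for the BatchNorm scale, and a small Gaussian for Linear layers. Assuming the helpers are the mmcv.cnn utilities (kaiming_init, constant_init, normal_init), the same recursive loop can be exercised on a toy module like the sketch below; the module and layer sizes are made up for illustration:

import torch.nn as nn
from mmcv.cnn import kaiming_init, constant_init, normal_init  # assumed import path (mmcv 1.x)

class TinyBackbone(nn.Module):  # hypothetical module, for illustration only
    def __init__(self):
        super().__init__()
        self.conv = nn.Conv2d(3, 16, 3, padding=1)
        self.bn = nn.BatchNorm2d(16)
        self.fc = nn.Linear(16, 10)

    def init_weights(self):
        # walk every submodule and pick an initializer by layer type
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                kaiming_init(m)           # He init for conv weights
            elif isinstance(m, nn.BatchNorm2d):
                constant_init(m, 1)       # scale (gamma) = 1, bias (beta) = 0
            elif isinstance(m, nn.Linear):
                normal_init(m, std=0.01)  # small Gaussian for fc weights

model = TinyBackbone()
model.init_weights()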
Example #3
    def init_weights(self):
        # conv layers are already initialized by ConvModule
        normal_init(self.fc_cls, std=0.01)
        normal_init(self.fc_reg, std=0.001)

        for m in self.fc_branch.modules():
            if isinstance(m, nn.Linear):
                xavier_init(m, distribution='uniform')
Example #4
    def init_weights(self, pretrained=None):
        if isinstance(pretrained, str):
            logger = logging.getLogger()
            load_checkpoint(self, pretrained, strict=False, logger=logger)
        elif pretrained is None:
            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    kaiming_init(m)
                elif isinstance(m, nn.BatchNorm2d):
                    constant_init(m, 1)
                elif isinstance(m, nn.Linear):
                    normal_init(m, std=0.01)
        else:
            raise TypeError('pretrained must be a str or None')
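
Example #4 adds checkpoint loading on top of the type-based initialization: a string argument is treated as a checkpoint path and handed to load_checkpoint (presumably mmcv.runner.load_checkpoint, with a module-level import logging also assumed), None triggers the random initialization, and any other type raises. A hedged, self-contained sketch of the same pattern; the class name and checkpoint path below are placeholders, not taken from the source:

import logging
import torch.nn as nn
from mmcv.cnn import kaiming_init, constant_init
from mmcv.runner import load_checkpoint  # assumed import path (mmcv 1.x)

class TinyNet(nn.Module):  # hypothetical module, for illustration only
    def __init__(self):
        super().__init__()
        self.conv = nn.Conv2d(3, 8, 3)
        self.bn = nn.BatchNorm2d(8)

    def init_weights(self, pretrained=None):
        if isinstance(pretrained, str):
            # treat the string as a checkpoint path; tolerate missing keys
            load_checkpoint(self, pretrained, strict=False, logger=logging.getLogger())
        elif pretrained is None:
            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    kaiming_init(m)
                elif isinstance(m, nn.BatchNorm2d):
                    constant_init(m, 1)
        else:
            raise TypeError('pretrained must be a str or None')

net = TinyNet()
net.init_weights()                     # random init
# net.init_weights('placeholder.pth')  # or load from a checkpoint file (path is illustrative)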