Example #1
def init_weights(self):
    # initialize weight and bias
    for m in self.modules():
        if isinstance(m, nn.Conv2d):
            xavier_init(m)
        elif isinstance(m, nn.BatchNorm2d):
            uniform_init(m)
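For context, these init_weights methods rely on the weight-initialization helpers shipped with mmcv. A minimal, self-contained sketch of a module using the pattern from Example #1 might look as follows; the class name, layer sizes, and the mmcv.cnn import path are assumptions for illustration, not taken from the original snippet.

import torch.nn as nn
from mmcv.cnn import xavier_init, uniform_init  # assumed import path for these helpers

class SimpleBackbone(nn.Module):  # hypothetical module, for illustration only
    def __init__(self):
        super().__init__()
        self.conv = nn.Conv2d(3, 16, 3, padding=1)
        self.bn = nn.BatchNorm2d(16)

    def init_weights(self):
        # Xavier for conv weights, uniform for BatchNorm parameters
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                xavier_init(m)
            elif isinstance(m, nn.BatchNorm2d):
                uniform_init(m)

model = SimpleBackbone()
model.init_weights()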
Example #2
import torch
import torch.nn as nn
from mmcv.cnn import uniform_init  # assumed import path for uniform_init

def test_uniform_init():
    conv_module = nn.Conv2d(3, 16, 3)
    uniform_init(conv_module, bias=0.1)
    # TODO: sanity check of weight distribution, e.g. mean, std
    assert conv_module.bias.allclose(torch.full_like(conv_module.bias, 0.1))
    # a module without bias should also be handled without raising
    conv_module_no_bias = nn.Conv2d(3, 16, 3, bias=False)
    uniform_init(conv_module_no_bias)
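The TODO above hints at a distribution check. A rough sketch of such a check, assuming uniform_init with default arguments samples weights from U(0, 1) as in mmcv, could look like this:

# hedged sketch: assumes the default uniform range is [0, 1]
assert conv_module.weight.min().item() >= 0.0
assert conv_module.weight.max().item() <= 1.0
# the mean of U(0, 1) is 0.5; use a generous tolerance for this ~400-element tensor
assert abs(conv_module.weight.mean().item() - 0.5) < 0.1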
Example #3
def init_weights(self, pretrained=None):
    # initialize weight and bias (the pretrained argument is not used in this variant)
    for m in self.modules():
        if isinstance(m, nn.Conv2d):
            kaiming_init(m)
        elif isinstance(m, nn.BatchNorm2d):
            uniform_init(m)
Example #4
def init_weights(self, pretrained=None):
    if pretrained is not None:
        # load weights from the given checkpoint path
        logger = get_root_logger()
        load_checkpoint(self, pretrained, strict=False, logger=logger)
    else:
        # otherwise initialize weight and bias from scratch
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                xavier_init(m)
            elif isinstance(m, nn.BatchNorm2d):
                uniform_init(m)
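A hedged usage sketch for this variant; the module class and the checkpoint path below are placeholders, not part of the original example:

# hypothetical usage, for illustration only
backbone = MyBackbone()                                 # any nn.Module exposing the init_weights above
backbone.init_weights()                                 # random Xavier/uniform initialization
backbone.init_weights(pretrained='work_dirs/ckpt.pth')  # placeholder path, loaded via load_checkpoint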
Example #5
def init_weights(self, pretrained=None):
    for m in self.modules():
        if isinstance(m, nn.Conv2d):
            xavier_init(m)
        elif isinstance(m, nn.BatchNorm2d):
            uniform_init(m)

def init_weights(self):
    """Initialize the learnable weights."""
    uniform_init(self.row_embed)
    uniform_init(self.col_embed)
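The second init_weights above uniformly initializes learned positional-embedding tables. A minimal sketch, assuming row_embed and col_embed are nn.Embedding modules as in DETR-style learned position encodings (the table sizes and import path are assumptions):

import torch.nn as nn
from mmcv.cnn import uniform_init  # assumed import path

# hypothetical embedding tables mirroring the attributes used above
row_embed = nn.Embedding(50, 128)
col_embed = nn.Embedding(50, 128)
uniform_init(row_embed)  # draws the embedding weights from a uniform distribution
uniform_init(col_embed)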