Example #1
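All of the snippets below are init_weight-style methods taken out of PaddleSeg-style model classes, so the surrounding class and imports are not shown. They appear to assume imports along the lines of "import paddle.nn as nn", "from paddleseg.cvlibs import param_init", and "from paddleseg import utils" (an assumption inferred from the identifiers, not stated in the originals).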
def init_weight(self):
    """Give conv layers a small-variance normal init and reset norm
    layers to identity scale and zero shift."""
    for layer in self.sublayers():
        if isinstance(layer, nn.Conv2D):
            param_init.normal_init(layer.weight, std=0.001)
        elif isinstance(layer, (nn.BatchNorm, nn.SyncBatchNorm)):
            param_init.constant_init(layer.weight, value=1.0)
            param_init.constant_init(layer.bias, value=0.0)
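For context, the param_init helpers used throughout these examples are thin wrappers over paddle.nn.initializer that assign values to a parameter in place. A minimal sketch, assuming a PaddleSeg-style implementation (not the verbatim library code):

import paddle.nn as nn

# Sketch under the assumption above: build a paddle initializer from the
# keyword arguments and apply it to the parameter in place.
def constant_init(param, **kwargs):
    initializer = nn.initializer.Constant(**kwargs)
    initializer(param, param.block)

def normal_init(param, **kwargs):
    initializer = nn.initializer.Normal(**kwargs)
    initializer(param, param.block)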
Example #2
def init_weight(self):
    """Initialize the parameters of model parts."""
    for sublayer in self.sublayers():
        if isinstance(sublayer, nn.Conv2D):
            param_init.normal_init(sublayer.weight, std=0.001)
        elif isinstance(sublayer, (nn.BatchNorm, nn.SyncBatchNorm)):
            param_init.constant_init(sublayer.weight, value=1.0)
            param_init.constant_init(sublayer.bias, value=0.0)
Example #3
def init_weight(self):
    # Apply a per-parameter learning-rate multiplier to every parameter,
    # then reset LayerNorm layers to identity scale and zero shift.
    for _param in self.parameters():
        _param.optimize_attr['learning_rate'] = self.lr_multiple

    for layer in self.sublayers():
        if isinstance(layer, nn.LayerNorm):
            param_init.constant_init(layer.weight, value=1.0)
            param_init.constant_init(layer.bias, value=0.0)
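The optimize_attr['learning_rate'] field is a per-parameter multiplier that Paddle optimizers fold into the global learning rate, which is how a backbone can be trained at a scaled rate relative to the rest of the model. A small illustration (the layer and the 0.1 multiplier are hypothetical):

import paddle

# A parameter with multiplier 0.1 is effectively updated at 0.1 * base_lr.
linear = paddle.nn.Linear(4, 4)
linear.weight.optimize_attr['learning_rate'] = 0.1
opt = paddle.optimizer.Momentum(learning_rate=0.01,
                                parameters=linear.parameters())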
Example #4
def init_weight(self):
    for layer in self.sublayers():
        if isinstance(layer, nn.Conv2D):
            param_init.normal_init(layer.weight, std=0.001)
        elif isinstance(layer, (nn.BatchNorm, nn.SyncBatchNorm)):
            param_init.constant_init(layer.weight, value=1.0)
            param_init.constant_init(layer.bias, value=0.0)
    # Pretrained weights, when provided, overwrite the random init above.
    if self.pretrained is not None:
        utils.load_pretrained_model(self, self.pretrained)
Example #5
def init_weight(self):
    # Prefer a full pretrained checkpoint; otherwise fall back to
    # Kaiming init for convs and identity/zero for norm layers.
    if self.pretrained is not None:
        utils.load_entire_model(self, self.pretrained)
    else:
        for sublayer in self.sublayers():
            if isinstance(sublayer, nn.Conv2D):
                param_init.kaiming_normal_init(sublayer.weight)
            elif isinstance(sublayer, (nn.BatchNorm, nn.SyncBatchNorm)):
                param_init.constant_init(sublayer.weight, value=1.0)
                param_init.constant_init(sublayer.bias, value=0.0)
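Note the two loading utilities: load_pretrained_model (Example #4) is the name-matching loader typically used for a backbone inside a larger model, while load_entire_model (here) is intended for a checkpoint covering the whole model. In PaddleSeg-style utilities both tend to skip parameters whose names or shapes do not match, so the difference is mainly one of intent.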
Example #6
def init_weight(self):
    # Apply the learning-rate multiplier to all parameters except those
    # under the 'norms.' and 'mla.' submodules, which keep the base rate.
    for name, _param in self.named_parameters():
        if name.startswith('norms.') or name.startswith('mla.'):
            continue

        _param.optimize_attr['learning_rate'] = self.lr_multiple

    for layer in self.sublayers():
        if isinstance(layer, nn.LayerNorm):
            param_init.constant_init(layer.weight, value=1.0)
            param_init.constant_init(layer.bias, value=0.0)
Example #7
def init_params(self):
    # Kaiming init for convs, identity/zero for batch norms, and a
    # small-variance normal init for linear layers; zero all biases.
    for m in self.sublayers():
        if isinstance(m, nn.Conv2D):
            param_init.kaiming_normal_init(m.weight)
            if m.bias is not None:
                param_init.constant_init(m.bias, value=0.0)
        elif isinstance(m, nn.BatchNorm2D):
            param_init.constant_init(m.weight, value=1.0)
            param_init.constant_init(m.bias, value=0.0)
        elif isinstance(m, nn.Linear):
            param_init.normal_init(m.weight, std=0.001)
            if m.bias is not None:
                param_init.constant_init(m.bias, value=0.0)
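Finally, a sketch of how such a method is usually wired up: it is defined on the nn.Layer and called once at the end of __init__, so every parameter is initialized as soon as the module is built. The class name TinyHead and its channel counts are hypothetical, made up for illustration; only paddle and paddleseg.cvlibs.param_init are assumed.

import paddle.nn as nn
from paddleseg.cvlibs import param_init

class TinyHead(nn.Layer):
    def __init__(self, in_channels=64, num_classes=19):
        super().__init__()
        self.conv = nn.Conv2D(in_channels, num_classes, kernel_size=1)
        self.bn = nn.BatchNorm2D(num_classes)
        # Initialize all parameters right after the sublayers are created.
        self.init_weight()

    def init_weight(self):
        for layer in self.sublayers():
            if isinstance(layer, nn.Conv2D):
                param_init.normal_init(layer.weight, std=0.001)
            elif isinstance(layer, nn.BatchNorm2D):
                param_init.constant_init(layer.weight, value=1.0)
                param_init.constant_init(layer.bias, value=0.0)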