def init_weights(self):
    """Initialize the fc heads and the fc branch of this module.

    Conv layers are assumed to be initialized already by ``ConvModule``,
    so only the fully-connected layers are handled here: small normal
    init for the cls/reg heads, Xavier-uniform for the fc branch.
    """
    # conv layers are already initialized by ConvModule
    normal_init(self.fc_cls, std=0.01)
    # regression head uses a tighter std than the classification head
    normal_init(self.fc_reg, std=0.001)
    for m in self.fc_branch.modules():
        if isinstance(m, nn.Linear):
            xavier_init(m, distribution='uniform')
def init_weights(self, pretrained=None):
    """Initialize weights, optionally loading from a checkpoint.

    Args:
        pretrained (str, optional): Path to a pretrained checkpoint.
            If ``None``, conv layers get Xavier init and BatchNorm
            layers are set to weight 1.

    Raises:
        TypeError: If ``pretrained`` is neither ``str`` nor ``None``.
    """
    if isinstance(pretrained, str):
        logger = get_root_logger()
        # strict=False: allow partial key matches with the checkpoint
        load_checkpoint(self, pretrained, strict=False, logger=logger)
    elif pretrained is None:
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                xavier_init(m)
            elif isinstance(m, nn.BatchNorm2d):
                constant_init(m, 1)
    else:
        # previously an invalid type silently skipped all initialization
        raise TypeError('pretrained must be a str or None, '
                        f'but got {type(pretrained)}')
def init_weights(self):
    """Init weights for the module.

    Applies Xavier initialization to every conv layer; other module
    types keep their default initialization.
    """
    for m in self.modules():
        if isinstance(m, nn.Conv2d):
            xavier_init(m)
def init_weights(self):
    """Initialize weights of the module.

    Applies Xavier initialization to every conv layer; other module
    types keep their default initialization.
    """
    for m in self.modules():
        if isinstance(m, nn.Conv2d):
            xavier_init(m)