def __init__(self, num_layers=36, feature_dim=128, shape=(96, 64)):
    super(SphereNet_float32, self).__init__()
    assert num_layers in [12, 20, 36, 64], 'SphereNet num_layers should be 12, 20, 36 or 64'
    # Per-depth configuration: residual blocks per stage, channel widths per
    # stage, and the flattened feature size feeding the embedding fc
    # (last-stage channels * 6 * 4 spatial positions after four stride-2 stages).
    if num_layers == 12:
        layers = [1, 1, 1, 1]
        filter_list = [3, 16, 32, 64, 128]
        fc_size = 128 * 6 * 4
    elif num_layers == 20:
        layers = [1, 2, 4, 1]
        filter_list = [3, 64, 128, 256, 512]
        fc_size = 512 * 6 * 4
    elif num_layers == 36:
        layers = [2, 4, 4, 2]
        filter_list = [3, 32, 64, 128, 256]
        fc_size = 256 * 6 * 4
    elif num_layers == 64:
        layers = [3, 7, 16, 3]
        filter_list = [3, 64, 128, 256, 512]
        fc_size = 512 * 6 * 4
    else:
        raise ValueError('sphere' + str(num_layers) +
                         ' IS NOT SUPPORTED! (sphere12, sphere20, sphere36 or sphere64)')
    self.shape = P.Shape()
    self.reshape = P.Reshape()
    block = BaseBlock
    self.layer1 = MakeLayer(block, filter_list[0], filter_list[1], layers[0], stride=2)
    self.layer2 = MakeLayer(block, filter_list[1], filter_list[2], layers[1], stride=2)
    self.layer3 = MakeLayer(block, filter_list[2], filter_list[3], layers[2], stride=2)
    self.layer4 = MakeLayer(block, filter_list[3], filter_list[4], layers[3], stride=2)
    self.fc = fc_with_initialize(fc_size, feature_dim)
    # The final batch norms are kept in fp32 even under mixed precision.
    self.last_bn = nn.BatchNorm1d(feature_dim, momentum=0.9).add_flags_recursive(fp32=True)
    self.last_bn_sub = nn.BatchNorm2d(feature_dim, momentum=0.9).add_flags_recursive(fp32=True)
    self.cast = P.Cast()
    self.l2norm = P.L2Normalize(axis=1)
    # Weight init: conv/dense cells that carry a bias get Kaiming-uniform
    # weights and a zero bias; bias-free cells get Xavier-uniform weights.
    for _, cell in self.cells_and_names():
        if isinstance(cell, (nn.Conv2d, nn.Dense)):
            if cell.bias is not None:
                cell.weight.set_data(initializer(
                    me_init.ReidKaimingUniform(a=math.sqrt(5), mode='fan_out'),
                    cell.weight.shape))
                cell.bias.set_data(initializer('zeros', cell.bias.shape))
            else:
                cell.weight.set_data(initializer(me_init.ReidXavierUniform(), cell.weight.shape))
    self.device_target = context.get_context('device_target')
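# A minimal usage sketch, not part of the original source: it assumes the full
# SphereNet_float32 class (including its construct method) and MindSpore are
# available, and that the network accepts NCHW inputs at the default
# (96, 64) resolution. Shown only to illustrate the expected shapes.
import numpy as np
from mindspore import Tensor

net = SphereNet_float32(num_layers=36, feature_dim=128, shape=(96, 64))
# Batch of 8 RGB crops at the assumed 96x64 input resolution.
dummy = Tensor(np.random.randn(8, 3, 96, 64).astype(np.float32))
embeddings = net(dummy)
print(embeddings.shape)  # expected: (8, 128), L2-normalized along axis 1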
def __init__(self, block, layers, args):
    super(FaceResNet, self).__init__()
    self.act_type = args.act_type
    self.inplanes = 64
    self.use_se = args.use_se
    self.conv1 = conv3x3(3, 64, stride=1)
    self.bn1 = bn_with_initialize(64, use_inference=args.inference)
    self.prelu = nn.PReLU(64) if self.act_type == 'prelu' else P.ReLU()
    self.layer1 = MakeLayer(block, planes=64, inplanes=self.inplanes, blocks=layers[0], stride=2, args=args)
    self.inplanes = 64
    self.layer2 = MakeLayer(block, planes=128, inplanes=self.inplanes, blocks=layers[1], stride=2, args=args)
    self.inplanes = 128
    self.layer3 = MakeLayer(block, planes=256, inplanes=self.inplanes, blocks=layers[2], stride=2, args=args)
    self.inplanes = 256
    self.layer4 = MakeLayer(block, planes=512, inplanes=self.inplanes, blocks=layers[3], stride=2, args=args)
    self.head = get_head(args)

    np.random.seed(1)
    for _, cell in self.cells_and_names():
        if isinstance(cell, nn.Conv2d):
            cell.weight.set_data(initializer(
                me_init.ReidKaimingUniform(a=math.sqrt(5), mode='fan_out'),
                cell.weight.shape))
            if cell.bias is not None:
                cell.bias.set_data(initializer('zeros', cell.bias.shape))
        elif isinstance(cell, nn.Dense):
            cell.weight.set_data(initializer(
                me_init.ReidKaimingNormal(a=math.sqrt(5), mode='fan_out'),
                cell.weight.shape))
            if cell.bias is not None:
                cell.bias.set_data(initializer('zeros', cell.bias.shape))
        elif isinstance(cell, (nn.BatchNorm2d, nn.BatchNorm1d)):
            # Default gamma is 1 and beta is 0; if you override them here,
            # be careful with the IRBlock gamma value set below.
            pass

    # Zero-initialize the gamma of the last batch norm in each IRBlock so each
    # residual branch starts as identity. Do not change the name bn3 unless the
    # last bn inside IRBlock is renamed to match.
    for _, cell in self.cells_and_names():
        if isinstance(cell, IRBlock):
            cell.bn3.gamma.set_data(initializer('zeros', cell.bn3.gamma.shape))
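# A minimal construction sketch, not from the original source: the depth list
# [3, 4, 14, 3] and the args fields below are assumptions inferred from the
# attributes this __init__ reads (act_type, use_se, inference); get_head(args)
# may require additional fields from the repo's real config object.
from types import SimpleNamespace

args = SimpleNamespace(act_type='prelu', use_se=True, inference=False)
net = FaceResNet(block=IRBlock, layers=[3, 4, 14, 3], args=args)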