def __init__(self, num_features, cast_func=void_cast_func, eps=1e-05, momentum=0.1, n_train_sample=1):
    """Build a bit-centering 2d batch-norm layer.

    The layer is initialized in two stages: first the bit-center
    bookkeeping (full-precision functional, low-precision delta
    functional, cast function, gradient caches), then the standard
    ``BatchNorm2d`` state (affine parameters and running statistics).

    Args:
        num_features: number of channels normalized by the layer.
        cast_func: cast used to produce the low-precision delta copies.
        eps: numerical-stability constant added to the variance.
        momentum: running-statistics update momentum.
        n_train_sample: number of training samples backing the caches.
    """
    # Bit-center plumbing: full-precision path uses F.batch_norm, the
    # low-precision delta path uses bit_center_batch_norm2d.
    BitCenterLayer.__init__(
        self,
        fp_functional=F.batch_norm,
        lp_functional=bit_center_batch_norm2d,
        bias=True,
        cast_func=cast_func,
        n_train_sample=n_train_sample)
    # Standard batch-norm state (weight/bias + running mean/var).
    BatchNorm2d.__init__(
        self,
        num_features=num_features,
        eps=eps,
        momentum=momentum,
        affine=True,
        track_running_stats=True)
    # Delta (low-precision) copies of the affine parameters ...
    self.setup_bit_center_vars()
    # ... and of the running statistics.
    self.setup_bit_center_stat()
    self.cuda()
    # Initialize only the delta parts here; the offset parts were
    # already initialized by the BatchNorm2d base constructor above.
    self.reset_parameters_bit_center()
    self.reset_stat_bit_center()
    # Cache grad outputs on the backward pass for the delta step.
    self.register_backward_hook(self.update_grad_output_cache)
def __init__(self, num_channels, eps=1e-5, **kwargs):
    """Thin adapter over ``BatchNorm2d.__init__``.

    Accepts ``num_channels`` (forwarded as ``num_features``) so this
    class can be used where a channel-count keyword is expected; any
    extra keyword arguments pass through to ``BatchNorm2d`` unchanged.
    """
    BatchNorm2d.__init__(
        self,
        num_features=num_channels,
        eps=eps,
        **kwargs)