def __init__(self, groups=32, axis=-1, epsilon=1e-5, center=True, scale=True,
             beta_initializer='zeros', gamma_initializer='ones',
             beta_regularizer=None, gamma_regularizer=None,
             beta_constraint=None, gamma_constraint=None, **kwargs):
    super(GroupNormalization, self).__init__(**kwargs)
    self.supports_masking = True
    # Group-norm hyperparameters: how many groups the channel axis is
    # split into, which axis holds the channels, and the variance floor.
    self.groups = groups
    self.axis = axis
    self.epsilon = epsilon
    # Whether to learn a per-channel shift (beta) and scale (gamma).
    self.center = center
    self.scale = scale
    self.beta_initializer = initializers.get(beta_initializer)
    self.gamma_initializer = initializers.get(gamma_initializer)
    self.beta_regularizer = regularizers.get(beta_regularizer)
    self.gamma_regularizer = regularizers.get(gamma_regularizer)
    self.beta_constraint = constraints.get(beta_constraint)
    self.gamma_constraint = constraints.get(gamma_constraint)
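# A minimal NumPy reference (a sketch only; this layer's build()/call()
# are not shown here) of the computation the hyperparameters above drive,
# assuming channels-last (N, H, W, C) inputs:
import numpy as np

def group_norm_reference(x, groups=32, epsilon=1e-5, gamma=None, beta=None):
    """Normalize each group of channels by its own mean and variance."""
    n, h, w, c = x.shape
    g = min(groups, c)
    assert c % g == 0, 'channel count must be divisible by the group count'
    xg = x.reshape(n, h, w, g, c // g)
    mean = xg.mean(axis=(1, 2, 4), keepdims=True)
    var = xg.var(axis=(1, 2, 4), keepdims=True)
    out = ((xg - mean) / np.sqrt(var + epsilon)).reshape(n, h, w, c)
    if gamma is not None:  # per-channel scale, mirrors scale/gamma above
        out = out * gamma
    if beta is not None:   # per-channel shift, mirrors center/beta above
        out = out + beta
    return out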
def __init__(self, units,
             tied_to=None,  # layer whose kernel is reused (transposed) to enforce weight tying
             activation=None,
             use_bias=True,
             kernel_initializer='glorot_uniform',
             bias_initializer='zeros',
             kernel_regularizer=None,
             bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None,
             bias_constraint=None,
             **kwargs):
    if 'input_shape' not in kwargs and 'input_dim' in kwargs:
        kwargs['input_shape'] = (kwargs.pop('input_dim'),)
    super(DenseTransposeTied, self).__init__(**kwargs)
    self.units = units
    # Keep a reference to the tied layer and its weights so the kernel
    # can be reused (transposed) instead of creating a new one.
    if tied_to is None:
        raise ValueError('`tied_to` must be an already-built layer '
                         'whose kernel this layer reuses transposed.')
    self.tied_to = tied_to
    self.tied_weights = self.tied_to.weights
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.input_spec = InputSpec(min_ndim=2)
    self.supports_masking = True
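# Usage sketch (hypothetical shapes, Keras functional API). Because the
# constructor above reads `tied_to.weights` immediately, the tied layer
# must already be built, i.e. have been called on an input:
from keras.layers import Input, Dense
from keras.models import Model

inp = Input(shape=(784,))
encoder = Dense(64, activation='relu')
code = encoder(inp)  # calling the layer builds it and creates its kernel
decoder = DenseTransposeTied(784, tied_to=encoder, activation='sigmoid')
model = Model(inp, decoder(code))  # decoder reuses encoder's kernel, transposed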
def __init__(self, alpha_initializer='zeros', activity_regularizer=None,
             alpha_constraint=None, slope_constraint=None, slope=0.3,
             shared_axes=None, **kwargs):
    super(SparseLeakyReLU, self).__init__(**kwargs)
    self.supports_masking = True
    self.alpha_initializer = initializers.get(alpha_initializer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.alpha_constraint = constraints.get(alpha_constraint)
    # The leak slope is wrapped in a constant initializer so build() can
    # create it as a weight subject to `slope_constraint`.
    self.slope_initializer = initializers.constant(slope)
    self.slope_constraint = constraints.get(slope_constraint)
    # Axes along which the learnable parameters are shared (e.g. spatial axes).
    self.shared_axes = shared_axes
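# One plausible reading of the activation (an assumption; the layer's
# call() is not shown): a leaky ReLU whose knee sits at a learned
# threshold `alpha` rather than at zero, with leak `slope` below it.
# With alpha initialized to 'zeros' this reduces to LeakyReLU(0.3).
import numpy as np

def sparse_leaky_relu_reference(x, alpha=0.0, slope=0.3):
    return np.where(x > alpha, x, alpha + slope * (x - alpha))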
def __init__(self, alpha_initializer='zeros', activity_regularizer=None,
             alpha_constraint=None, **kwargs):
    super(SparseReLU, self).__init__(**kwargs)
    self.supports_masking = True
    self.alpha_initializer = initializers.get(alpha_initializer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.alpha_constraint = constraints.get(alpha_constraint)
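# Analogous assumption for the non-leaky variant: a ReLU shifted by a
# learned threshold `alpha`, which zeroes (sparsifies) activations below
# the threshold and reduces to a standard ReLU at the 'zeros' init.
import numpy as np

def sparse_relu_reference(x, alpha=0.0):
    return np.maximum(x - alpha, 0.0)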