def __init__(self, rank, filters, kernel_size, groups=1, strides=1,
             padding='VALID', data_format=None, dilation_rate=1,
             activation=None, use_bias=True,
             kernel_initializer='glorot_uniform', bias_initializer='zeros',
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None, kernel_constraint=None,
             bias_constraint=None, **kwargs):
    super().__init__(activity_regularizer=activity_regularizer, **kwargs)
    if filters % groups != 0:
        raise ValueError(
            "Groups must divide filters evenly, but got {}/{}".format(
                filters, groups))
    self.filters = filters
    self.groups = groups
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank,
                                                  'kernel_size')
    self.data_format = data_format
    self.padding = padding
    self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, rank,
                                                    'dilation_rate')
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
def __init__(self, filters, kernel_size, strides=(1, 1, 1), padding='valid',
             data_format=None, dilation_rate=(1, 1, 1), depth_multiplier=1,
             activation=None, use_bias=True,
             depthwise_initializer='glorot_uniform',
             pointwise_initializer='glorot_uniform', bias_initializer='zeros',
             depthwise_regularizer=None, pointwise_regularizer=None,
             bias_regularizer=None, activity_regularizer=None,
             depthwise_constraint=None, pointwise_constraint=None,
             bias_constraint=None, **kwargs):
    super(SeparableConv3D, self).__init__(
        filters=filters,
        kernel_size=kernel_size,
        strides=strides,
        padding=padding,
        data_format=data_format,
        dilation_rate=dilation_rate,
        activation=activation,
        use_bias=use_bias,
        bias_regularizer=bias_regularizer,
        activity_regularizer=activity_regularizer,
        bias_constraint=bias_constraint,
        **kwargs)
    self.depth_multiplier = depth_multiplier
    self.depthwise_initializer = initializers.get(depthwise_initializer)
    self.pointwise_initializer = initializers.get(pointwise_initializer)
    self.depthwise_regularizer = regularizers.get(depthwise_regularizer)
    self.pointwise_regularizer = regularizers.get(pointwise_regularizer)
    self.depthwise_constraint = constraints.get(depthwise_constraint)
    self.pointwise_constraint = constraints.get(pointwise_constraint)
def __init__(self, units, concat=False, use_bias=True, agg_method='mean',
             activation=None, kernel_initializer='glorot_uniform',
             bias_initializer='zeros', kernel_regularizer=None,
             bias_regularizer=None, activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None, **kwargs):
    super().__init__(**kwargs)
    self.units = units
    self.concat = concat
    self.use_bias = use_bias
    self.agg_method = agg_method
    self.aggregator = {
        'mean': tf.reduce_mean,
        'sum': tf.reduce_sum,
        'max': tf.reduce_max,
        'min': tf.reduce_min
    }[agg_method]
    self.activation = activations.get(activation)
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    if concat:
        self.output_dim = units * 2
    else:
        self.output_dim = units
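# Hedged sketch, not part of the layer above: one way the selected aggregator
# is typically applied in a GraphSAGE-style call(). The function name and the
# tensor shapes are illustrative assumptions, but they show why `concat=True`
# makes output_dim = units * 2.
import tensorflow as tf

def aggregate_neighbours_sketch(self_feats, neigh_feats,
                                aggregator=tf.reduce_mean, concat=False):
    # self_feats:  [num_nodes, units]                 projected node features
    # neigh_feats: [num_nodes, num_neighbours, units] projected neighbour features
    agg = aggregator(neigh_feats, axis=1)  # collapse the neighbour axis
    if concat:
        # Concatenating self and neighbour summaries doubles the feature width.
        return tf.concat([self_feats, agg], axis=-1)
    return self_feats + agg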
def __init__(self, units, activation='tanh', recurrent_activation='sigmoid',
             use_bias=True, kernel_initializer='glorot_uniform',
             recurrent_initializer='orthogonal', bias_initializer='zeros',
             unit_forget_bias=True, use_ln=False, kernel_regularizer=None,
             recurrent_regularizer=None, bias_regularizer=None,
             kernel_constraint=None, recurrent_constraint=None,
             bias_constraint=None, dropout=0., recurrent_dropout=0.,
             implementation=1, **kwargs):
    super().__init__(**kwargs)
    self.units = units
    self.activation = activations.get(activation)
    self.recurrent_activation = activations.get(recurrent_activation)
    self.use_bias = use_bias
    self.use_ln = use_ln
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.recurrent_initializer = initializers.get(recurrent_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.unit_forget_bias = unit_forget_bias
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.recurrent_regularizer = regularizers.get(recurrent_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.recurrent_constraint = constraints.get(recurrent_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.state_size = LSTMState(h=self.units, c=self.units)
    self.output_size = self.units
def __init__(self, units, mode='parallel', return_aweights=False,
             scaling_factor=None, noise_std=0,
             weights_initializer='he_normal', bias_initializer='zeros',
             weights_regularizer=None, bias_regularizer=None,
             weights_constraint=None, bias_constraint=None, **kwargs):
    if 'name' not in kwargs:
        kwargs['name'] = ""
    super(MonotonicBahdanauAttention, self).__init__(**kwargs)
    self.units = units
    self.mode = mode
    self.return_aweights = return_aweights
    self.scaling_factor = scaling_factor
    self.noise_std = noise_std
    self.weights_initializer = initializers.get(weights_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.weights_regularizer = regularizers.get(weights_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.weights_constraint = constraints.get(weights_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self._wa = layers.Dense(self.units, use_bias=False,
                            kernel_initializer=weights_initializer,
                            bias_initializer=bias_initializer,
                            kernel_regularizer=weights_regularizer,
                            bias_regularizer=bias_regularizer,
                            kernel_constraint=weights_constraint,
                            bias_constraint=bias_constraint,
                            name=self.name + "Wa")
    self._ua = layers.Dense(self.units,
                            kernel_initializer=weights_initializer,
                            bias_initializer=bias_initializer,
                            kernel_regularizer=weights_regularizer,
                            bias_regularizer=bias_regularizer,
                            kernel_constraint=weights_constraint,
                            bias_constraint=bias_constraint,
                            name=self.name + "Ua")
    self._va = layers.Dense(1, use_bias=False,
                            kernel_initializer=weights_initializer,
                            bias_initializer=bias_initializer,
                            kernel_regularizer=weights_regularizer,
                            bias_regularizer=bias_regularizer,
                            kernel_constraint=weights_constraint,
                            bias_constraint=bias_constraint,
                            name=self.name + "Va")
    self.supports_masking = True
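# Hedged sketch, for illustration only: how the _wa/_ua/_va projections above
# are usually combined into an additive (Bahdanau) energy, with optional
# scaling and pre-sigmoid noise as used in monotonic attention. Tensor names
# and shapes are assumptions, not taken from this class.
import tensorflow as tf

def bahdanau_energy_sketch(query, keys, wa, ua, va,
                           scaling_factor=None, noise_std=0.0):
    # query: [batch, query_dim], keys: [batch, timesteps, key_dim]
    energy = va(tf.nn.tanh(wa(tf.expand_dims(query, 1)) + ua(keys)))
    energy = tf.squeeze(energy, axis=-1)  # [batch, timesteps]
    if scaling_factor is not None:
        energy = scaling_factor * energy
    if noise_std:
        # Additive noise before the sigmoid pushes selection probabilities towards 0/1.
        energy = energy + tf.random.normal(tf.shape(energy), stddev=noise_std)
    return energy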
def __init__(self, units, activation='relu', use_bias=True,
             kernel_initializer='glorot_uniform', recurrent_initializer=None,
             bias_initializer='zeros', kernel_regularizer=None,
             recurrent_regularizer=None, bias_regularizer=None,
             kernel_constraint=None, recurrent_constraint=None,
             bias_constraint=None, dropout=0., recurrent_dropout=0.,
             use_batch_norm=False, **kwargs):
    super(IndRNNCell, self).__init__(**kwargs)
    self.units = units
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    if recurrent_initializer is None:
        self.recurrent_initializer = initializers.RandomUniform(-1.0, 1.0)
    else:
        self.recurrent_initializer = initializers.get(recurrent_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.recurrent_regularizer = regularizers.get(recurrent_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.recurrent_constraint = constraints.get(recurrent_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.dropout = min(1., max(0., dropout))
    self.recurrent_dropout = min(1., max(0., recurrent_dropout))
    self.use_batch_norm = use_batch_norm
    self.state_size = self.units
    self.output_size = self.units
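# Hedged sketch, an assumption rather than this cell's actual call(): the
# IndRNN update that these weights parameterise. The recurrent weight is a
# per-unit vector applied elementwise, so each unit has an independent
# recurrence (hence the RandomUniform(-1, 1) default above).
import tensorflow as tf

def indrnn_step_sketch(x_t, h_prev, kernel, recurrent_vector, bias,
                       activation=tf.nn.relu):
    # x_t: [batch, input_dim], h_prev: [batch, units]
    # kernel: [input_dim, units], recurrent_vector: [units], bias: [units]
    return activation(tf.matmul(x_t, kernel) + recurrent_vector * h_prev + bias)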
def __init__(self, units, use_bias=True, kernel_initializer='glorot_uniform',
             kernel_regularizer=None, bias_initializer=None,
             bias_regularizer=None, trainable=True, name=None, **kwargs):
    super(Dense_SN, self).__init__(name=name, trainable=trainable, **kwargs)
    self.units = units
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
def __init__(self, n_styles, epsilon=1e-3, beta_initializer="zeros",
             gamma_initializer="ones", beta_regularizer=None,
             gamma_regularizer=None, beta_constraint=None,
             gamma_constraint=None, **kwargs):
    super(ConditionalInstanceNormalization, self).__init__(**kwargs)
    self.n_styles = n_styles
    self.epsilon = epsilon
    self.beta_initializer = initializers.get(beta_initializer)
    self.gamma_initializer = initializers.get(gamma_initializer)
    self.beta_regularizer = regularizers.get(beta_regularizer)
    self.gamma_regularizer = regularizers.get(gamma_regularizer)
    self.beta_constraint = constraints.get(beta_constraint)
    self.gamma_constraint = constraints.get(gamma_constraint)
def __init__(self, n_classes=10, s=64, m=0.50, regularizer=None,
             use_fp16=False, **kwargs):
    super(ArcFace, self).__init__(**kwargs)
    self.n_classes = n_classes
    self.s = s
    self.m = m
    self.regularizer = regularizers.get(regularizer)
    self.use_fp16 = use_fp16
    self.cos_m = math.cos(m)
    self.sin_m = math.sin(m)
    self.mm = self.sin_m * m
    self.threshold = math.cos(math.pi - m)
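# Hedged sketch of how the precomputed constants above are normally used in
# the standard ArcFace formulation; this is an assumption for illustration,
# not necessarily this class's call(). The target-class cosine gets the
# additive angular margin, with a linearised fallback once theta + m > pi.
import tensorflow as tf

def arcface_target_logit_sketch(cos_theta, cos_m, sin_m, threshold, mm, s):
    sin_theta = tf.sqrt(tf.maximum(1.0 - tf.square(cos_theta), 0.0))
    cos_theta_m = cos_theta * cos_m - sin_theta * sin_m  # cos(theta + m)
    # Past the valid range of the margin, fall back to cos(theta) - mm.
    cos_theta_m = tf.where(cos_theta > threshold, cos_theta_m, cos_theta - mm)
    return s * cos_theta_m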
def __init__(self, units: int, kernel_initializer, kernel_regularizer=None,
             kernel_constraint=None, **kwargs):
    super(MultivariateGaussianNoise, self).__init__(**kwargs)
    self.units = units
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
def __init__(self, trainable=True, initializer='ones', regularizer=None,
             constraint=None, **kwargs):
    super().__init__(**kwargs)
    self.trainable = trainable
    self.initializer = initializers.get(initializer)
    self.regularizer = regularizers.get(regularizer)
    self.constraint = constraints.get(constraint)
def __init__(self, tau_regularizer=None, **kwargs):
    """Initialize the TLU layer.

    Args:
        tau_regularizer: tf.keras.regularizer for tau.
        **kwargs: keyword arguments passed to the Keras layer base class.
    """
    self.tau_regularizer = regularizers.get(tau_regularizer)
    super(TLU, self).__init__(**kwargs)
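# Hedged sketch (assumption): the activation that the regularised tau above
# parameterises, a Thresholded Linear Unit with a learnable threshold.
import tensorflow as tf

def tlu_forward_sketch(x, tau):
    return tf.maximum(x, tau)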
def __init__(self, n_classes=10, s=10.0, m=0.50, regularizer=None, **kwargs):
    self.n_classes = n_classes
    self.s = tf.dtypes.cast(s, dtype=K.floatx())
    self.m = tf.dtypes.cast(m, dtype=K.floatx())
    self.cos_m = tf.dtypes.cast(math.cos(m), dtype=K.floatx())
    self.sin_m = tf.dtypes.cast(math.sin(m), dtype=K.floatx())
    self.threshold = tf.dtypes.cast(math.cos(math.pi - m), dtype=K.floatx())
    self.mm = tf.dtypes.cast(math.sin(m) * m, dtype=K.floatx())
    self.regularizer = regularizers.get(regularizer)
    super(ArcFace, self).__init__(**kwargs)
def __init__(self, n_classes=10, s=30.0, m=0.50, regularizer=None, **kwargs):
    super(ArcFace, self).__init__(**kwargs)
    self.n_classes = n_classes
    self.s = s
    self.m = m
    self.regularizer = regularizers.get(regularizer)
def __init__(self, initializer='glorot_uniform', activation='sigmoid',
             activity_regularizer=None, **kwargs):
    super(RawWeights, self).__init__(**kwargs)
    self.activation = activations.get(activation)
    self.initializer = initializers.get(initializer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.supports_masking = False
def __init__(self, attn_kernel_initializer='glorot_uniform',
             attn_kernel_regularizer=None, attn_kernel_constraint=None,
             **kwargs):
    super().__init__(**kwargs)
    self.attn_kernel_initializer = initializers.get(attn_kernel_initializer)
    self.attn_kernel_regularizer = regularizers.get(attn_kernel_regularizer)
    self.attn_kernel_constraint = constraints.get(attn_kernel_constraint)
def __init__(self, embedding_dim=10, regularizer='l2', trainable=True,
             numerical_embedding=True, **kwargs):
    super(Embedding, self).__init__(**kwargs)
    self._regularizer = regularizers.get(regularizer)
    self._embedding_dim = embedding_dim
    self._trainable = trainable
    self._numerical_embedding = numerical_embedding
def __init__(self, n_classes=10, enhance=64.0, penalty=0.50, regularizer=None,
             **kwargs):
    super(ArcFace, self).__init__(**kwargs)
    self.n_classes = n_classes
    self.s = enhance
    self.m = penalty
    self.regularizer = regularizers.get(regularizer)
def __init__(self, nb_gaussian, init='normal', weights=None,
             W_regularizer=None, activity_regularizer=None,
             W_constraint=None, **kwargs):
    self.nb_gaussian = nb_gaussian
    # self.init = initializers.get(init, dim_ordering='th')
    self.init = initializers.get(init)  # new version: no dim_ordering argument
    self.W_regularizer = regularizers.get(W_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.W_constraint = constraints.get(W_constraint)
    self.input_spec = [InputSpec(ndim=4)]
    self.initial_weights = weights
    super(LearningPrior, self).__init__(**kwargs)
def __init__(self, W_regularizer=None, u_regularizer=None, b_regularizer=None,
             W_constraint=None, u_constraint=None, b_constraint=None,
             bias=True, return_attention=False, **kwargs):
    self.supports_masking = True
    self.return_attention = return_attention
    self.init = initializers.get('glorot_uniform')
    self.W_regularizer = regularizers.get(W_regularizer)
    self.u_regularizer = regularizers.get(u_regularizer)
    self.b_regularizer = regularizers.get(b_regularizer)
    self.W_constraint = constraints.get(W_constraint)
    self.u_constraint = constraints.get(u_constraint)
    self.b_constraint = constraints.get(b_constraint)
    self.bias = bias
    super(AttentionWithContext, self).__init__(**kwargs)
def __init__(self, axis=-1, momentum=0.99, center=True, scale=True,
             epsilon=1e-3, r_max_value=3., d_max_value=5., t_delta=1.,
             weights=None, beta_initializer='zero', gamma_initializer='one',
             moving_mean_initializer='zeros',
             moving_variance_initializer='ones', gamma_regularizer=None,
             beta_regularizer=None, beta_constraint=None,
             gamma_constraint=None, **kwargs):
    self.supports_masking = True
    self.axis = axis
    self.epsilon = epsilon
    self.center = center
    self.scale = scale
    self.momentum = momentum
    self.gamma_regularizer = regularizers.get(gamma_regularizer)
    self.beta_regularizer = regularizers.get(beta_regularizer)
    self.initial_weights = weights
    self.r_max_value = r_max_value
    self.d_max_value = d_max_value
    self.t_delta = t_delta
    self.beta_initializer = initializers.get(beta_initializer)
    self.gamma_initializer = initializers.get(gamma_initializer)
    self.moving_mean_initializer = initializers.get(moving_mean_initializer)
    self.moving_variance_initializer = initializers.get(
        moving_variance_initializer)
    self.beta_constraint = constraints.get(beta_constraint)
    self.gamma_constraint = constraints.get(gamma_constraint)
    super(BatchRenormalization, self).__init__(**kwargs)
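# Hedged sketch (assumption): the Batch Renormalization correction factors
# that r_max_value and d_max_value bound. The batch and moving statistics are
# placeholders for illustration; gradients are stopped through r and d as in
# the original formulation.
import tensorflow as tf

def batch_renorm_correction_sketch(batch_mean, batch_std,
                                   moving_mean, moving_std, r_max, d_max):
    r = tf.stop_gradient(tf.clip_by_value(batch_std / moving_std,
                                          1.0 / r_max, r_max))
    d = tf.stop_gradient(tf.clip_by_value((batch_mean - moving_mean) / moving_std,
                                          -d_max, d_max))
    # The renormalised activation is then x_hat = ((x - batch_mean) / batch_std) * r + d.
    return r, d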
def __init__(self, head_num, name="attention", activation='relu',
             use_bias=True, kernel_initializer='glorot_normal',
             bias_initializer='zeros', kernel_regularizer=None,
             bias_regularizer=None, kernel_constraint=None,
             bias_constraint=None, history_only=False, **kwargs):
    """Initialize the layer.

    :param head_num: Number of heads.
    :param activation: Activations for linear mappings.
    :param use_bias: Whether to use bias term.
    :param kernel_initializer: Initializer for linear mappings.
    :param bias_initializer: Initializer for linear mappings.
    :param kernel_regularizer: Regularizer for linear mappings.
    :param bias_regularizer: Regularizer for linear mappings.
    :param kernel_constraint: Constraints for linear mappings.
    :param bias_constraint: Constraints for linear mappings.
    :param history_only: Whether to only use history in attention layer.
    """
    self.supports_masking = True
    self.head_num = head_num
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.history_only = history_only
    self.Wq, self.Wk, self.Wv, self.Wo = None, None, None, None
    self.bq, self.bk, self.bv, self.bo = None, None, None, None
    super(MultiHeadAttention, self).__init__(name=name, **kwargs)
def __init__(self, filters, kernel_size, strides=(1, 1), padding='valid',
             data_format=None, dilation_rate=(1, 1), activation=None,
             use_bias=True, kernel_initializer='glorot_uniform',
             bias_initializer='zeros', kernel_regularizer=None,
             bias_regularizer=None, activity_regularizer=None,
             kernel_constraint=None, spectral_normalization=True,
             bias_constraint=None, **kwargs):
    if data_format is None:
        data_format = K.image_data_format()
    super(Conv2D, self).__init__(
        filters=filters,
        kernel_size=kernel_size,
        strides=strides,
        padding=padding,
        data_format=data_format,
        dilation_rate=dilation_rate,
        activation=activations.get(activation),
        use_bias=use_bias,
        kernel_initializer=initializers.get(kernel_initializer),
        bias_initializer=initializers.get(bias_initializer),
        kernel_regularizer=regularizers.get(kernel_regularizer),
        bias_regularizer=regularizers.get(bias_regularizer),
        activity_regularizer=regularizers.get(activity_regularizer),
        kernel_constraint=constraints.get(kernel_constraint),
        bias_constraint=constraints.get(bias_constraint),
        **kwargs)
    self.u = K.random_normal_variable([1, filters], 0, 1, dtype=self.dtype,
                                      name="sn_estimate")  # [1, out_channels]
    self.spectral_normalization = spectral_normalization
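# Hedged sketch (assumption about how the persistent `u` variable is typically
# used): one power-iteration step estimating the kernel's largest singular
# value, SN-GAN style. This is an illustration, not this layer's actual call().
import tensorflow as tf

def spectral_norm_sketch(kernel, u):
    # kernel: conv kernel reshaped to [-1, out_channels]; u: [1, out_channels]
    w = tf.reshape(kernel, [-1, kernel.shape[-1]])
    v = tf.math.l2_normalize(tf.matmul(u, w, transpose_b=True))  # [1, rows]
    u_new = tf.math.l2_normalize(tf.matmul(v, w))                # [1, out_channels]
    sigma = tf.matmul(tf.matmul(v, w), u_new, transpose_b=True)  # estimated spectral norm
    return kernel / sigma, u_new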
def __init__(self, rank, filters, kernel_size, strides=1, padding='valid',
             data_format=None, dilation_rate=1, activation=None,
             use_bias=True, kernel_initializer='glorot_uniform',
             bias_initializer='zeros', kernel_regularizer=None,
             bias_regularizer=None, activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             spectral_normalization=True, **kwargs):
    super(_ConvSN, self).__init__(**kwargs)
    self.rank = rank
    self.filters = filters
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank,
                                                  'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = conv_utils.normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, rank,
                                                    'dilation_rate')
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.input_spec = InputSpec(ndim=self.rank + 2)
    self.spectral_normalization = spectral_normalization
    self.u = None
def __init__(self, axis=-1, momentum=0.9, epsilon=1e-4, center=True,
             scale=True, beta_initializer='zeros',
             gamma_diag_initializer='sqrt_init',
             gamma_off_initializer='zeros',
             moving_mean_initializer='zeros',
             moving_variance_initializer='sqrt_init',
             moving_covariance_initializer='zeros', beta_regularizer=None,
             gamma_diag_regularizer=None, gamma_off_regularizer=None,
             beta_constraint=None, gamma_diag_constraint=None,
             gamma_off_constraint=None, **kwargs):
    super(ComplexBatchNormalization, self).__init__(**kwargs)
    self.supports_masking = True
    self.axis = axis
    self.momentum = momentum
    self.epsilon = epsilon
    self.center = center
    self.scale = scale
    self.beta_initializer = sanitizedInitGet(beta_initializer)
    self.gamma_diag_initializer = sanitizedInitGet(gamma_diag_initializer)
    self.gamma_off_initializer = sanitizedInitGet(gamma_off_initializer)
    self.moving_mean_initializer = sanitizedInitGet(moving_mean_initializer)
    self.moving_variance_initializer = sanitizedInitGet(
        moving_variance_initializer)
    self.moving_covariance_initializer = sanitizedInitGet(
        moving_covariance_initializer)
    self.beta_regularizer = regularizers.get(beta_regularizer)
    self.gamma_diag_regularizer = regularizers.get(gamma_diag_regularizer)
    self.gamma_off_regularizer = regularizers.get(gamma_off_regularizer)
    self.beta_constraint = constraints.get(beta_constraint)
    self.gamma_diag_constraint = constraints.get(gamma_diag_constraint)
    self.gamma_off_constraint = constraints.get(gamma_off_constraint)
def __init__(self, filters, kernel_size, activation=None, use_bias=True,
             kernel_initializer='uniform', bias_initializer='zeros',
             kernel_regularizer=None, bias_regularizer=None, normalize=False,
             offset=None, in_channels=None, **kwargs):
    from tensorflow.keras import activations, initializers, regularizers
    self.filters = filters
    self.kernel_size = kernel_size
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.normalize = normalize
    if not (np.asarray(kernel_size) == kernel_size[0]).all():
        raise Exception("Only cubic kernel sizes are supported.")
    if offset is None:
        if kernel_size[0] % 2:
            self.offset = tf.zeros(shape=(3,))
        else:
            self.offset = tf.fill([3], -0.5)
    else:
        self.offset = offset
    self.fixed_radius_search = FixedRadiusSearch(metric='Linf',
                                                 ignore_query_point=False,
                                                 return_distances=False)
    super().__init__(**kwargs)
def _get_regularisers_from_keywords(self, kwargs):
    self.kernel_initializer = initializers.get(
        kwargs.pop("kernel_initializer", "glorot_uniform"))
    self.kernel_regularizer = regularizers.get(
        kwargs.pop("kernel_regularizer", None))
    self.kernel_constraint = constraints.get(
        kwargs.pop("kernel_constraint", None))
    self.bias_initializer = initializers.get(
        kwargs.pop("bias_initializer", "zeros"))
    self.bias_regularizer = regularizers.get(
        kwargs.pop("bias_regularizer", None))
    self.bias_constraint = constraints.get(
        kwargs.pop("bias_constraint", None))
    self.attn_kernel_initializer = initializers.get(
        kwargs.pop("attn_kernel_initializer", "glorot_uniform"))
    self.attn_kernel_regularizer = regularizers.get(
        kwargs.pop("attn_kernel_regularizer", None))
    self.attn_kernel_constraint = constraints.get(
        kwargs.pop("attn_kernel_constraint", None))
def __init__(self, kernel_initializer='ones', kernel_regularizer=None,
             kernel_constraint=regularizers.l1_l2(l1=1e-3, l2=1e-3), **kwargs):
    if 'input_shape' not in kwargs and 'input_dim' in kwargs:
        kwargs['input_shape'] = (kwargs.pop('input_dim'),)
    super(DFS, self).__init__(**kwargs)
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.supports_masking = True
def __init__(self, reg_epsilon=1.0e-6, tau_regularizer=None,
             beta_regularizer=None, gamma_regularizer=None, **kwargs):
    """Initialize the FRN layer.

    Args:
        reg_epsilon: float, the regularization parameter preventing a
            division by zero.
        tau_regularizer: tf.keras.regularizer for tau.
        beta_regularizer: tf.keras.regularizer for beta.
        gamma_regularizer: tf.keras.regularizer for gamma.
        **kwargs: keyword arguments passed to the Keras layer base class.
    """
    self.reg_epsilon = reg_epsilon
    self.tau_regularizer = regularizers.get(tau_regularizer)
    self.beta_regularizer = regularizers.get(beta_regularizer)
    self.gamma_regularizer = regularizers.get(gamma_regularizer)
    super(FRN, self).__init__(**kwargs)
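# Hedged sketch (assumption): the Filter Response Normalization forward pass
# that reg_epsilon, gamma and beta parameterise. nu2 is the per-channel mean
# of squares over the spatial dimensions; channels-last layout is assumed.
import tensorflow as tf

def frn_forward_sketch(x, gamma, beta, reg_epsilon=1e-6):
    # x: [batch, height, width, channels]
    nu2 = tf.reduce_mean(tf.square(x), axis=[1, 2], keepdims=True)
    x = x * tf.math.rsqrt(nu2 + reg_epsilon)
    return gamma * x + beta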
def __init__(self, step_dim, W_regularizer=None, b_regularizer=None,
             W_constraint=None, b_constraint=None, bias=True, **kwargs):
    self.supports_masking = True
    self.init = initializers.get('glorot_uniform')
    self.W_regularizer = regularizers.get(W_regularizer)
    self.b_regularizer = regularizers.get(b_regularizer)
    self.W_constraint = constraints.get(W_constraint)
    self.b_constraint = constraints.get(b_constraint)
    self.bias = bias
    self.step_dim = step_dim
    self.features_dim = 0
    super(Attention, self).__init__(**kwargs)