def __init__(self,
             axis=-1,
             momentum=0.99,
             epsilon=1e-3,
             center=True,
             scale=True,
             beta_initializer='zeros',
             gamma_initializer='ones',
             moving_mean_initializer='zeros',
             moving_variance_initializer='ones',
             beta_regularizer=None,
             gamma_regularizer=None,
             name=None,
             **kwargs):
    """Create a ``BatchNormalization`` layer.

    Parameters
    ----------
    axis : int, optional, default=-1
        The channel axis.
    momentum : float, optional, default=0.99
        The decay factor of the running averages.
    epsilon : float, optional, default=1e-3
        The small value added to the variance to avoid dividing by zero.
    center : bool, optional, default=True
        ``False`` to freeze the ``beta`` tensor.
    scale : bool, optional, default=True
        ``False`` to freeze the ``gamma`` tensor.
    beta_initializer : Union[callable, str], optional
        The initializer for the beta tensor.
    gamma_initializer : Union[callable, str], optional
        The initializer for the gamma tensor.
    moving_mean_initializer : Union[callable, str], optional
        The initializer for the moving mean tensor.
    moving_variance_initializer : Union[callable, str], optional
        The initializer for the moving variance tensor.
    beta_regularizer : Union[callable, str], optional
        The regularizer for the beta tensor.
    gamma_regularizer : Union[callable, str], optional
        The regularizer for the gamma tensor.
    name : str, optional
        The optional layer name.

    """
    super(BatchNormalization, self).__init__(name=name, **kwargs)
    self.axis = axis
    self.momentum = momentum
    self.epsilon = epsilon
    self.center = center
    self.scale = scale
    self.beta_initializer = initializers.get(beta_initializer)
    self.gamma_initializer = initializers.get(gamma_initializer)
    self.moving_mean_initializer = initializers.get(
        moving_mean_initializer)
    self.moving_variance_initializer = initializers.get(
        moving_variance_initializer)
    self.beta_regularizer = regularizers.get(beta_regularizer)
    self.gamma_regularizer = regularizers.get(gamma_regularizer)
    self.beta = None
    self.gamma = None
    self.moving_mean = None
    self.moving_variance = None
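# Usage sketch (illustrative, not part of the original module): assuming the
# standard Keras calling convention where a layer instance is applied to a
# tensor, `x` below is a hypothetical channels-last input.
#
#   bn = BatchNormalization(axis=-1, momentum=0.99, epsilon=1e-3)
#   y = bn(x)  # normalize `x` over its last (channel) axis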
def __init__(self,
             filters,
             kernel_size,
             strides=1,
             padding='valid',
             data_format='channels_last',
             dilation_rate=1,
             activation=None,
             use_bias=True,
             kernel_initializer='glorot_uniform',
             bias_initializer='zeros',
             kernel_regularizer=None,
             bias_regularizer=None,
             **kwargs):
    """Create a ``Conv3D`` layer.

    Parameters
    ----------
    filters : int
        The number of output filters.
    kernel_size : Union[int, Sequence[int]]
        The shape of the convolution window.
    strides : Union[int, Sequence[int]], optional, default=1
        The stride of the convolution window.
    padding : Union[str, Sequence[int]], optional
        The padding algorithm or padding size.
    data_format : str, optional, default='channels_last'
        ``'channels_first'`` or ``'channels_last'``.
    dilation_rate : Union[int, Sequence[int]], optional, default=1
        The rate of dilated convolution.
    activation : Union[callable, str], optional
        The optional activation function.
    use_bias : bool, optional, default=True
        Whether to add a bias tensor to the output.
    kernel_initializer : Union[callable, str], optional
        The initializer for the kernel tensor.
    bias_initializer : Union[callable, str], optional
        The initializer for the bias tensor.
    kernel_regularizer : Union[callable, str], optional
        The regularizer for the kernel tensor.
    bias_regularizer : Union[callable, str], optional
        The regularizer for the bias tensor.

    """
    super(Conv3D, self).__init__(
        rank=3,
        filters=filters,
        kernel_size=kernel_size,
        strides=strides,
        padding=padding,
        data_format=data_format,
        dilation_rate=dilation_rate,
        activation=activations.get(activation),
        use_bias=use_bias,
        kernel_initializer=initializers.get(kernel_initializer),
        bias_initializer=initializers.get(bias_initializer),
        kernel_regularizer=regularizers.get(kernel_regularizer),
        bias_regularizer=regularizers.get(bias_regularizer),
        **kwargs)
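# Usage sketch (illustrative, not part of the original module): a hypothetical
# 3-D convolution over a 5-D channels-last input of shape
# (batch, depth, height, width, channels).
#
#   conv = Conv3D(filters=64, kernel_size=3, padding='same', activation='relu')
#   y = conv(x)  # `x` is a hypothetical 5-D input tensor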
def __init__(self,
             units,
             activation=None,
             use_bias=True,
             kernel_initializer='glorot_uniform',
             bias_initializer='zeros',
             kernel_regularizer=None,
             bias_regularizer=None,
             **kwargs):
    """Create a ``Dense`` layer.

    Parameters
    ----------
    units : int
        The number of output units.
    activation : Union[callable, str], optional
        The optional activation function.
    use_bias : bool, optional, default=True
        Whether to add a bias tensor to the output.
    kernel_initializer : Union[callable, str], optional
        The initializer for the kernel tensor.
    bias_initializer : Union[callable, str], optional
        The initializer for the bias tensor.
    kernel_regularizer : Union[callable, str], optional
        The regularizer for the kernel tensor.
    bias_regularizer : Union[callable, str], optional
        The regularizer for the bias tensor.

    """
    super(Dense, self).__init__(**kwargs)
    self.input_dim = kwargs.get('input_dim', None)
    self.units = int(units)
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.input_spec = InputSpec(min_ndim=2)
    self.kernel = None
    self.bias = None
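# Usage sketch (illustrative, not part of the original module): a fully
# connected projection, assuming the standard Keras call convention.
#
#   fc = Dense(units=128, activation='relu')
#   y = fc(x)  # `x`: (batch, in_features) -> `y`: (batch, 128)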
def add_weight(self,
               name=None,
               shape=None,
               dtype=None,
               initializer=None,
               regularizer=None,
               trainable=True,
               use_resource=None,
               **kwargs):
    """Add a new variable as the weight.

    Parameters
    ----------
    name : str, optional
        The optional variable name.
    shape : Sequence[int], optional
        The variable shape.
    dtype : str, optional
        The optional data type.
    initializer : Union[callable, str], optional
        The optional initializer.
    regularizer : Union[callable, str], optional
        The optional regularizer.
    trainable : bool, optional, default=True
        ``True`` to add to the ``trainable`` collection.
    use_resource : bool, optional, default=True
        ``True`` to set as a ``ResourceVariable``.

    Returns
    -------
    Variable
        The created weight variable.

    """
    if shape is None:
        shape = ()
    initializer = initializers.get(initializer)
    regularizer = regularizers.get(regularizer)
    # Determine the data type.
    if dtype is None:
        dtype = self.dtype or dtypes.float32
    dtype = dtypes.as_dtype(dtype)
    # Determine the variable flags.
    trainable = True if trainable is None else trainable
    use_resource = True if use_resource is None else use_resource
    # Determine the initializer from the data type if not given.
    if initializer is None:
        if dtype.is_floating:
            initializer = initializers.glorot_uniform()
        elif dtype.is_integer or dtype.is_unsigned or dtype.is_bool:
            initializer = initializers.zeros()
        else:
            raise ValueError('Expected an initializer to be set for variable.')
    variable = tf_variables.get_variable(
        name=name,
        shape=shape,
        initializer=initializer,
        regularizer=regularizer,
        dtype=dtype,
        trainable=trainable,
        use_resource=use_resource,
    )
    if trainable:
        self._trainable_weights.append(variable)
    else:
        self._non_trainable_weights.append(variable)
    return variable
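# Usage sketch (illustrative, not part of the original module): how a layer's
# ``build`` method might create its parameters through ``add_weight``. The
# shape below is hypothetical.
#
#   def build(self, input_shape):
#       self.kernel = self.add_weight(
#           name='kernel',
#           shape=(int(input_shape[-1]), self.units),
#           initializer=self.kernel_initializer,
#           regularizer=self.kernel_regularizer,
#           trainable=True)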
def __init__(
    self,
    rank,
    filters,
    kernel_size,
    strides=1,
    padding='valid',
    data_format='channels_last',
    dilation_rate=1,
    activation=None,
    use_bias=True,
    kernel_initializer='glorot_uniform',
    bias_initializer='zeros',
    kernel_regularizer=None,
    bias_regularizer=None,
    trainable=True,
    name=None,
    **kwargs,
):
    """Create a ``Conv`` layer.

    Parameters
    ----------
    rank : int
        The number of spatial axes.
    filters : int
        The number of output filters.
    kernel_size : Union[int, Sequence[int]]
        The shape of the convolution window.
    strides : Union[int, Sequence[int]], optional, default=1
        The stride of the convolution window.
    padding : Union[int, Sequence[int], str], optional
        The padding algorithm or padding size.
    data_format : str, optional, default='channels_last'
        ``'channels_first'`` or ``'channels_last'``.
    dilation_rate : Union[int, Sequence[int]], optional, default=1
        The rate of dilated convolution.
    activation : Union[callable, str], optional
        The optional activation function.
    use_bias : bool, optional, default=True
        Whether to add a bias tensor to the output.
    kernel_initializer : Union[callable, str], optional
        The initializer for the kernel tensor.
    bias_initializer : Union[callable, str], optional
        The initializer for the bias tensor.
    kernel_regularizer : Union[callable, str], optional
        The regularizer for the kernel tensor.
    bias_regularizer : Union[callable, str], optional
        The regularizer for the bias tensor.
    trainable : bool, optional, default=True
        Whether the layer's weights are trainable.
    name : str, optional
        The optional layer name.

    """
    super(Conv, self).__init__(trainable=trainable, name=name, **kwargs)
    self.rank = rank
    self.filters = filters
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank)
    self.strides = conv_utils.normalize_tuple(strides, rank)
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = conv_utils.normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, rank)
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.input_spec = InputSpec(ndim=self.rank + 2)
    self.conv_function = kwargs.get('conv_function', nn_ops.convolution)
    self.kernel = None
    self.bias = None
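# Subclassing sketch (illustrative, not part of the original module): a
# hypothetical ``Conv1D`` could forward its arguments with ``rank=1``,
# mirroring how ``Conv3D`` above passes ``rank=3``.
#
#   class Conv1D(Conv):
#       def __init__(self, filters, kernel_size, **kwargs):
#           super(Conv1D, self).__init__(
#               rank=1, filters=filters, kernel_size=kernel_size, **kwargs)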