Example 1
0
 def __init__(self,
              units,
              num_chunks,
              use_bias=True,
              kernel_initializer='glorot_uniform',
              recurrent_initializer='orthogonal',
              bias_initializer='zeros',
              kernel_regularizer=None,
              recurrent_regularizer=None,
              bias_regularizer=None,
              activation=None,
              **kwargs):
      """Store the cell's hyper-parameters and resolve its weight specs.

      `units` is both the state size and the output size of the cell.
      The initializer/regularizer arguments accept strings, configs or
      instances and are resolved to concrete objects here; the weight
      variables themselves are not created yet (kernel, recurrent and
      bias stay None until the layer is built).
      """
      super(RNNCellBase, self).__init__(activation=activation, **kwargs)
      self.units = units
      self.num_chunks = num_chunks
      self.use_bias = use_bias
      # Resolve every spec (string / config / object) into an instance.
      for attr, spec, resolve in (
              ('kernel_initializer', kernel_initializer, initializers.get),
              ('recurrent_initializer', recurrent_initializer,
               initializers.get),
              ('bias_initializer', bias_initializer, initializers.get),
              ('kernel_regularizer', kernel_regularizer, regularizers.get),
              ('recurrent_regularizer', recurrent_regularizer,
               regularizers.get),
              ('bias_regularizer', bias_regularizer, regularizers.get)):
          setattr(self, attr, resolve(spec))
      # One vector of `units` values is carried and emitted per step.
      self.state_size = self.units
      self.output_size = self.units
      # Placeholders until the weights are actually created.
      self.kernel = self.recurrent = self.bias = None
Example 2
0
 def __init__(self,
              kernel_size,
              depth_multiplier=1,
              strides=(1, 1),
              padding='SAME',
              dilation_rate=(1, 1),
              use_bias=True,
              data_format='channels_last',
              depthwise_initializer='truncated_normal',
              depthwise_regularizer=None,
              bias_initializer='zeros',
              bias_regularizer=None,
              activation=None,
              **kwargs):
      """Configure a depthwise 2-D convolution layer.

      `out_channels` is passed to the parent as None — presumably the
      parent derives the output depth from the input depth and
      `depth_multiplier` (confirm against the base class).  The
      depthwise initializer/regularizer specs are resolved to concrete
      objects here.
      """
      base_config = dict(out_channels=None,
                         kernel_size=kernel_size,
                         data_format=data_format,
                         strides=strides,
                         padding=padding,
                         dilation_rate=dilation_rate,
                         use_bias=use_bias,
                         bias_initializer=bias_initializer,
                         bias_regularizer=bias_regularizer,
                         activation=activation)
      super(DepthWiseConv2D, self).__init__(**base_config, **kwargs)
      self.depth_multiplier = depth_multiplier
      # Resolve string/config specs into concrete objects.
      self.depthwise_initializer = initializers.get(depthwise_initializer)
      self.depthwise_regularizer = regularizers.get(depthwise_regularizer)
Example 3
0
 def __init__(self,
              units,
              activation=None,
              use_bias=True,
              kernel_initializer='glorot_normal',
              kernel_regularizer=None,
              kernel_constraint=None,
              bias_initializer='zeros',
              bias_regularizer=None,
              bias_constraint=None,
              **kwargs):
      """Configure a fully-connected (dense) layer with `units` outputs.

      Initializer/regularizer specs are resolved to concrete objects;
      constraints are stored as passed in.  The kernel and bias
      variables start as None and are created when the layer is built.
      """
      super(Dense, self).__init__(activation=activation, **kwargs)
      self.units = units
      self.use_bias = use_bias
      # Resolve the kernel/bias specs pairwise.
      self.kernel_initializer, self.bias_initializer = (
          initializers.get(kernel_initializer),
          initializers.get(bias_initializer))
      self.kernel_regularizer, self.bias_regularizer = (
          regularizers.get(kernel_regularizer),
          regularizers.get(bias_regularizer))
      self.kernel_constraint, self.bias_constraint = (
          kernel_constraint, bias_constraint)
      # Not yet built.
      self.kernel = self.bias = None
Example 4
0
 def __init__(self,
              rank,
              out_channels,
              kernel_size,
              strides=1,
              padding='SAME',
              data_format='channels_last',
              activation=None,
              use_bias=True,
              kernel_initializer='truncated_normal',
              kernel_regularizer=None,
              kernel_constraint=None,
              bias_initializer='zeros',
              bias_regularizer=None,
              bias_constraint=None,
              dilation_rate=1,
              **kwargs):
      """Configure a base N-dimensional convolution layer.

      :param rank: Spatial dimensionality of the convolution; must be
          an int in [1, 3].
      :param out_channels: Number of output channels (filters).
      :param kernel_size: Int or tuple of `rank` ints.
      :param strides: Int or tuple of `rank` ints.
      :param padding: Padding scheme, normalized via normalize_padding.
      :param data_format: Data layout, normalized for the given rank.
      :param dilation_rate: Int or tuple of `rank` ints.
      :raises ValueError: If `rank` is not an integer in [1, 3].

      The remaining arguments configure the kernel/bias weights created
      later in build(); `kernel`, `bias` and `_convolution_op` stay
      None until then.
      """
      super(Conv, self).__init__(activation=activation, **kwargs)
      # Validate with a real exception: the previous bare `assert`
      # would be stripped under `python -O` and let a bad rank through.
      if not isinstance(rank, int) or not 1 <= rank <= 3:
          raise ValueError(
              "rank must be an integer in [1, 3], got %r" % (rank,))
      self.rank = rank
      self.out_channels = out_channels
      self.data_format = normalize_data_format(data_format, rank)
      self.kernel_size = normalize_tuple(kernel_size, rank, 'kernel size')
      self.strides = normalize_tuple(strides, rank, 'strides')
      self.dilation_rate = normalize_tuple(dilation_rate, rank,
                                           'dilation_rate')
      self.padding = normalize_padding(padding)
      self.use_bias = use_bias
      self.kernel_initializer = initializers.get(kernel_initializer)
      self.kernel_regularizer = regularizers.get(kernel_regularizer)
      self.kernel_constraint = kernel_constraint
      self.bias_initializer = initializers.get(bias_initializer)
      self.bias_regularizer = regularizers.get(bias_regularizer)
      self.bias_constraint = bias_constraint
      # Filled in during build(), once the input shape is known.
      self._convolution_op = None
      self.kernel = None
      self.bias = None
Example 5
0
 def __init__(self,
              rank,
              out_channels,
              kernel_size,
              strides=1,
              padding='SAME',
              data_format='channels_last',
              depth_multiplier=1,
              depthwise_initializer='truncated_normal',
              depthwise_regularizer=None,
              pointwise_initializer='truncated_normal',
              pointwise_regularizer=None,
              bias_initializer='zeros',
              bias_regularizer=None,
              dilation_rate=1,
              use_bias=True,
              activation=None,
              **kwargs):
      """Configure a base separable convolution layer (a depthwise
      convolution followed by a pointwise one).

      The depthwise/pointwise initializer and regularizer specs are
      resolved to concrete objects here; the depthwise kernel,
      pointwise kernel and bias variables start as None and are created
      when the layer is built.  All shared convolution parameters are
      forwarded to the Conv base class.
      """
      super(SeparableConv, self).__init__(rank=rank,
                                          out_channels=out_channels,
                                          kernel_size=kernel_size,
                                          strides=strides,
                                          padding=padding,
                                          data_format=data_format,
                                          bias_initializer=bias_initializer,
                                          bias_regularizer=bias_regularizer,
                                          dilation_rate=dilation_rate,
                                          activation=activation,
                                          use_bias=use_bias,
                                          **kwargs)
      self.depth_multiplier = depth_multiplier
      self.depthwise_initializer = initializers.get(depthwise_initializer)
      # BUG FIX: this regularizer was previously resolved with
      # initializers.get(); regularizer specs must go through
      # regularizers.get(), matching every other *_regularizer in this
      # file (e.g. the pointwise one below).
      self.depthwise_regularizer = regularizers.get(depthwise_regularizer)
      self.pointwise_initializer = initializers.get(pointwise_initializer)
      self.pointwise_regularizer = regularizers.get(pointwise_regularizer)
      self.depthwise_kernel = None
      self.pointwise_kernel = None
      self.bias = None
Example 6
0
 def add_weight(self,
                name,
                shape=None,
                dtype=None,
                initial_value=None,
                initializer=None,
                regularizer=None,
                trainable=None,
                constraint=None,
                **kwargs):
      """
      Create a variable, register it with this layer, and return it.
      :param name: Name of weights
      :param shape: Shape of weights (required when no initial_value)
      :param dtype: Data type of weights (falls back to self.dtype)
      :param initial_value: Initial value of weights
      :param initializer: Initializer for weights (required when no
          initial_value)
      :param regularizer: Regularizer for weights; its loss is added to
          the REGULARIZATION_LOSSES collection
      :param trainable: A boolean, whether the weight should
          be trained via backprop or not (assuming
          that the layer itself is also trainable).
      :param constraint: Optional constraint instance
      :return weight itself
      """
      dtype = dtype or self.dtype
      if initial_value is None:
          # Without an explicit value we must be able to synthesize one.
          if shape is None:
              raise ValueError("When initial_value is not specified,"
                               " shape for initializing must be specified.")
          if initializer is None:
              raise ValueError(
                  "When initial_value is not specified,"
                  " initializer for initializing must be specified.")
          initial_value = initializers.get(initializer)(shape, dtype=dtype)
      # ON_READ-synchronized variables can never be trainable; an
      # unspecified `trainable` defaults to True otherwise.
      synchronization = kwargs.get('synchronization',
                                   variables.VariableSynchronization.AUTO)
      on_read = synchronization == variables.VariableSynchronization.ON_READ
      if on_read and trainable:
          raise ValueError("Synchronization value can be set to"
                           " VariableSynchronization.ON_READ only"
                           " for non-trainable variables")
      if on_read:
          trainable = False
      elif trainable is None:
          trainable = True
      weight = variables.Variable(name=name,
                                  initial_value=initial_value,
                                  dtype=dtype,
                                  trainable=trainable,
                                  constraint=constraint,
                                  **kwargs)
      if regularizer is not None:
          # Record the regularization loss so it can be collected later.
          with ops.name_scope('weight_regularizer'):
              ops.add_to_collection(fops.GraphKeys.REGULARIZATION_LOSSES,
                                    regularizers.get(regularizer)(weight))
      bucket = (self._trainable_weights if trainable
                else self._non_trainable_weights)
      bucket.append(weight)
      return weight