Example #1
 def __init__(self,
              filters,
              kernel_size,
              strides=(1, 1),
              padding='valid',
              data_format=None,
              dilation_rate=(1, 1),
              activation=None,
              use_bias=True,
              kernel_initializer='glorot_uniform',
              bias_initializer='zeros',
              kernel_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              bias_constraint=None,
              **kwargs):
     super(Conv2D, self).__init__(
         filters=filters,
         kernel_size=kernel_size,
         strides=strides,
         padding=padding,
         data_format=data_format,
         dilation_rate=dilation_rate,
         activation=getters.get_activation(activation) if activation else activation,
         use_bias=use_bias,
         kernel_initializer=getters.get_initializer(kernel_initializer),
         bias_initializer=getters.get_initializer(bias_initializer),
         kernel_regularizer=getters.get_regularizer(kernel_regularizer),
         bias_regularizer=getters.get_regularizer(bias_regularizer),
         activity_regularizer=getters.get_regularizer(activity_regularizer),
         kernel_constraint=getters.get_constraint(kernel_constraint),
         bias_constraint=getters.get_constraint(bias_constraint),
         **kwargs)
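What this wrapper adds over the plain Keras layer is name resolution: activations, initializers, regularizers, and constraints may all be passed as strings and are converted to objects through `getters` before reaching the parent constructor. A minimal usage sketch (the string names 'relu' and 'l2' are assumed to be registered with `getters`; they are not taken from this snippet):

    # Hypothetical usage; each string is resolved by the matching getters.* call.
    conv = Conv2D(filters=32,
                  kernel_size=(3, 3),
                  activation='relu',                    # getters.get_activation('relu')
                  kernel_initializer='glorot_uniform',  # getters.get_initializer(...)
                  kernel_regularizer='l2')              # getters.get_regularizer('l2')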
Example #2
 def __init__(self,
              filters,
              kernel_size,
              strides=(1, 1),
              padding='valid',
              data_format=None,
              depth_multiplier=1,
              activation=None,
              use_bias=True,
              depthwise_initializer='glorot_uniform',
              pointwise_initializer='glorot_uniform',
              bias_initializer='zeros',
              depthwise_regularizer=None,
              pointwise_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              depthwise_constraint=None,
              pointwise_constraint=None,
              bias_constraint=None,
              **kwargs):
     super(SeparableConv2D, self).__init__(
         filters=filters,
         kernel_size=kernel_size,
         strides=strides,
         padding=padding,
         data_format=data_format,
         depth_multiplier=depth_multiplier,
          activation=getters.get_activation(activation) if activation else activation,
         use_bias=use_bias,
         depthwise_initializer=getters.get_initializer(
             depthwise_initializer),
         pointwise_initializer=getters.get_initializer(
             pointwise_initializer),
         bias_initializer=getters.get_initializer(bias_initializer),
         depthwise_regularizer=getters.get_regularizer(
             depthwise_regularizer),
         pointwise_regularizer=getters.get_regularizer(
             pointwise_regularizer),
         bias_regularizer=getters.get_regularizer(bias_regularizer),
         activity_regularizer=getters.get_regularizer(activity_regularizer),
         depthwise_constraint=getters.get_constraint(depthwise_constraint),
         pointwise_constraint=getters.get_constraint(pointwise_constraint),
         bias_constraint=getters.get_constraint(bias_constraint),
         **kwargs)
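The separable variant follows the same pattern but resolves two kernels, the depthwise and the pointwise one, each with its own initializer, regularizer, and constraint. A sketch under the same assumptions as above:

    # Hypothetical usage; depthwise and pointwise weights are configured separately.
    sep = SeparableConv2D(filters=64,
                          kernel_size=(3, 3),
                          depth_multiplier=2,                      # filters per input channel
                          depthwise_initializer='glorot_uniform',  # resolved via getters
                          pointwise_regularizer='l1')              # assumed regularizer name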
Example #3
File: core.py Project: chandu088/p
    def _build(self, incoming, *args, **kwargs):
        """
        Args:
            incoming: (2+)-D Tensor [samples, input dim]. If not 2-D, the input will be flattened.

        Returns:
            2D Tensor [samples, num_units].
        """
        self._declare_dependencies()
        input_shape = get_shape(incoming)
        incoming = validate_dtype(incoming)

        assert len(input_shape) > 1, 'Incoming Tensor shape must be at least 2-D'
        n_inputs = total_tensor_depth(tensor_shape=input_shape)

        regularizer = getters.get_regularizer(self.regularizer,
                                              scale=self.scale,
                                              collect=True)
        self._w = variable(name='w',
                           shape=[n_inputs, self.num_units],
                           dtype=incoming.dtype,
                           regularizer=regularizer,
                           initializer=getters.get_initializer(
                               self.weights_init),
                           trainable=self.trainable,
                           restore=self.restore)
        track(self._w, tf.GraphKeys.LAYER_VARIABLES, self.module_name)

        inference = incoming
        # If input is not 2d, flatten it.
        if len(input_shape) > 2:
            inference = tf.reshape(tensor=inference, shape=[-1, n_inputs])
        inference = tf.matmul(a=inference, b=self._w)

        self._b = None
        if self.bias:
            self._b = variable(name='b',
                               shape=[self.num_units],
                               dtype=incoming.dtype,
                               initializer=getters.get_initializer(
                                   self.bias_init),
                               trainable=self.trainable,
                               restore=self.restore)
            track(self._b, tf.GraphKeys.LAYER_VARIABLES, self.module_name)
            inference = tf.nn.bias_add(value=inference, bias=self._b)

        if self.activation:
            inference = getters.get_activation(self.activation,
                                               collect=True)(inference)

        if self._dropout:
            inference = self._dropout(inference)

        track(inference, tf.GraphKeys.LAYER_TENSOR, self.module_name)
        return inference
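Stripped of the variable tracking and dependency bookkeeping, the forward pass built here is the classic dense transform: flatten to 2-D, multiply by `w`, optionally add a bias, then optionally apply an activation and dropout. A self-contained sketch of the same math in plain TensorFlow 1.x (all names and shapes below are illustrative, not from the source):

    import tensorflow as tf

    # Equivalent computation for a [None, 4, 8] input and 16 output units.
    x = tf.placeholder(tf.float32, [None, 4, 8])
    n_inputs = 4 * 8
    x2d = tf.reshape(x, [-1, n_inputs])                    # flatten non-sample dims
    w = tf.get_variable('w', [n_inputs, 16])
    b = tf.get_variable('b', [16])
    y = tf.nn.relu(tf.nn.bias_add(tf.matmul(x2d, w), b))   # [samples, 16]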
Example #4
 def __init__(self,
              input_dim,
              output_dim,
              embeddings_initializer='uniform',
              embeddings_regularizer=None,
              activity_regularizer=None,
              embeddings_constraint=None,
              mask_zero=False,
              input_length=None,
              **kwargs):
     super(Embedding, self).__init__(
         input_dim=input_dim,
         output_dim=output_dim,
         embeddings_initializer=getters.get_initializer(embeddings_initializer),
         embeddings_regularizer=getters.get_regularizer(embeddings_regularizer),
         activity_regularizer=getters.get_regularizer(activity_regularizer),
         embeddings_constraint=getters.get_constraint(embeddings_constraint),
         mask_zero=mask_zero,
         input_length=input_length,
         **kwargs)
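Only the embedding table and the activity term are configurable here; everything else is forwarded untouched. A hypothetical instantiation (argument values are illustrative):

    # Hypothetical usage: a 10k-token vocabulary embedded into 128 dimensions.
    emb = Embedding(input_dim=10000,
                    output_dim=128,
                    embeddings_initializer='uniform',  # resolved via getters
                    mask_zero=True,                    # index 0 treated as padding
                    input_length=50)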
Example #5
 def __init__(self,
              units,
              activation='tanh',
              recurrent_activation='hard_sigmoid',
              use_bias=True,
              kernel_initializer='glorot_uniform',
              recurrent_initializer='orthogonal',
              bias_initializer='zeros',
              unit_forget_bias=True,
              kernel_regularizer=None,
              recurrent_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              recurrent_constraint=None,
              bias_constraint=None,
              dropout=0.,
              recurrent_dropout=0.,
              **kwargs):
     super(LSTM, self).__init__(
         units=units,
         activation=getters.get_activation(activation),
         recurrent_activation=getters.get_activation(recurrent_activation),
         use_bias=use_bias,
         kernel_initializer=getters.get_initializer(kernel_initializer),
         recurrent_initializer=getters.get_initializer(
             recurrent_initializer),
         bias_initializer=getters.get_initializer(bias_initializer),
         unit_forget_bias=unit_forget_bias,
         kernel_regularizer=getters.get_regularizer(kernel_regularizer),
         recurrent_regularizer=getters.get_regularizer(
             recurrent_regularizer),
         bias_regularizer=getters.get_regularizer(bias_regularizer),
         activity_regularizer=getters.get_regularizer(activity_regularizer),
         kernel_constraint=getters.get_constraint(kernel_constraint),
         recurrent_constraint=getters.get_constraint(recurrent_constraint),
         bias_constraint=getters.get_constraint(bias_constraint),
         dropout=dropout,
         recurrent_dropout=recurrent_dropout,
         **kwargs)
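Both the cell activation and the recurrent (gate) activation go through `getters.get_activation`, so either can be swapped by name. A hypothetical instantiation (the dropout values are illustrative):

    # Hypothetical usage; string activations are resolved at construction time.
    lstm = LSTM(units=256,
                activation='tanh',                    # cell output activation
                recurrent_activation='hard_sigmoid',  # gate activation
                recurrent_initializer='orthogonal',
                dropout=0.2,
                recurrent_dropout=0.2)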
Example #6
    def _build(self, incoming, *args, **kwargs):
        """
        Args:
            incoming: (2+)-D Tensor [samples, input dim]. If not 2-D, the input will be flattened.

        Returns:
            2D Tensor [samples, num_units].
        """
        self._declare_dependencies()
        input_shape = get_shape(incoming)
        assert len(input_shape) > 1, 'Incoming Tensor shape must be at least 2-D'
        n_inputs = total_tensor_depth(tensor_shape=input_shape)

        regularizer = getters.get_regularizer(self.regularizer, scale=self.scale, collect=True)
        initializer = getters.get_initializer(self.weights_init)
        self._w = variable(name='w', shape=[n_inputs, self.num_units], regularizer=regularizer,
                           initializer=initializer, trainable=self.trainable,
                           restore=self.restore)
        track(self._w, tf.GraphKeys.LAYER_VARIABLES, self.module_name)

        self._b = variable(name='b', shape=[self.num_units],
                           initializer=getters.get_initializer(self.bias_init),
                           trainable=self.trainable, restore=self.restore)
        track(self._b, tf.GraphKeys.LAYER_VARIABLES, self.module_name)

        # Weight and bias for the transform gate
        self._w_t = variable(name='w_t', shape=[n_inputs, self.num_units],
                             regularizer=None, initializer=initializer,
                             trainable=self.trainable, restore=self.restore)
        track(self._w_t, tf.GraphKeys.LAYER_VARIABLES, self.module_name)

        self._b_t = variable(name='b_t', shape=[self.num_units],
                             initializer=tf.constant_initializer(-1),
                             trainable=self.trainable, restore=self.restore)
        track(self._b_t, tf.GraphKeys.LAYER_VARIABLES, self.module_name)

        # If input is not 2d, flatten it.
        if len(input_shape) > 2:
            incoming = tf.reshape(tensor=incoming, shape=[-1, n_inputs])

        H = getters.get_activation(self.activation)(tf.matmul(a=incoming, b=self._w) + self._b)
        T = tf.sigmoid(tf.matmul(a=incoming, b=self._w_t) + self._b_t)
        if self._transform_dropout:
            T = self._transform_dropout(T)
        C = tf.subtract(x=1.0, y=T)
        inference = tf.add(x=tf.multiply(x=H, y=T), y=tf.multiply(x=incoming, y=C))
        track(inference, tf.GraphKeys.ACTIVATIONS)

        track(inference, tf.GraphKeys.LAYER_TENSOR, self.module_name)
        return inference
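The gating arithmetic above is the standard highway formulation, y = H(x)·T(x) + x·(1 − T(x)): `b_t` is initialized to −1 so the transform gate starts small (sigmoid(−1) ≈ 0.27) and the layer initially leans toward carrying the input through, and the carry term x·(1 − T) only type-checks when `num_units` equals the flattened input depth. A self-contained sketch of just the gate math (illustrative names, TF 1.x style as in the source):

    import tensorflow as tf

    def highway(x, w, b, w_t, b_t, activation=tf.nn.relu):
        """y = H*T + x*(1 - T); x must be 2-D with depth equal to num_units."""
        H = activation(tf.matmul(x, w) + b)      # candidate transform
        T = tf.sigmoid(tf.matmul(x, w_t) + b_t)  # transform gate in (0, 1)
        return H * T + x * (1.0 - T)             # carry gate C = 1 - T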
Example #7
def variable(name,
             shape=None,
             dtype=tf.float32,
             initializer=None,
             regularizer=None,
             trainable=True,
             collections=None,
             device='',
             restore=True):
    """Instantiate a new variable.

    Args:
        name: `str`. A name for this variable.
        shape: list of `int`. The variable shape (optional).
        dtype: `type`. The variable data type.
        initializer: `str` or `Tensor`. The variable initialization.
        regularizer: `str` or `Tensor`. The variable regularizer.
        trainable: `bool`. If `True`, this variable's weights will be trained.
        collections: `str`. A collection to add the new variable to (optional).
        device: `str`. Device on which to store the variable, e.g. '/cpu:0'.
        restore: `bool`. Whether to restore this variable when loading a pre-trained model.

    Returns:
        A Variable.
    """

    if isinstance(initializer, six.string_types):
        initializer = getters.get_initializer(initializer)
    # Remove shape param if initializer is a Tensor
    if not callable(initializer) and isinstance(initializer, tf.Tensor):
        shape = None

    if isinstance(regularizer, six.string_types):
        regularizer = getters.get_regularizer(regularizer)

    with tf.device(device_name_or_function=device):
        var = tf.get_variable(name=name,
                              shape=shape,
                              dtype=dtype,
                              initializer=initializer,
                              regularizer=regularizer,
                              trainable=trainable,
                              collections=collections)

        if not restore:
            # TODO adapt restoring saver
            tf.add_to_collection(name=tf.GraphKeys.EXCL_RESTORE_VARIABLES,
                                 value=var)

        return var
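Because the helper accepts either ready objects or plain strings, call sites stay short. A sketch using only argument forms the function itself handles (the specific names are assumptions):

    # Hypothetical call; both strings are resolved through getters before use.
    w = variable(name='w',
                 shape=[784, 256],
                 initializer='glorot_uniform',  # -> getters.get_initializer(...)
                 regularizer='l2',              # -> getters.get_regularizer(...)
                 device='/cpu:0',
                 restore=True)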
Example #8
 def __init__(self,
              filters,
              kernel_size,
              strides=(1, 1),
              padding='valid',
              data_format=None,
              dilation_rate=(1, 1),
              activation='tanh',
              recurrent_activation='hard_sigmoid',
              use_bias=True,
              kernel_initializer='glorot_uniform',
              recurrent_initializer='orthogonal',
              bias_initializer='zeros',
              unit_forget_bias=True,
              kernel_regularizer=None,
              recurrent_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              recurrent_constraint=None,
              bias_constraint=None,
              return_sequences=False,
              go_backwards=False,
              stateful=False,
              dropout=0.,
              recurrent_dropout=0.,
              **kwargs):
     super(ConvLSTM2D, self).__init__(
         filters,
         kernel_size,
         strides=strides,
         padding=padding,
         data_format=data_format,
         dilation_rate=dilation_rate,
         activation=getters.get_activation(activation),
         recurrent_activation=getters.get_activation(recurrent_activation),
         use_bias=use_bias,
         kernel_initializer=getters.get_initializer(kernel_initializer),
         recurrent_initializer=getters.get_initializer(
             recurrent_initializer),
         bias_initializer=getters.get_initializer(bias_initializer),
         unit_forget_bias=unit_forget_bias,
         kernel_regularizer=getters.get_regularizer(kernel_regularizer),
         recurrent_regularizer=getters.get_regularizer(
             recurrent_regularizer),
         bias_regularizer=getters.get_regularizer(bias_regularizer),
         activity_regularizer=getters.get_regularizer(activity_regularizer),
          kernel_constraint=getters.get_constraint(kernel_constraint),
          recurrent_constraint=getters.get_constraint(recurrent_constraint),
         bias_constraint=getters.get_constraint(bias_constraint),
         return_sequences=return_sequences,
         go_backwards=go_backwards,
         stateful=stateful,
         dropout=dropout,
         recurrent_dropout=recurrent_dropout,
         **kwargs)
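`ConvLSTM2D` combines the convolutional and recurrent name resolution from the earlier examples. A hypothetical instantiation on video-shaped input (argument values are illustrative):

    # Hypothetical usage; expects 5-D input such as [samples, time, rows, cols, channels].
    conv_lstm = ConvLSTM2D(filters=40,
                           kernel_size=(3, 3),
                           padding='same',
                           return_sequences=True,  # emit every timestep, not just the last
                           dropout=0.1)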
Example #9
 def __init__(self,
              units,
              activation=None,
              use_bias=True,
              kernel_initializer='glorot_uniform',
              bias_initializer='zeros',
              kernel_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              bias_constraint=None,
              **kwargs):
     super(Dense, self).__init__(
         units,
         activation=getters.get_activation(activation) if activation else activation,
         use_bias=use_bias,
         kernel_initializer=getters.get_initializer(kernel_initializer),
         bias_initializer=getters.get_initializer(bias_initializer),
         kernel_regularizer=getters.get_regularizer(kernel_regularizer),
         bias_regularizer=getters.get_regularizer(bias_regularizer),
         activity_regularizer=getters.get_regularizer(activity_regularizer),
         kernel_constraint=getters.get_constraint(kernel_constraint),
         bias_constraint=getters.get_constraint(bias_constraint),
         **kwargs)
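`Dense` uses the same guard as `Conv2D`: a falsy activation is passed through untouched instead of being sent to `getters.get_activation`. A hypothetical instantiation:

    # Hypothetical usage; activation=None would be forwarded as-is.
    dense = Dense(units=10,
                  activation='softmax',  # assumed activation name, resolved via getters
                  kernel_initializer='glorot_uniform',
                  bias_regularizer='l2')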
Example #10
    def _build(self, incoming, *args, **kwargs):
        """
        Args:
            incoming: (2+)-D Tensor [samples, input dim]. If not 2-D, the input will be flattened.

        Returns:
            2D Tensor [samples, num_units].
        """
        self._declare_dependencies()
        input_shape = get_shape(incoming)
        incoming = validate_dtype(incoming)

        assert len(input_shape) > 1, 'Incoming Tensor shape must be at least 2-D'
        n_inputs = total_tensor_depth(tensor_shape=input_shape)

        regularizer = getters.get_regularizer(self.regularizer, scale=self.scale, collect=True)
        self._w = variable(
            name='w', shape=[n_inputs, self.num_units], dtype=incoming.dtype, regularizer=regularizer,
            initializer=getters.get_initializer(self.weights_init), trainable=self.trainable,
            restore=self.restore)
        track(self._w, tf.GraphKeys.LAYER_VARIABLES, self.module_name)

        inference = incoming
        # If input is not 2d, flatten it.
        if len(input_shape) > 2:
            inference = tf.reshape(tensor=inference, shape=[-1, n_inputs])
        inference = tf.matmul(a=inference, b=self._w)

        self._b = None
        if self.bias:
            self._b = variable(name='b', shape=[self.num_units], dtype=incoming.dtype,
                               initializer=getters.get_initializer(self.bias_init),
                               trainable=self.trainable, restore=self.restore)
            track(self._b, tf.GraphKeys.LAYER_VARIABLES, self.module_name)
            inference = tf.nn.bias_add(value=inference, bias=self._b)

        if self.activation:
            inference = getters.get_activation(self.activation, collect=True)(inference)

        if self._dropout:
            inference = self._dropout(inference)

        track(inference, tf.GraphKeys.LAYER_TENSOR, self.module_name)
        return inference
Example #11
def variable(name, shape=None, dtype=tf.float32, initializer=None, regularizer=None,
             trainable=True, collections=None, device='', restore=True):
    """Instantiate a new variable.

    Args:
        name: `str`. A name for this variable.
        shape: list of `int`. The variable shape (optional).
        dtype: `type`. The variable data type.
        initializer: `str` or `Tensor`. The variable initialization.
        regularizer: `str` or `Tensor`. The variable regularizer.
        trainable: `bool`. If `True`, this variable's weights will be trained.
        collections: `str`. A collection to add the new variable to (optional).
        device: `str`. Device on which to store the variable, e.g. '/cpu:0'.
        restore: `bool`. Whether to restore this variable when loading a pre-trained model.

    Returns:
        A Variable.
    """

    if isinstance(initializer, six.string_types):
        initializer = getters.get_initializer(initializer)
    # Remove shape param if initializer is a Tensor
    if not callable(initializer) and isinstance(initializer, tf.Tensor):
        shape = None

    if isinstance(regularizer, six.string_types):
        regularizer = getters.get_regularizer(regularizer)

    with tf.device(device_name_or_function=device):
        var = tf.get_variable(name=name,
                              shape=shape,
                              dtype=dtype,
                              initializer=initializer,
                              regularizer=regularizer,
                              trainable=trainable,
                              collections=collections)

        if not restore:
            tf.add_to_collection(name=tf.GraphKeys.EXCL_RESTORE_VARIABLES, value=var)  # @TODO adapt restoring saver

        return var