Code example #1
File: core.py Project: gridl/polyaxon-lib
 def __init__(self,
              units,
              activation=None,
              use_bias=True,
              kernel_initializer='glorot_uniform',
              bias_initializer='zeros',
              kernel_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              bias_constraint=None,
              **kwargs):
     super(Dense, self).__init__(
         units,
         activation=getters.get_activation(activation)
         if activation else activation,
         use_bias=use_bias,
         kernel_initializer=getters.get_initializer(kernel_initializer),
         bias_initializer=getters.get_initializer(bias_initializer),
         kernel_regularizer=getters.get_regularizer(kernel_regularizer),
         bias_regularizer=getters.get_regularizer(bias_regularizer),
         activity_regularizer=getters.get_regularizer(activity_regularizer),
         kernel_constraint=getters.get_constraint(kernel_constraint),
         bias_constraint=getters.get_constraint(bias_constraint),
         **kwargs)
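A minimal usage sketch of the Dense wrapper above (not part of the project listing). The import path is an assumption, since the snippet only shows the file name, and 'relu' / 'l2' are assumed to be keys that getters.get_activation and getters.get_regularizer can resolve.

# Hypothetical import path -- adjust to wherever the Dense wrapper actually lives.
# from polyaxon_lib.layers.core import Dense

dense = Dense(
    units=64,
    activation='relu',                    # assumed key; resolved by getters.get_activation
    kernel_initializer='glorot_uniform',  # default shown above
    kernel_regularizer='l2')              # assumed key; resolved by getters.get_regularizer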
Code example #2
File: convolutional.py Project: gridl/polyaxon-lib
 def __init__(self,
              filters,
              kernel_size,
              strides=(1, 1),
              padding='valid',
              data_format=None,
              dilation_rate=(1, 1),
              activation=None,
              use_bias=True,
              kernel_initializer='glorot_uniform',
              bias_initializer='zeros',
              kernel_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              bias_constraint=None,
              **kwargs):
     super(Conv2D, self).__init__(
         filters=filters,
         kernel_size=kernel_size,
         strides=strides,
         padding=padding,
         data_format=data_format,
         dilation_rate=dilation_rate,
         activation=getters.get_activation(activation)
         if activation else activation,
         use_bias=use_bias,
         kernel_initializer=getters.get_initializer(kernel_initializer),
         bias_initializer=getters.get_initializer(bias_initializer),
         kernel_regularizer=getters.get_regularizer(kernel_regularizer),
         bias_regularizer=getters.get_regularizer(bias_regularizer),
         activity_regularizer=getters.get_regularizer(activity_regularizer),
         kernel_constraint=getters.get_constraint(kernel_constraint),
         bias_constraint=getters.get_constraint(bias_constraint),
         **kwargs)
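As with the Dense wrapper, a short usage sketch; the import path and the 'same' / 'relu' keys are assumptions, everything else follows the signature shown above.

# Hypothetical import path.
# from polyaxon_lib.layers.convolutional import Conv2D

conv = Conv2D(
    filters=32,
    kernel_size=(3, 3),
    strides=(1, 1),
    padding='same',       # assumed to be accepted alongside the 'valid' default
    activation='relu')    # assumed key; resolved by getters.get_activation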
Code example #3
File: recurrent.py Project: gridl/polyaxon-lib
 def __init__(self,
              units,
              activation='tanh',
              use_bias=True,
              kernel_initializer='glorot_uniform',
              recurrent_initializer='orthogonal',
              bias_initializer='zeros',
              kernel_regularizer=None,
              recurrent_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              kernel_constraint=None,
              recurrent_constraint=None,
              bias_constraint=None,
              dropout=0.,
              recurrent_dropout=0.,
              **kwargs):
     super(SimpleRNN, self).__init__(
         units=units,
         activation=getters.get_activation(activation),
         use_bias=use_bias,
         kernel_initializer=getters.get_initializer(kernel_initializer),
         recurrent_initializer=getters.get_initializer(recurrent_initializer),
         bias_initializer=getters.get_initializer(bias_initializer),
         kernel_regularizer=getters.get_regularizer(kernel_regularizer),
         recurrent_regularizer=getters.get_regularizer(recurrent_regularizer),
         bias_regularizer=getters.get_regularizer(bias_regularizer),
         activity_regularizer=getters.get_regularizer(activity_regularizer),
         kernel_constraint=getters.get_constraint(kernel_constraint),
         recurrent_constraint=getters.get_constraint(recurrent_constraint),
         bias_constraint=getters.get_constraint(bias_constraint),
         dropout=dropout,
         recurrent_dropout=recurrent_dropout,
         **kwargs)
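A usage sketch for the SimpleRNN wrapper; only the import path is assumed, and the activation below is the 'tanh' default shown in the signature.

# Hypothetical import path.
# from polyaxon_lib.layers.recurrent import SimpleRNN

rnn = SimpleRNN(
    units=128,
    activation='tanh',        # default shown above; resolved by getters.get_activation
    dropout=0.2,
    recurrent_dropout=0.2)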
Code example #4
def create_global_counter(collection, name, graph=None):
    """Create global counter tensor in graph.

    Args:
        collection: the counter's collection.
        name: the counter's name.
        graph: The graph in which to create the global counter tensor. If missing,
            the default graph is used.

    Returns:
        The global counter tensor.

    Raises:
        ValueError: if the global counter tensor is already defined.
    """
    graph = graph or tf.get_default_graph()
    if get_global_counter(collection, name, graph) is not None:
        raise ValueError("`{}` already exists.".format(collection))
    # Create in proper graph and base name_scope.
    with graph.as_default() as g, g.name_scope(None):
        return variable(
            collection,
            shape=[],
            dtype=tf.int64,
            initializer=getters.get_initializer('zeros', dtype=tf.int64),
            trainable=False,
            collections=[tf.GraphKeys.GLOBAL_VARIABLES, collection])
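A sketch of how the counter might be created, assuming TensorFlow 1.x (the tf.GraphKeys / tf.get_default_graph API used above); the collection value is illustrative.

# Create a counter in the default graph; note that the collection also serves as
# the variable name inside create_global_counter.
step_counter = create_global_counter(collection=tf.GraphKeys.GLOBAL_STEP,
                                      name='global_step')

# Calling it again with the same collection raises ValueError, because
# get_global_counter (used as a guard above) already finds the tensor.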
Code example #5
File: convolutional.py Project: gridl/polyaxon-lib
 def __init__(self,
              filters,
              kernel_size,
              strides=(1, 1),
              padding='valid',
              data_format=None,
              depth_multiplier=1,
              activation=None,
              use_bias=True,
              depthwise_initializer='glorot_uniform',
              pointwise_initializer='glorot_uniform',
              bias_initializer='zeros',
              depthwise_regularizer=None,
              pointwise_regularizer=None,
              bias_regularizer=None,
              activity_regularizer=None,
              depthwise_constraint=None,
              pointwise_constraint=None,
              bias_constraint=None,
              **kwargs):
     super(SeparableConv2D, self).__init__(
         filters=filters,
         kernel_size=kernel_size,
         strides=strides,
         padding=padding,
         data_format=data_format,
         depth_multiplier=depth_multiplier,
         activation=getters.get_activation(activation),
         use_bias=use_bias,
         depthwise_initializer=getters.get_initializer(
             depthwise_initializer),
         pointwise_initializer=getters.get_initializer(
             pointwise_initializer),
         bias_initializer=getters.get_initializer(bias_initializer),
         depthwise_regularizer=getters.get_regularizer(
             depthwise_regularizer),
         pointwise_regularizer=getters.get_regularizer(
             pointwise_regularizer),
         bias_regularizer=getters.get_regularizer(bias_regularizer),
         activity_regularizer=getters.get_regularizer(activity_regularizer),
         depthwise_constraint=getters.get_constraint(depthwise_constraint),
         pointwise_constraint=getters.get_constraint(pointwise_constraint),
         bias_constraint=getters.get_constraint(bias_constraint),
         **kwargs)
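A usage sketch for the SeparableConv2D wrapper; the import path is an assumption and the argument values are illustrative.

# Hypothetical import path.
# from polyaxon_lib.layers.convolutional import SeparableConv2D

sep_conv = SeparableConv2D(
    filters=64,
    kernel_size=(3, 3),
    depth_multiplier=2,
    depthwise_initializer='glorot_uniform',   # default shown above
    pointwise_initializer='glorot_uniform')   # default shown above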
Code example #6
File: variables.py Project: gridl/polyaxon-lib
def variable(name,
             shape=None,
             dtype=tf.float32,
             initializer=None,
             regularizer=None,
             trainable=True,
             collections=None,
             device='',
             restore=True):
    """Instantiate a new variable.

    Args:
        name: `str`. A name for this variable.
        shape: list of `int`. The variable shape (optional).
        dtype: `type`. The variable data type.
        initializer: `str` or `Tensor`. The variable initialization.
        regularizer: `str` or `Tensor`. The variable regularizer.
        trainable: `bool`. If `True`, the variable's weights will be trained.
        collections: `str`. A collection to add the new variable to (optional).
        device: `str`. Device on which to store the variable. Default: `''`.
        restore: `bool`. Whether to restore this variable when loading a pre-trained model.

    Returns:
        A Variable.
    """

    if isinstance(initializer, six.string_types):
        initializer = getters.get_initializer(initializer)
    # Remove shape param if initializer is a Tensor
    if not callable(initializer) and isinstance(initializer, tf.Tensor):
        shape = None

    if isinstance(regularizer, six.string_types):
        regularizer = getters.get_regularizer(regularizer)

    with tf.device(device_name_or_function=device):
        var = tf.get_variable(name=name,
                              shape=shape,
                              dtype=dtype,
                              initializer=initializer,
                              regularizer=regularizer,
                              trainable=trainable,
                              collections=collections)

        if not restore:
            # TODO adapt restoring saver
            tf.add_to_collection(name=tf.GraphKeys.EXCL_RESTORE_VARIABLES,
                                 value=var)

        return var
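A sketch of calling variable() directly, again assuming TensorFlow 1.x. The 'zeros' key is taken from the create_global_counter example above, where it is passed to getters.get_initializer; the name and shape are illustrative.

# A non-trainable variable initialized by name and excluded from restore.
embedding_matrix = variable(name='embedding_matrix',
                            shape=[10000, 128],
                            initializer='zeros',    # resolved via getters.get_initializer
                            trainable=False,
                            restore=False)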
Code example #7
 def __init__(self,
              input_dim,
              output_dim,
              embeddings_initializer='uniform',
              embeddings_regularizer=None,
              activity_regularizer=None,
              embeddings_constraint=None,
              mask_zero=False,
              input_length=None,
              **kwargs):
     super(Embedding, self).__init__(
         input_dim=input_dim,
         output_dim=output_dim,
         embeddings_initializer=getters.get_initializer(
             embeddings_initializer),
         embeddings_regularizer=getters.get_regularizer(
             embeddings_regularizer),
         activity_regularizer=getters.get_regularizer(activity_regularizer),
         embeddings_constraint=getters.get_constraint(
             embeddings_constraint),
         mask_zero=mask_zero,
         input_length=input_length,
         **kwargs)
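Finally, a usage sketch for the Embedding wrapper; the import path is an assumption and the sizes are illustrative.

# Hypothetical import path.
# from polyaxon_lib.layers.embeddings import Embedding

embedding = Embedding(
    input_dim=10000,                     # vocabulary size
    output_dim=128,                      # embedding dimension
    embeddings_initializer='uniform',    # default shown above
    mask_zero=True,
    input_length=50)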