Example 1
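Note: the snippets on this page are test methods excerpted from TensorFlow's test suite, shown without their surrounding module. Judging from the identifiers they use, a plausible import block (a sketch against the TF 1.x internal module layout, not verified for any particular version) is:

from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.framework import ops
from tensorflow.python.keras import backend
from tensorflow.python.keras.engine import base_layer as keras_base_layer
from tensorflow.python.layers import base as base_layers
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import variable_scope
from tensorflow.python.platform import test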
  def testLayerProperties(self):
    layer = base_layers.Layer(name='my_layer')
    self.assertEqual(layer.variables, [])
    self.assertEqual(layer.trainable_variables, [])
    self.assertEqual(layer.non_trainable_variables, [])
    if not context.executing_eagerly():
      # updates, losses only supported in GRAPH mode
      self.assertEqual(layer.updates, [])
      self.assertEqual(layer.losses, [])
    self.assertEqual(layer.built, False)
    layer = base_layers.Layer(name='my_layer', trainable=False)
    self.assertEqual(layer.trainable, False)
Example 2
  def testLayerProperties(self):
    layer = base_layers.Layer(name='my_layer')
    self.assertEqual(layer.variables, [])
    self.assertEqual(layer.trainable_variables, [])
    self.assertEqual(layer.non_trainable_variables, [])
    if not context.executing_eagerly():
      # updates, losses only supported in GRAPH mode
      self.assertEqual(layer.updates, [])
      self.assertEqual(layer.losses, [])
    self.assertEqual(layer.built, False)
    layer = base_layers.Layer(name='my_layer', trainable=False)
    self.assertEqual(layer.trainable, False)

    # Assert that the layer was not instrumented as a Keras layer
    self.assertFalse(layer._instrumented_keras_api)
Example 3
  def testActivityRegularizer(self):
    with ops.Graph().as_default():
      regularizer = math_ops.reduce_sum
      layer = base_layers.Layer(activity_regularizer=regularizer)
      x = array_ops.placeholder('int32')
      layer.apply(x)
      self.assertEqual(len(layer.get_losses_for(x)), 1)
Example 4
  def testAddWeight(self):
    layer = base_layers.Layer(name='my_layer')

    # Test basic variable creation.
    variable = layer.add_variable(
        'my_var', [2, 2], initializer=init_ops.zeros_initializer())
    self.assertEqual(variable.name, 'my_layer/my_var:0')
    self.assertEqual(layer.variables, [variable])
    self.assertEqual(layer.trainable_variables, [variable])
    self.assertEqual(layer.non_trainable_variables, [])
    if not context.executing_eagerly():
      self.assertEqual(
          layer.variables,
          ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES))

    # Test non-trainable variable creation.
    # layer.add_variable should work even outside `build` and `call`.
    variable_2 = layer.add_variable(
        'non_trainable_var', [2, 2],
        initializer=init_ops.zeros_initializer(),
        trainable=False)
    self.assertEqual(layer.variables, [variable, variable_2])
    self.assertEqual(layer.trainable_variables, [variable])
    self.assertEqual(layer.non_trainable_variables, [variable_2])

    if not context.executing_eagerly():
      self.assertEqual(
          len(ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)), 1)

    regularizer = lambda x: math_ops.reduce_sum(x) * 1e-3
    _ = layer.add_variable(
        'reg_var', [2, 2],
        initializer=init_ops.zeros_initializer(),
        regularizer=regularizer)
    self.assertEqual(len(layer.losses), 1)

    # Test that sync `ON_READ` variables default to non-trainable.
    variable_3 = layer.add_variable(
        'sync_on_read_var', [2, 2],
        initializer=init_ops.zeros_initializer(),
        synchronization=variable_scope.VariableSynchronization.ON_READ,
        aggregation=variable_scope.VariableAggregation.SUM)
    self.assertEqual(layer.non_trainable_variables, [variable_2, variable_3])

    # Guard so the weight is only added on the first call/trace.
    added_variable = [False]

    @def_function.function
    def function_adds_weight():
      if not added_variable[0]:
        layer.add_variable(
            'reg_var_from_function', [2, 2],
            initializer=init_ops.zeros_initializer(),
            regularizer=regularizer)
        added_variable[0] = True

    function_adds_weight()
    self.assertEqual(len(layer.losses), 2)
Example 5
  def testKerasStyleAddWeight(self):
    keras_layer = keras_base_layer.Layer(name='keras_layer')
    with ops.name_scope('foo', skip_on_eager=False):
      keras_variable = keras_layer.add_variable(
          'my_var', [2, 2], initializer=init_ops.zeros_initializer())
    self.assertEqual(keras_variable.name, 'foo/my_var:0')

    with ops.name_scope('baz', skip_on_eager=False):
      old_style_layer = base_layers.Layer(name='my_layer')
      # Test basic variable creation.
      variable = old_style_layer.add_variable(
          'my_var', [2, 2], initializer=init_ops.zeros_initializer())
    self.assertEqual(variable.name, 'my_layer/my_var:0')

    with base_layers.keras_style_scope():
      layer = base_layers.Layer(name='my_layer')
    # Test basic variable creation.
    with ops.name_scope('bar', skip_on_eager=False):
      variable = layer.add_variable(
          'my_var', [2, 2], initializer=init_ops.zeros_initializer())
    self.assertEqual(variable.name, 'bar/my_var:0')
Example 6
  def testInvalidTrainableSynchronizationCombination(self):
    layer = base_layers.Layer(name='my_layer')

    with self.assertRaisesRegex(
        ValueError, 'Synchronization value can be set to '
        'VariableSynchronization.ON_READ only for non-trainable variables. '
        'You have specified trainable=True and '
        'synchronization=VariableSynchronization.ON_READ.'):
      _ = layer.add_variable(
          'v', [2, 2],
          initializer=init_ops.zeros_initializer(),
          synchronization=variable_scope.VariableSynchronization.ON_READ,
          trainable=True)
Example 7
  def testKerasStyleAddWeight(self):
    keras_layer = keras_base_layer.Layer(name='keras_layer')
    with backend.name_scope('foo'):
      keras_variable = keras_layer.add_variable(
          'my_var', [2, 2], initializer=init_ops.zeros_initializer())
    self.assertEqual(keras_variable.name, 'foo/my_var:0')

    with backend.name_scope('baz'):
      old_style_layer = base_layers.Layer(name='my_layer')
      # Test basic variable creation.
      variable = old_style_layer.add_variable(
          'my_var', [2, 2], initializer=init_ops.zeros_initializer())
    self.assertEqual(variable.name, 'my_layer/my_var:0')

    with base_layers.keras_style_scope():
      layer = base_layers.Layer(name='my_layer')
    # Assert that the layer was not instrumented as a Keras layer
    self.assertFalse(layer._instrumented_keras_api)
    # Test basic variable creation.
    with backend.name_scope('bar'):
      variable = layer.add_variable(
          'my_var', [2, 2], initializer=init_ops.zeros_initializer())
    self.assertEqual(variable.name, 'bar/my_var:0')
Example 8
  def testReusePartitionedVariablesAndRegularizers(self):
    regularizer = lambda x: math_ops.reduce_sum(x) * 1e-3
    partitioner = partitioned_variables.fixed_size_partitioner(3)
    for reuse in [False, True]:
      with variable_scope.variable_scope(variable_scope.get_variable_scope(),
                                         partitioner=partitioner,
                                         reuse=reuse):
        layer = base_layers.Layer(name='my_layer')
        _ = layer.add_variable(
            'reg_part_var', [4, 4],
            initializer=init_ops.zeros_initializer(),
            regularizer=regularizer)
    # One regularization loss per partition shard (3), and the reused
    # variables on the second pass are not regularized again.
    self.assertEqual(
        len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)), 3)
Example 9
  def testInt64Layer(self):
    layer = base_layers.Layer(name='my_layer', dtype='int64')
    layer.add_variable('my_var', [2, 2])
    self.assertEqual(layer.name, 'my_layer')
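
To actually run any of these snippets, wrap them in a test case class in the same style; a minimal harness (a sketch, assuming the imports listed under Example 1; the class name and the trivial test method are hypothetical, not from the original suite) would be:

class BaseLayerTest(test.TestCase):

  def testLayerName(self):
    # Hypothetical minimal check: a Layer keeps the name it was given.
    layer = base_layers.Layer(name='my_layer')
    self.assertEqual(layer.name, 'my_layer')


if __name__ == '__main__':
  test.main()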