Example #1
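These snippets appear to be excerpts from the unit tests for TensorFlow's legacy (v1-style) layers and their Keras counterparts. They are shown without the surrounding test class or imports. A minimal preamble that makes the names resolve, assuming the Keras legacy_tf_layers test suite (the aliases base_tf_layers, base_layers and keras_base_layer are inferred from usage, not confirmed by the source), might be:

    import tensorflow as tf

    from keras import backend
    from keras.engine import base_layer as keras_base_layer
    # The two aliases below likely point at the same legacy layer module;
    # some examples use one name, some the other.
    from keras.legacy_tf_layers import base as base_tf_layers
    from keras.legacy_tf_layers import base as base_layers

Each test body would also need to sit inside a class deriving from tf.test.TestCase.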
    def testKerasStyleAddWeight(self):
        keras_layer = keras_base_layer.Layer(name="keras_layer")
        with backend.name_scope("foo"):
            keras_variable = keras_layer.add_weight(
                "my_var", [2, 2], initializer=tf.compat.v1.zeros_initializer()
            )
        self.assertEqual(keras_variable.name, "foo/my_var:0")

        with backend.name_scope("baz"):
            old_style_layer = base_tf_layers.Layer(name="my_layer")
            # Test basic variable creation.
            variable = old_style_layer.add_weight(
                "my_var", [2, 2], initializer=tf.compat.v1.zeros_initializer()
            )
        self.assertEqual(variable.name, "my_layer/my_var:0")

        with base_tf_layers.keras_style_scope():
            layer = base_tf_layers.Layer(name="my_layer")
        # Assert that the layer was not instrumented as a Keras layer
        self.assertFalse(layer._instrumented_keras_api)
        # Test basic variable creation.
        with backend.name_scope("bar"):
            variable = layer.add_weight(
                "my_var", [2, 2], initializer=tf.compat.v1.zeros_initializer()
            )
        self.assertEqual(variable.name, "bar/my_var:0")
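In short: a true Keras layer names its weight under the active name scope (foo/my_var:0), a legacy layer prefixes its own layer name instead (my_layer/my_var:0), and keras_style_scope() switches a legacy layer over to the Keras convention (bar/my_var:0).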
Example #2
  def testLayerProperties(self):
    layer = base_layers.Layer(name='my_layer')
    self.assertEqual(layer.variables, [])
    self.assertEqual(layer.trainable_variables, [])
    self.assertEqual(layer.non_trainable_variables, [])
    if not tf.executing_eagerly():
      # updates, losses only supported in GRAPH mode
      self.assertEqual(layer.updates, [])
      self.assertEqual(layer.losses, [])
    self.assertEqual(layer.built, False)
    layer = base_layers.Layer(name='my_layer', trainable=False)
    self.assertEqual(layer.trainable, False)

    # Assert that the layer was not instrumented as a Keras layer
    self.assertFalse(layer._instrumented_keras_api)
Example #3
def testActivityRegularizer(self):
  with tf.Graph().as_default():
    regularizer = tf.reduce_sum
    layer = base_tf_layers.Layer(activity_regularizer=regularizer)
    x = tf.compat.v1.placeholder('int32')
    layer(x)
    self.assertEqual(len(layer.get_losses_for(x)), 1)
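Calling the layer on x applies the activity regularizer to the output, and get_losses_for(x) returns that single conditional loss.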
Example #4
def testReusePartitionedVariablesAndRegularizers(self):
    with tf.Graph().as_default():
        regularizer = lambda x: tf.reduce_sum(x) * 1e-3
        partitioner = tf.compat.v1.fixed_size_partitioner(3)
        for reuse in [False, True]:
            with tf.compat.v1.variable_scope(
                tf.compat.v1.get_variable_scope(),
                partitioner=partitioner,
                reuse=reuse,
            ):
                layer = base_tf_layers.Layer(name="my_layer")
                _ = layer.add_weight(
                    "reg_part_var",
                    [4, 4],
                    initializer=tf.compat.v1.zeros_initializer(),
                    regularizer=regularizer,
                )
        self.assertEqual(
            len(
                tf.compat.v1.get_collection(
                    tf.compat.v1.GraphKeys.REGULARIZATION_LOSSES
                )
            ),
            3,
        )
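The final assertion is the point of the test: fixed_size_partitioner(3) splits the [4, 4] weight into three slices, the regularizer contributes one loss per slice on the first pass, and the reuse=True pass must not add duplicates, leaving exactly 3 entries in the REGULARIZATION_LOSSES collection.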
Example #5
    def testAddWeight(self):
        layer = base_layers.Layer(name='my_layer')

        # Test basic variable creation.
        variable = layer.add_variable(
            'my_var', [2, 2], initializer=tf.compat.v1.zeros_initializer())
        self.assertEqual(variable.name, 'my_layer/my_var:0')
        self.assertEqual(layer.variables, [variable])
        self.assertEqual(layer.trainable_variables, [variable])
        self.assertEqual(layer.non_trainable_variables, [])
        if not tf.executing_eagerly():
            self.assertEqual(
                layer.variables,
                tf.compat.v1.get_collection(
                    tf.compat.v1.GraphKeys.TRAINABLE_VARIABLES))

        # Test non-trainable variable creation.
        # layer.add_variable should work even outside `build` and `call`.
        variable_2 = layer.add_variable(
            'non_trainable_var', [2, 2],
            initializer=tf.compat.v1.zeros_initializer(),
            trainable=False)
        self.assertEqual(layer.variables, [variable, variable_2])
        self.assertEqual(layer.trainable_variables, [variable])
        self.assertEqual(layer.non_trainable_variables, [variable_2])

        if not tf.executing_eagerly():
            self.assertEqual(
                len(
                    tf.compat.v1.get_collection(
                        tf.compat.v1.GraphKeys.TRAINABLE_VARIABLES)), 1)

        regularizer = lambda x: tf.reduce_sum(x) * 1e-3
        _ = layer.add_variable('reg_var', [2, 2],
                               initializer=tf.compat.v1.zeros_initializer(),
                               regularizer=regularizer)
        self.assertEqual(len(layer.losses), 1)

        added_variable = [False]

        # Test that sync `ON_READ` variables are defaulted to be non-trainable.
        variable_3 = layer.add_variable(
            'sync_on_read_var', [2, 2],
            initializer=tf.compat.v1.zeros_initializer(),
            synchronization=tf.VariableSynchronization.ON_READ,
            aggregation=tf.compat.v1.VariableAggregation.SUM)
        self.assertEqual(layer.non_trainable_variables,
                         [variable_2, variable_3])

        @tf.function
        def function_adds_weight():
            if not added_variable[0]:
                layer.add_variable(
                    'reg_var_from_function', [2, 2],
                    initializer=tf.compat.v1.zeros_initializer(),
                    regularizer=regularizer)
                added_variable[0] = True

        function_adds_weight()
        self.assertEqual(len(layer.losses), 2)
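A note on the API: add_variable is the older spelling of add_weight on these layers (Example #1 makes the same calls through add_weight) and is likely a deprecated alias. The test also pins down two defaults: weights are trainable unless trainable=False is passed, and a weight created with synchronization=tf.VariableSynchronization.ON_READ defaults to non-trainable.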
Example #6
  def testLayerProperties(self):
    layer = base_tf_layers.Layer(name='my_layer')
    self.assertEqual(layer.variables, [])
    self.assertEqual(layer.trainable_variables, [])
    self.assertEqual(layer.non_trainable_variables, [])
    if not tf.executing_eagerly():
      # updates, losses only supported in GRAPH mode
      self.assertEqual(layer.updates, [])
      self.assertEqual(layer.losses, [])
    self.assertEqual(layer.built, False)
    layer = base_tf_layers.Layer(name='my_layer', trainable=False)
    self.assertEqual(layer.trainable, False)

    # Assert that the layer was not instrumented as a Keras layer
    self.assertFalse(layer._instrumented_keras_api)

    # Assert this was instrumented as a legacy layer
    self.assertTrue(
        keras_base_layer.keras_api_gauge.get_cell('legacy_layer').value())
    keras_base_layer.keras_api_gauge.get_cell('legacy_layer').set(False)
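The keras_api_gauge lines exercise what appears to be an internal usage-tracking gauge: constructing a legacy layer sets the 'legacy_layer' cell, and the test resets it so later tests start from a clean state.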
Example #7
  def testInvalidTrainableSynchronizationCombination(self):
    layer = base_tf_layers.Layer(name='my_layer')

    with self.assertRaisesRegex(
        ValueError, 'Synchronization value can be set to '
        'VariableSynchronization.ON_READ only for non-trainable variables. '
        'You have specified trainable=True and '
        'synchronization=VariableSynchronization.ON_READ.'):
      _ = layer.add_weight(
          'v', [2, 2],
          initializer=tf.compat.v1.zeros_initializer(),
          synchronization=tf.VariableSynchronization.ON_READ,
          trainable=True)
Example #8
    def testKerasStyleAddWeight(self):
        keras_layer = keras_base_layer.Layer(name='keras_layer')
        with backend.name_scope('foo'):
            keras_variable = keras_layer.add_variable(
                'my_var', [2, 2], initializer=tf.compat.v1.zeros_initializer())
        self.assertEqual(keras_variable.name, 'foo/my_var:0')

        with backend.name_scope('baz'):
            old_style_layer = base_layers.Layer(name='my_layer')
            # Test basic variable creation.
            variable = old_style_layer.add_variable(
                'my_var', [2, 2], initializer=tf.compat.v1.zeros_initializer())
        self.assertEqual(variable.name, 'my_layer/my_var:0')

        with base_layers.keras_style_scope():
            layer = base_layers.Layer(name='my_layer')
        # Assert that the layer was not instrumented as a Keras layer
        self.assertFalse(layer._instrumented_keras_api)
        # Test basic variable creation.
        with backend.name_scope('bar'):
            variable = layer.add_variable(
                'my_var', [2, 2], initializer=tf.compat.v1.zeros_initializer())
        self.assertEqual(variable.name, 'bar/my_var:0')
Example #9
    def testInvalidTrainableSynchronizationCombination(self):
        layer = base_tf_layers.Layer(name="my_layer")

        with self.assertRaisesRegex(
            ValueError,
            "Synchronization value can be set to "
            "VariableSynchronization.ON_READ only for non-trainable variables. "
            "You have specified trainable=True and "
            "synchronization=VariableSynchronization.ON_READ.",
        ):
            _ = layer.add_weight(
                "v",
                [2, 2],
                initializer=tf.compat.v1.zeros_initializer(),
                synchronization=tf.VariableSynchronization.ON_READ,
                trainable=True,
            )
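Examples #7 and #9 are the same test in two formatting styles. The rule they verify is the flip side of Example #5: ON_READ synchronization is only valid for non-trainable weights, so passing trainable=True alongside it raises a ValueError, whereas leaving trainable unset (as with variable_3 in Example #5) defaults it to False.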
Example #10
def testInt64Layer(self):
  layer = base_tf_layers.Layer(name='my_layer', dtype='int64')
  layer.add_weight('my_var', [2, 2])
  self.assertEqual(layer.name, 'my_layer')
Example #11
def testInt64Layer(self):
    layer = base_tf_layers.Layer(name="my_layer", dtype="int64")
    layer.add_weight("my_var", [2, 2])
    self.assertEqual(layer.name, "my_layer")
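Examples #10 and #11 are likewise the same test twice. It checks that a legacy layer constructed with dtype='int64' can still create a weight with the default initializer and keeps its given name.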