def testKerasStyleAddWeight(self): keras_layer = keras_base_layer.Layer(name="keras_layer") with backend.name_scope("foo"): keras_variable = keras_layer.add_weight( "my_var", [2, 2], initializer=tf.compat.v1.zeros_initializer() ) self.assertEqual(keras_variable.name, "foo/my_var:0") with backend.name_scope("baz"): old_style_layer = base_tf_layers.Layer(name="my_layer") # Test basic variable creation. variable = old_style_layer.add_weight( "my_var", [2, 2], initializer=tf.compat.v1.zeros_initializer() ) self.assertEqual(variable.name, "my_layer/my_var:0") with base_tf_layers.keras_style_scope(): layer = base_tf_layers.Layer(name="my_layer") # Assert that the layer was not instrumented as a Keras layer self.assertFalse(layer._instrumented_keras_api) # Test basic variable creation. with backend.name_scope("bar"): variable = layer.add_weight( "my_var", [2, 2], initializer=tf.compat.v1.zeros_initializer() ) self.assertEqual(variable.name, "bar/my_var:0")
def __init__(self, name=None, enable_histograms=True):
  super(CustomModel, self).__init__()
  self._my_layers = [
      layer_lib.Dense(
          4096,
          name='dense1',
          kernel_initializer=tf.compat.v1.glorot_normal_initializer(seed=0),
          use_bias=False),
      layer_lib.Dense(
          4,
          name='dense2',
          kernel_initializer=tf.compat.v1.glorot_normal_initializer(seed=0),
          use_bias=False),
  ]
  if enable_histograms:
    self.histogram_summary_layer = LayerForHistogramSummary()
  else:
    self.histogram_summary_layer = base_layer.Layer()  # no-op pass-through
  self.scalar_summary_layer = LayerForScalarSummary()
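# The two summary layers referenced above are defined elsewhere in the test
# file. A minimal sketch, assuming each is a pass-through layer whose call()
# only records a tf.summary value; the class bodies and summary tags below are
# illustrative, not the original definitions.
class LayerForScalarSummary(base_layer.Layer):
  """Pass-through layer that records a scalar summary of its input."""

  def call(self, x):
    tf.summary.scalar('custom_scalar_summary', tf.reduce_sum(x))
    return x


class LayerForHistogramSummary(base_layer.Layer):
  """Pass-through layer that records a histogram summary of its input."""

  def call(self, x):
    tf.summary.histogram('custom_histogram_summary', x)
    return x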
def testKerasStyleAddWeight(self):
  keras_layer = keras_base_layer.Layer(name='keras_layer')
  with backend.name_scope('foo'):
    keras_variable = keras_layer.add_variable(
        'my_var', [2, 2], initializer=tf.compat.v1.zeros_initializer())
  self.assertEqual(keras_variable.name, 'foo/my_var:0')

  with backend.name_scope('baz'):
    old_style_layer = base_layers.Layer(name='my_layer')
    # Test basic variable creation.
    variable = old_style_layer.add_variable(
        'my_var', [2, 2], initializer=tf.compat.v1.zeros_initializer())
  self.assertEqual(variable.name, 'my_layer/my_var:0')

  with base_layers.keras_style_scope():
    layer = base_layers.Layer(name='my_layer')
    # Assert that the layer was not instrumented as a Keras layer.
    self.assertFalse(layer._instrumented_keras_api)
    # Test basic variable creation.
    with backend.name_scope('bar'):
      variable = layer.add_variable(
          'my_var', [2, 2], initializer=tf.compat.v1.zeros_initializer())
    self.assertEqual(variable.name, 'bar/my_var:0')
def test_delete_variable(self):
  layer = base_layer.Layer(dtype='mixed_float16')
  layer.x = layer.add_weight('x')
  self.assertEqual(layer.trainable_weights, [layer.x])
  del layer.x
  self.assertEqual(layer.trainable_weights, [])
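# A minimal, self-contained sketch of the behavior exercised above, written
# against the public tf.keras API; assumes TF 2.x, where the 'mixed_float16'
# dtype-policy string is accepted by the Layer constructor.
import tensorflow as tf

layer = tf.keras.layers.Layer(dtype='mixed_float16')
layer.x = layer.add_weight('x')  # the weight is tracked while the attribute exists
assert layer.trainable_weights == [layer.x]
del layer.x  # deleting the attribute also untracks the weight
assert not layer.trainable_weights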