예제 #1
0
 def testLayerProperties(self):
     """Checks the default property values of a freshly constructed Layer."""
     default_layer = base_layers.Layer(name='my_layer')
     # Every collection-valued property starts out empty.
     for empty_collection in (default_layer.variables,
                              default_layer.trainable_variables,
                              default_layer.non_trainable_variables,
                              default_layer.updates,
                              default_layer.losses):
         self.assertListEqual(empty_collection, [])
     # A layer is not built until it is first called.
     self.assertEqual(default_layer.built, False)
     # `trainable` is controlled by the constructor argument.
     frozen_layer = base_layers.Layer(name='my_layer', trainable=False)
     self.assertEqual(frozen_layer.trainable, False)
예제 #2
0
 def testLayerProperties(self):
     """Checks Layer defaults; updates/losses are graph-mode only."""
     fresh_layer = base_layers.Layer(name='my_layer')
     # Variable-tracking properties all start out empty.
     for tracked in (fresh_layer.variables,
                     fresh_layer.trainable_variables,
                     fresh_layer.non_trainable_variables):
         self.assertEqual(tracked, [])
     if not context.executing_eagerly():
         # updates, losses only supported in GRAPH mode
         self.assertEqual(fresh_layer.updates, [])
         self.assertEqual(fresh_layer.losses, [])
     # A layer is only marked built after its first call.
     self.assertEqual(fresh_layer.built, False)
     # The `trainable` constructor flag is reflected by the property.
     untrainable_layer = base_layers.Layer(name='my_layer', trainable=False)
     self.assertEqual(untrainable_layer.trainable, False)
예제 #3
0
 def testLayerProperties(self):
   """Checks the default state of a new Layer, including graph-mode extras."""
   layer = base_layers.Layer(name='my_layer')
   # All variable collections are empty on a new layer.
   for collection in (layer.variables,
                      layer.trainable_variables,
                      layer.non_trainable_variables):
     self.assertListEqual(collection, [])
   if context.in_graph_mode():
     # `updates` and `losses` are only supported in GRAPH mode.
     self.assertListEqual(layer.updates, [])
     self.assertListEqual(layer.losses, [])
   # Not built until first call.
   self.assertEqual(layer.built, False)
   # `trainable=False` propagates to the property.
   layer = base_layers.Layer(name='my_layer', trainable=False)
   self.assertEqual(layer.trainable, False)
예제 #4
0
    def testAddWeight(self):
        """Exercises add_variable: trainable, non-trainable, and regularized."""
        with self.test_session():
            layer = base_layers.Layer(name='my_layer')

            # Basic (trainable) variable creation.
            trainable_var = layer.add_variable(
                'my_var', [2, 2], initializer=init_ops.zeros_initializer())
            self.assertEqual(trainable_var.name, 'my_layer/my_var:0')
            self.assertListEqual(layer.variables, [trainable_var])
            self.assertListEqual(layer.trainable_variables, [trainable_var])
            self.assertListEqual(layer.non_trainable_variables, [])
            # The layer's variables also land in the graph collection.
            self.assertListEqual(
                layer.variables,
                ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES))

            # Non-trainable variable creation.
            # layer.add_variable should work even outside `build` and `call`.
            frozen_var = layer.add_variable(
                'non_trainable_var', [2, 2],
                initializer=init_ops.zeros_initializer(),
                trainable=False)
            self.assertListEqual(layer.variables,
                                 [trainable_var, frozen_var])
            self.assertListEqual(layer.trainable_variables, [trainable_var])
            self.assertListEqual(layer.non_trainable_variables, [frozen_var])
            # Only the trainable variable is in TRAINABLE_VARIABLES.
            self.assertEqual(
                len(ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)), 1)

            # Attaching a regularizer contributes exactly one loss term.
            def scaled_sum_regularizer(x):
                return math_ops.reduce_sum(x) * 1e-3

            layer.add_variable(
                'reg_var', [2, 2],
                initializer=init_ops.zeros_initializer(),
                regularizer=scaled_sum_regularizer)
            self.assertEqual(len(layer.losses), 1)
예제 #5
0
파일: base_test.py 프로젝트: Harryi0/tinyML
  def testAddWeight(self):
    """Covers add_variable: trainable, non-trainable, regularized, ON_READ,
    and creation from inside a tf.function."""
    layer = base_layers.Layer(name='my_layer')

    # Basic (trainable) variable creation.
    var = layer.add_variable(
        'my_var', [2, 2], initializer=init_ops.zeros_initializer())
    self.assertEqual(var.name, 'my_layer/my_var:0')
    self.assertEqual(layer.variables, [var])
    self.assertEqual(layer.trainable_variables, [var])
    self.assertEqual(layer.non_trainable_variables, [])
    if not context.executing_eagerly():
      # Graph mode additionally records the variable in the collection.
      self.assertEqual(
          layer.variables,
          ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES))

    # Non-trainable variable creation.
    # layer.add_variable should work even outside `build` and `call`.
    frozen_var = layer.add_variable(
        'non_trainable_var', [2, 2],
        initializer=init_ops.zeros_initializer(),
        trainable=False)
    self.assertEqual(layer.variables, [var, frozen_var])
    self.assertEqual(layer.trainable_variables, [var])
    self.assertEqual(layer.non_trainable_variables, [frozen_var])

    if not context.executing_eagerly():
      # The non-trainable variable must not appear in TRAINABLE_VARIABLES.
      self.assertEqual(
          len(ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)), 1)

    # A regularizer on the variable contributes one loss term.
    regularizer = lambda x: math_ops.reduce_sum(x) * 1e-3
    _ = layer.add_variable(
        'reg_var', [2, 2],
        initializer=init_ops.zeros_initializer(),
        regularizer=regularizer)
    self.assertEqual(len(layer.losses), 1)

    weight_added = [False]

    # Test that sync `ON_READ` variables are defaulted to be non-trainable.
    on_read_var = layer.add_variable(
        'sync_on_read_var', [2, 2],
        initializer=init_ops.zeros_initializer(),
        synchronization=variable_scope.VariableSynchronization.ON_READ,
        aggregation=variable_scope.VariableAggregation.SUM)
    self.assertEqual(layer.non_trainable_variables, [frozen_var, on_read_var])

    @def_function.function
    def function_adds_weight():
      # Guard so retracing does not attempt a second creation.
      if not weight_added[0]:
        layer.add_variable(
            'reg_var_from_function', [2, 2],
            initializer=init_ops.zeros_initializer(),
            regularizer=regularizer)
        weight_added[0] = True

    function_adds_weight()
    # The regularizer created inside the function adds a second loss.
    self.assertEqual(len(layer.losses), 2)
예제 #6
0
파일: base_test.py 프로젝트: Harryi0/tinyML
  def testKerasStyleAddWeight(self):
    """Compares variable naming: Keras layers use the active name scope,
    old-style layers use the layer's own name."""
    keras_layer = keras_base_layer.Layer(name='keras_layer')
    with ops.name_scope('foo', skip_on_eager=False):
      keras_variable = keras_layer.add_variable(
          'my_var', [2, 2], initializer=init_ops.zeros_initializer())
    # Keras layer: the enclosing name scope wins.
    self.assertEqual(keras_variable.name, 'foo/my_var:0')

    with ops.name_scope('baz', skip_on_eager=False):
      legacy_layer = base_layers.Layer(name='my_layer')
      # Test basic variable creation.
      legacy_variable = legacy_layer.add_variable(
          'my_var', [2, 2], initializer=init_ops.zeros_initializer())
    # Old-style layer: named after the layer, ignoring the scope.
    self.assertEqual(legacy_variable.name, 'my_layer/my_var:0')

    with base_layers.keras_style_scope():
      scoped_layer = base_layers.Layer(name='my_layer')
    # Under keras_style_scope the old-style layer adopts Keras naming.
    with ops.name_scope('bar', skip_on_eager=False):
      scoped_variable = scoped_layer.add_variable(
          'my_var', [2, 2], initializer=init_ops.zeros_initializer())
    self.assertEqual(scoped_variable.name, 'bar/my_var:0')
예제 #7
0
    def testInvalidTrainableSynchronizationCombination(self):
        """add_variable must reject trainable=True combined with ON_READ sync.

        Fix: `assertRaisesRegexp` is a deprecated alias of
        `assertRaisesRegex` (deprecated since Python 3.2, removed in 3.12),
        so the modern spelling is used here.
        """
        layer = base_layers.Layer(name='my_layer')

        # The message below is matched as a regex against the raised
        # ValueError (string preserved exactly as the implementation emits it).
        with self.assertRaisesRegex(
                ValueError, 'Synchronization value can be set to '
                'VariableSynchronization.ON_READ only for non-trainable variables. '
                'You have specified trainable=True and '
                'synchronization=VariableSynchronization.ON_READ.'):
            _ = layer.add_variable(
                'v', [2, 2],
                initializer=init_ops.zeros_initializer(),
                synchronization=variable_scope.VariableSynchronization.ON_READ,
                trainable=True)
예제 #8
0
파일: base_test.py 프로젝트: Harryi0/tinyML
 def testReusePartitionedVaraiblesAndRegularizers(self):
   """Regularized partitioned variables: one loss per partition, and a
   reuse pass must not register duplicate regularization losses."""
   regularizer = lambda x: math_ops.reduce_sum(x) * 1e-3
   partitioner = partitioned_variables.fixed_size_partitioner(3)
   # Create once, then repeat the same creation with reuse=True.
   for reuse in (False, True):
     with variable_scope.variable_scope(variable_scope.get_variable_scope(),
                                        partitioner=partitioner,
                                        reuse=reuse):
       layer = base_layers.Layer(name='my_layer')
       _ = layer.add_variable(
           'reg_part_var', [4, 4],
           initializer=init_ops.zeros_initializer(),
           regularizer=regularizer)
   # Expect exactly 3 losses (one per partition), not 6.
   self.assertEqual(
       len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)), 3)
예제 #9
0
 def testInt64Layer(self):
     """A Layer constructed with dtype='int64' can still create variables."""
     int64_layer = base_layers.Layer(name='my_layer', dtype='int64')
     # Variable creation must not fail for an int64-dtype layer.
     int64_layer.add_variable('my_var', [2, 2])
     self.assertEqual(int64_layer.name, 'my_layer')
예제 #10
0
 def testActivityRegularizer(self):
     """Applying a layer with an activity_regularizer records one loss
     conditional on its input."""
     activity_reg = math_ops.reduce_sum
     regularized_layer = base_layers.Layer(activity_regularizer=activity_reg)
     inputs = array_ops.placeholder('int32')
     regularized_layer.apply(inputs)
     # Exactly one loss should be tracked against this input tensor.
     self.assertEqual(len(regularized_layer.get_losses_for(inputs)), 1)
예제 #11
0
from tensorflow.python.layers import base as base_layer

# Print the attributes available on the Layer class itself, then on a
# default-constructed instance, for interactive inspection.
for inspected in (base_layer.Layer, base_layer.Layer()):
    print(dir(inspected))