Example 1
    def testFunctions(self):
        # Create a sequential network with one function.
        net = network.Sequential([nn_ops.relu])
        two = constant_op.constant(2.0)
        self.assertEqual(2.0, net(two).numpy())
        self.assertEqual(0.0, net(-two).numpy())
        # Add a second function.
        net.add(math_ops.negative)
        self.assertEqual(-2.0, net(two).numpy())
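These snippets reference TensorFlow-internal modules that the listing itself never imports. A plausible import block, assuming the TF 1.x contrib/eager module layout (the exact source file is not confirmed by this listing), would be:

    # Assumed imports for all four examples (TF 1.x era paths;
    # a best guess, not confirmed by the listing).
    from tensorflow.contrib.eager.python import network
    from tensorflow.python.framework import constant_op
    from tensorflow.python.layers import core
    from tensorflow.python.ops import math_ops
    from tensorflow.python.ops import nn_ops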
Example 2
    def testTrainingLayer(self):
        net = network.Sequential([core.Dropout(0.99999)])
        two = constant_op.constant(2.0)
        # Dropout is inactive unless training=True is passed.
        self.assertEqual(2.0, net(two).numpy())
        self.assertEqual(2.0, net(two, training=False).numpy())
        for _ in range(20):
            with_dropout = net(two, training=True).numpy()
            self.assertIn(with_dropout, [0.0, 2.0])
            if with_dropout == 0.0:
                return
        # Should only fail spuriously 1 in 10^100 runs.
        self.fail("Didn't see dropout happen after 20 tries.")
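Where the 10^100 figure comes from: with a drop rate of 0.99999, each training=True call keeps the activation with probability 10^-5, so all 20 calls miss the dropout branch with probability (10^-5)^20 = 10^-100.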
Example 3
    def testTrainingFunction(self):
        # Output depends on value of "training".
        def add_training(input_value, training=None):
            if training is None:
                return input_value
            elif training:
                return input_value + 1
            return input_value - 1

        # Passing a "training" argument to double would cause an error.
        def double(input_value):
            return 2 * input_value

        net = network.Sequential([add_training, double])
        two = constant_op.constant(2)
        self.assertEqual(4, net(two).numpy())
        self.assertEqual(2, net(two, training=False).numpy())
        self.assertEqual(6, net(two, training=True).numpy())
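Example 3 works because Sequential only forwards the "training" keyword to callables whose signatures accept it. A minimal sketch of that dispatch, written here as an illustrative reimplementation using Python's inspect module (not the library's actual code), looks like:

    import inspect

    def call_sequential(layers, value, training=None):
        # Pass "training" only to callables that declare it, so a plain
        # function like double() never receives an unexpected keyword.
        for layer in layers:
            try:
                params = inspect.signature(layer).parameters
            except (TypeError, ValueError):
                params = {}
            if "training" in params:
                value = layer(value, training=training)
            else:
                value = layer(value)
        return value

With this sketch, call_sequential([add_training, double], 2, training=True) returns 6, matching the last assertion above.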
Example 4
    def testTwoLayers(self):
        # Create a sequential network with one layer.
        net = network.Sequential([core.Dense(1, use_bias=False)])

        # Set that layer's weights so it multiplies by 3.
        l1 = net.get_layer(index=0)
        net(constant_op.constant([[2.0]]))  # Create l1's variables
        self.assertEqual(1, len(l1.trainable_variables))
        l1.trainable_variables[0].assign([[3.0]])
        self.assertEqual(21.0, net(constant_op.constant([[7.0]])).numpy())

        # Add a second layer to the network.
        l2 = core.Dense(1, use_bias=False)
        net.add(l2)

        # Set the second layer's weights so it multiplies by 11.
        net(constant_op.constant([[2.0]]))  # Create l2's variables
        self.assertEqual(1, len(l2.trainable_variables))
        l2.trainable_variables[0].assign([[11.0]])
        self.assertEqual(231.0, net(constant_op.constant([[7.0]])).numpy())
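These methods come from a unittest-style class. A hypothetical scaffold for running them standalone (the class name and setup are guesses; the .numpy() calls require eager execution, which TF 1.x enabled via tf.enable_eager_execution()) might be:

    import tensorflow as tf
    from tensorflow.python.platform import test

    tf.enable_eager_execution()

    class SequentialTest(test.TestCase):
        # Paste testFunctions, testTrainingLayer, testTrainingFunction,
        # and testTwoLayers from the examples above into this class.
        pass

    if __name__ == "__main__":
        test.main()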