# Imports assumed for all examples below (Sonnet v1 / TF1-era layout):
import tensorflow.compat.v1 as tf  # plain `import tensorflow as tf` under TF1

from sonnet.python.modules import basic
from sonnet.python.modules import moving_average
from sonnet.python.modules import optimization_constraints


def testMinimalRun(self):
    x = basic.TrainableVariable(
        shape=(), initializers={'w': tf.ones_initializer()})()
    x2 = x ** 2.0
    min_value = 0.5
    constr = optimization_constraints.OptimizationConstraints().add(
        x > min_value)

    self.assertFalse(constr._is_connected)
    loss = moving_average.MovingAverage()(
        x2 + tf.random.normal((), stddev=1.0)) + constr()

    self.assertTrue(constr._is_connected)
    # Once the constraints module has been connected into the graph,
    # adding further constraints must fail.
    with self.assertRaisesRegexp(ValueError, 'Cannot add further constraints'):
        constr.add(x > min_value)
    with self.assertRaisesRegexp(ValueError, 'Cannot add further constraints'):
        constr.add_geq(x, min_value)
    with self.assertRaisesRegexp(ValueError, 'Cannot add further constraints'):
        constr.add_leq(min_value < x)

    # Minimising the constrained loss should drive x down to the
    # constraint boundary, so x**2 converges to min_value**2.
    opt = tf.train.AdamOptimizer(1e-2, beta1=0.0)
    update = opt.minimize(loss)
    with tf.control_dependencies([update]):
        x2 = tf.identity(x2)

    with tf.train.MonitoredSession() as sess:
        for _ in range(500):
            v, _ = sess.run([x2, update])
    self.assertAllClose(v, min_value**2)
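For context, the soft-constraint pattern the test above relies on can be sketched as a penalty with a learned multiplier. Everything below (the names, the Lagrange-multiplier formulation, the ascent step) is an illustrative assumption, not Sonnet's actual implementation:

import tensorflow.compat.v1 as tf

# Hypothetical sketch: the constraint x > min_value contributes a penalty
# lam * (min_value - x) to the loss. lam is increased while the constraint
# is violated (gradient ascent on lam), which pushes x toward the boundary.
x = tf.get_variable('x', shape=(), initializer=tf.ones_initializer())
lam = tf.get_variable('lam', shape=(), initializer=tf.ones_initializer())
min_value = 0.5
loss = x ** 2.0 + tf.stop_gradient(lam) * (min_value - x)
lam_ascent = tf.assign_add(lam, 1e-2 * (min_value - x))  # grow lam while violated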
Example #2
def testFirst(self):
    var = tf.Variable(10.0)
    avg = moving_average.MovingAverage(decay=0.9)(var)

    with tf.train.MonitoredSession() as sess:
        avg_value = sess.run(avg)

        # The avg should be equal to the var after only one iteration
        self.assertEqual(avg_value, 10.0)
Example #3
def testIsDifferentiable(self):
    x = tf.get_variable(name='x', shape=())
    mva = moving_average.MovingAverage(decay=0.99, local=False)
    y = mva(x)
    dydx = tf.gradients(y, x)
    z = mva(2 * x)
    dzdx = tf.gradients(z, x)
    with tf.train.MonitoredSession() as sess:
        df = sess.run([dydx, dzdx])
    self.assertEqual(df[0], [1.0])
    self.assertEqual(df[1], [2.0])
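The asserted gradients (1.0 for x and 2.0 for 2 * x) are consistent with a straight-through implementation: the forward pass returns the running average, while the backward pass treats the module as an identity on its input. A minimal sketch of that trick, offered as an assumption rather than Sonnet's actual code:

import tensorflow.compat.v1 as tf

def straight_through_ema(x, ema, decay=0.99):
    # Forward value: the updated average. Gradient: identity w.r.t. x,
    # because the correction term is wrapped in stop_gradient.
    updated = decay * ema + (1.0 - decay) * x
    return x + tf.stop_gradient(updated - x)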
Example #4
def testReset(self):
    val = tf.placeholder(shape=(), dtype=tf.float32)
    module = moving_average.MovingAverage(decay=0.9)
    avg = module(val)
    reset = module.reset()

    with tf.train.MonitoredSession() as sess:
        avg_value = sess.run(avg, feed_dict={val: 10.0})

        # After a single update the average equals the fed value.
        self.assertEqual(avg_value, 10.0)

        sess.run(reset)
        avg_value = sess.run(avg, feed_dict={val: 100.0})

        # After a reset, the first update again returns the fed value.
        self.assertEqual(avg_value, 100.0)
Example #5
def testAverage(self, use_resource_vars):
    decay = 0.9
    num_steps = 10
    init_value = 3.14

    with tf.variable_scope('', use_resource=use_resource_vars):
        var = tf.get_variable(
            'var', (), initializer=tf.constant_initializer(init_value))

    avg = moving_average.MovingAverage(decay=decay)(tf.identity(var))
    with tf.control_dependencies([avg]):
        increment = tf.assign_add(var, 1.0)

    with tf.train.MonitoredSession() as sess:
        expected_value = init_value
        x = init_value
        for _ in range(num_steps):
            avg_value, _ = sess.run([avg, increment])
            self.assertNear(avg_value, expected_value, 1e-4)
            x += 1
            expected_value = expected_value * decay + x * (1 - decay)
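The expected_value update in the loop is the standard exponential-moving-average recurrence. A quick pure-Python check of the first few asserted values (the first sess.run returns the raw init_value, matching testFirst):

decay, x, ema = 0.9, 3.14, 3.14   # step 0: the average equals the input
for _ in range(3):
    x += 1.0
    ema = decay * ema + (1.0 - decay) * x
    print(round(ema, 3))          # 3.24, then 3.43, then 3.701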
Example #6
def testAssertDecayIsValid(self):
    with self.assertRaisesRegexp(ValueError, 'Decay must be'):
        moving_average.MovingAverage(decay=2.0)
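The error-message prefix suggests a range check in the constructor along these lines; the helper name, bounds, and exact wording are assumptions reconstructed from the test, not the module's verbatim code:

# Hypothetical reconstruction of the constructor guard (bounds assumed):
def _check_decay(decay):
    if not 0.0 <= decay <= 1.0:
        raise ValueError('Decay must be in [0, 1].')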