Example #1
0
 def test_dequeue(self):
   """A single-dequeuer trainer consumes a pre-filled queue and logs epochs."""
   train_plan = plan.TrainPlan()
   train_plan.compiler = block_compiler.Compiler().compile(blocks.Scalar())
   # Chief trainer pulling 2 batches of 3 per epoch from a capacity-12 queue.
   train_plan.is_chief_trainer = True
   train_plan.batch_size = 3
   train_plan.batches_per_epoch = 2
   train_plan.queue_capacity = 12
   train_plan.num_dequeuers = 1
   train_plan.ps_tasks = 1
   queue = train_plan._create_queue(0)
   train_plan._setup_dequeuing([queue])
   # Build 12 copies of the loom input for the scalar example 7.
   batch = list(train_plan.compiler.build_loom_inputs([7])) * 3
   enqueue_op = queue.enqueue_many([batch * 4])
   train_plan.losses['foo'], = train_plan.compiler.output_tensors
   train_plan.train_op = tf.no_op()
   train_plan.finalize_stats()
   train_plan.logdir = self.get_temp_dir()
   train_plan.epochs = 2
   train_plan.print_file = six.StringIO()
   init_op = tf.global_variables_initializer()
   supervisor = train_plan.create_supervisor()
   with self.test_session() as sess:
     sess.run(init_op)
     sess.run(enqueue_op)
     train_plan.run(supervisor, sess)
   # The loss of the identity-scalar model is the input value itself (7).
   expected = '\n'.join(['running train',
                         'train_size: 6',
                         'epoch:    1 train[loss: 7.000e+00]',
                         'epoch:    2 train[loss: 7.000e+00]',
                         'final model saved in file: %s' % train_plan.logdir])
   self.assertIn(expected, train_plan.print_file.getvalue())
Example #2
0
 def test_enqueue(self):
   """The chief fills the queue from examples; leftover items are dropped."""
   train_plan = plan.TrainPlan()
   train_plan.compiler = block_compiler.Compiler().compile(blocks.Scalar())
   train_plan.examples = [7] * 8  # 8 % 3 == 2, so two examples are ignored
   train_plan.is_chief_trainer = True
   train_plan.batch_size = 3
   train_plan.queue_capacity = 12
   train_plan.num_dequeuers = 1
   train_plan.ps_tasks = 1
   queue = train_plan._create_queue(0)
   train_plan._setup_enqueuing([queue])
   size_op = queue.size()
   dequeue_op = queue.dequeue_many(12)
   train_plan.finalize_stats()
   train_plan.logdir = self.get_temp_dir()
   train_plan.epochs = 2
   train_plan.print_file = six.StringIO()
   init_op = tf.global_variables_initializer()
   supervisor = train_plan.create_supervisor()
   with self.test_session() as sess:
     sess.run(init_op)
     train_plan.run(supervisor, sess)
     # Enqueue-only chief reports zero loss; it never runs a train op.
     expected = '\n'.join(['running train',
                           'train_size: 6',
                           'epoch:    1 train[loss: 0.000e+00]',
                           'epoch:    2 train[loss: 0.000e+00]',
                           'final model saved in file: %s' % train_plan.logdir])
     self.assertIn(expected, train_plan.print_file.getvalue())
     # The queue should be full, and each item should decode back to 7.
     self.assertEqual(12, sess.run(size_op))
     loom_inputs = sess.run(dequeue_op)
     self.assertEqual((12,), loom_inputs.shape)
     results = sess.run(train_plan.compiler.output_tensors[0],
                        {train_plan.compiler.loom_input_tensor: loom_inputs})
     self.assertEqual(results.tolist(), [7] * 12)
Example #3
0
 def test_init_loom_loom_input_tensor(self):
     """init_loom can be wired to an externally supplied loom input tensor."""
     external_input = tf.placeholder('string')
     compiler = tdc.Compiler()
     compiler.compile(tdb.Scalar())
     compiler.init_loom(0, external_input)
     with self.test_session() as sess:
         loom_input, = list(compiler.build_loom_inputs([42]))
         result = sess.run(compiler.output_tensors[0],
                           {external_input: loom_input})
         self.assertAllEqual([42], result)
Example #4
0
    def test_init_loom(self):
        """Exercises init_loom validation and queue creation for dequeuers."""
        train_plan = plan.TrainPlan()
        train_plan.compiler = block_compiler.Compiler().compile(blocks.Scalar())
        train_plan.batch_size = 3
        train_plan.task = 13
        train_plan.num_dequeuers = 7

        # Dequeuers require at least one parameter-server task.
        self.assertRaisesWithLiteralMatch(ValueError,
                                          'must have at least one PS task; 0',
                                          train_plan.init_loom)

        # And enough workers to host every queue plus every dequeuer.
        train_plan.ps_tasks = 5
        self.assertRaisesWithLiteralMatch(
            ValueError, 'worker_replicas must be at least num_queues + '
            'num_dequeuers; 0 vs. 5 + 7 = 12', train_plan.init_loom)
        train_plan.worker_replicas = 14

        # Would be best to actually create a queue and inspect it, but
        # tf.QueueBase doesn't currently expose these properties.
        default_queue = train_plan._create_queue(3, ctor=dict)
        self.assertEqual(default_queue['capacity'], 12)
        train_plan.queue_capacity = 42
        sized_queue = train_plan._create_queue(3, ctor=dict)
        self.assertEqual(sized_queue['capacity'], 42)
        self.assertEqual(sized_queue['shared_name'],
                         'tensorflow_fold_plan_queue3')

        self.assertEqual(train_plan.init_loom(), (True, False))

        train_plan.compiler = block_compiler.Compiler().compile(blocks.Scalar())
        train_plan.task = 3
        self.assertEqual(train_plan.init_loom(), (False, True))

        # queue_capacity without dequeuers is rejected.
        train_plan.compiler = block_compiler.Compiler().compile(blocks.Scalar())
        train_plan.num_dequeuers = 0
        self.assertRaisesWithLiteralMatch(
            ValueError, 'cannot specify queue_capacity without also '
            'specifying num_dequeuers', train_plan.init_loom)

        train_plan.compiler = block_compiler.Compiler().compile(blocks.Scalar())
        train_plan.queue_capacity = 0
        self.assertEqual(train_plan.init_loom(), (True, True))
Example #5
0
 def test_compiler_input_tensor(self):
     """A compiler fed by a string variable recomputes outputs on reassign."""
     string_var = tf.Variable(['foobar', 'baz'],
                              dtype=tf.string,
                              name='input_variable')
     init_op = tf.global_variables_initializer()
     root_block = tdb.InputTransform(len) >> tdb.Scalar()
     compiler = tdc.Compiler()
     compiler.compile(root_block)
     compiler.init_loom(max_depth=1, input_tensor=string_var)
     output_tensor, = compiler.output_tensors
     with self.test_session() as sess:
         sess.run(init_op)
         # Outputs are the lengths of the variable's current strings.
         first = sess.run(output_tensor)
         self.assertEqual(len(first), 2)
         self.assertEqual(first[0], 6.)
         self.assertEqual(first[1], 3.)
         # Reassigning the variable changes subsequent evaluations.
         sess.run(string_var.assign(['foo', 'blah']))
         second = sess.run(output_tensor)
         self.assertEqual(len(second), 2)
         self.assertEqual(second[0], 3.)
         self.assertEqual(second[1], 4.)