Example 1
 def test_one_of_mixed_input_type(self):
   block = (tdb.Identity(), tdb.Scalar('int32')) >> tdb.OneOf(
       key_fn=tdb.GetItem(0),
       case_blocks=(tdb.Function(tf.square), tdb.Function(tf.negative)),
       pre_block=tdb.GetItem(1))
   self.assertBuilds(4, block, (0, 2))
   self.assertBuilds(-2, block, (1, 2))
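A minimal standalone sketch (not from the test file) of running the same OneOf wiring through the public tensorflow_fold package, which exposes the blocks the tdb alias refers to; key_fn reads item 0 to pick a case block and pre_block routes item 1 into it. make_block is a hypothetical helper so each eval gets a freshly wired block:

import tensorflow as tf
import tensorflow_fold as td  # assumed public alias for the tdb blocks above

def make_block():  # hypothetical helper; rebuilds the wiring from the test above
  return (td.Identity(), td.Scalar('int32')) >> td.OneOf(
      key_fn=td.GetItem(0),
      case_blocks=(td.Function(tf.square), td.Function(tf.negative)),
      pre_block=td.GetItem(1))

with tf.Session():  # Block.eval runs against the default session
  print(make_block().eval((0, 2)))  # key 0 selects tf.square:    4
  print(make_block().eval((1, 2)))  # key 1 selects tf.negative: -2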
Example 2
 def test_max_depth_metrics(self):
   elem_block = tdb.Composition()
   with elem_block.scope():
     s = tdb.Scalar('int32').reads(elem_block.input)
     tdm.Metric('foo').reads(s)
     elem_block.output.reads(s)
   block = (tdb.Map(elem_block), tdb.Identity()) >> tdb.Nth()
   self.assertBuilds((31, {'foo': list(xrange(32))}), block, (range(32), -1))
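Note that Metric('foo') records every element the Map visits, while Nth returns only the element at the requested index (-1, i.e. 31). A rough sketch, mirroring the Compiler pattern of Example 4 below, of fetching those metric values directly; it assumes the public tensorflow_fold package as td and a TF 1.x session:

import tensorflow as tf
import tensorflow_fold as td

# elem_block / block wired exactly as in the test above.
elem_block = td.Composition()
with elem_block.scope():
  s = td.Scalar('int32').reads(elem_block.input)
  td.Metric('foo').reads(s)
  elem_block.output.reads(s)
block = (td.Map(elem_block), td.Identity()) >> td.Nth()

compiler = td.Compiler.create(block)
feed_dict = compiler.build_feed_dict([(range(32), -1)])
with tf.Session() as sess:
  nth, foo = sess.run(
      [compiler.output_tensors[0], compiler.metric_tensors['foo']], feed_dict)
# nth -> [31] (one entry per input example), foo -> [0, 1, ..., 31]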
Example 3
  def test_output_type_inference(self):
    # Identity and composite compute their output types from input types.
    block = tdb.Scalar() >> (tdb.Identity() >> tdb.Identity())
    self.assertBuildsConst(42., block, 42)

    block = ({'a': tdb.Scalar(), 'b': tdb.Vector(2) >> tdb.Identity()} >>
             tdb.Identity() >> (tdb.Identity() >> tdb.Identity()) >>
             tdb.Identity())
    self.assertBuildsConst((42., [5., 1.]), block, {'a': 42, 'b': [5, 1]})
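The inferred types can also be inspected directly on the blocks. A small sketch, not part of the original test, under the same td assumption as above:

import tensorflow_fold as td

block = td.Scalar() >> (td.Identity() >> td.Identity())
print(block.output_type)   # expected: scalar float32 tensor type, inferred from Scalar

record = {'a': td.Scalar(), 'b': td.Vector(2)} >> td.Identity()
print(record.output_type)  # expected: (scalar, length-2 vector) tuple, keys in sorted order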
Example 4
  def test_metrics_labeled(self):
    tree1 = [1, 'a', [2, 'b'], [3, 'c'], [4, 'd']]
    tree2 = [5, 'e', [6, 'f', [7, 'g']]]
    fwd = tdb.ForwardDeclaration()

    leaf = (tdb.Scalar('int32'), tdb.Identity()) >> tdm.Metric('leaf')
    internal = tdb.AllOf(
        (tdb.Scalar('int32'), tdb.Identity()) >> tdm.Metric('internal'),
        tdb.Slice(start=2) >> tdb.Map(fwd())) >> tdb.Void()
    tree = tdb.OneOf(key_fn=lambda expr: len(expr) > 2,
                     case_blocks=(leaf, internal))
    fwd.resolve_to(tree)

    with self.test_session() as sess:
      c = tdc.Compiler.create(tree)
      feed_dict, labels = c.build_feed_dict([tree1, tree2], metric_labels=True)
      self.assertEqual(['b', 'c', 'd', 'g'], labels['leaf'])
      self.assertEqual(['a', 'e', 'f'], labels['internal'])
      leaf_values, internal_values = sess.run(
          [c.metric_tensors['leaf'], c.metric_tensors['internal']], feed_dict)
      np.testing.assert_equal([2, 3, 4, 7], leaf_values)
      np.testing.assert_equal([1, 5, 6], internal_values)
Example 5
 def test_eval_void(self):
   block = tdb.Identity().set_input_type(tdt.VoidType())
   self.assertBuildsConst(None, block, None)
Example 6
 def test_nth(self):
   block = (tdb.Map(tdb.Scalar('int32')), tdb.Identity()) >> tdb.Nth()
   for n in xrange(5):
     self.assertBuildsConst(n, block, (range(5), n))
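Negative indices also work (Example 2 above relies on -1). A sketch under the same td assumption:

import tensorflow as tf
import tensorflow_fold as td

block = (td.Map(td.Scalar('int32')), td.Identity()) >> td.Nth()
with tf.Session():
  print(block.eval((range(5), -1)))  # -> 4; negative indices count from the end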
Example 7
 def test_output_type_raises(self):
   block = tdb.Identity() >> tdb.Identity()
   self.assertRaisesWithLiteralMatch(
       TypeError, 'Cannot determine input type for Identity.',
       block._validate, None)
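The error goes away once some block in the pipe supplies a concrete type, e.g. a leading Scalar as in Example 3. A sketch, same td assumption:

import tensorflow as tf
import tensorflow_fold as td

block = td.Scalar() >> td.Identity() >> td.Identity()
with tf.Session():
  print(block.eval(3))  # -> 3.0; Scalar's type propagates through both Identities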
Example 8
 def test_fold_pyobject(self):
   block = tdb.Fold((tdb.Identity(), tdb.Scalar()) >> tdb.Sum(), tdb.Zeros([]))
   self.assertBuilds(5., block, (2, 3), max_depth=None)
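The same fold handles sequences of any length, accumulating from the Zeros([]) start value. A sketch, same td assumption:

import tensorflow as tf
import tensorflow_fold as td

block = td.Fold((td.Identity(), td.Scalar()) >> td.Sum(), td.Zeros([]))
with tf.Session():
  print(block.eval([1, 2, 3, 4]))  # (((0 + 1) + 2) + 3) + 4 -> 10.0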
Example 9
 def test_record_raises(self):
   six.assertRaisesRegex(
       self, RuntimeError,
       'created with an unordered dict cannot take ordered',
       tdb.Pipe, (tdb.Scalar(), tdb.Scalar()),
       {'a': tdb.Identity(), 'b': tdb.Identity()})
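The raise happens because the first block emits an ordered tuple while the second Record is built from a plain (unordered) dict. A hedged sketch of the presumably intended pairing, using an OrderedDict so the Record can accept positional input; this reading is based on the error message and is not verified:

import collections
import tensorflow as tf
import tensorflow_fold as td

block = td.Pipe((td.Scalar(), td.Scalar()),
                collections.OrderedDict([('a', td.Identity()),
                                         ('b', td.Identity())]))
with tf.Session():
  print(block.eval((3, 4)))  # expected: (3.0, 4.0)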
Example 10
 def test_map_pyobject_type_inference(self):
   b = tdb.Map(tdb.Identity()) >> tdb.Vector(2)
   self.assertBuildsConst([1., 2.], b, [1, 2])
Example 11
 def test_composition_forward_type_inference(self):
   b = tdb.Identity() >> tdb.Identity() >> tdb.Map(tdb.Function(tf.negative))
   six.assertRaisesRegex(
       self, TypeError, 'bad input type PyObjectType',
       b.input.set_input_type, tdt.PyObjectType())
Example 12
 def test_composition_backward_type_inference(self):
   b = tdb.Map(tdb.Identity()) >> tdb.Identity() >> tdb.Identity()
   six.assertRaisesRegex(
       self, TypeError, 'bad output type VoidType',
       b.output.set_output_type, tdt.VoidType())
Example 13
  def test_repr(self):
    goldens = {
        tdb.Tensor([]): '<td.Tensor dtype=\'float32\' shape=()>',
        tdb.Tensor([1, 2], 'int32', name='foo'):
        '<td.Tensor \'foo\' dtype=\'int32\' shape=(1, 2)>',

        tdb.Scalar('int64'): '<td.Scalar dtype=\'int64\'>',

        tdb.Vector(42): '<td.Vector dtype=\'float32\' size=42>',

        tdb.FromTensor(tf.zeros(3)): '<td.FromTensor \'zeros:0\'>',

        tdb.Function(tf.negative,
                     name='foo'): '<td.Function \'foo\' tf_fn=\'negative\'>',

        tdb.Identity(): '<td.Identity>',
        tdb.Identity('foo'): '<td.Identity \'foo\'>',

        tdb.InputTransform(ord): '<td.InputTransform py_fn=\'ord\'>',

        tdb.SerializedMessageToTree('foo'):
        '<td.SerializedMessageToTree \'foo\' '
        'py_fn=\'serialized_message_to_tree\'>',

        tdb.GetItem(3, 'mu'): '<td.GetItem \'mu\' key=3>',

        tdb.Length(): '<td.Length dtype=\'float32\'>',

        tdb.Slice(stop=2): '<td.Slice key=slice(None, 2, None)>',
        tdb.Slice(stop=2, name='x'):
        '<td.Slice \'x\' key=slice(None, 2, None)>',

        tdb.ForwardDeclaration(name='foo')():
        '<td.ForwardDeclaration() \'foo\'>',

        tdb.Composition(name='x').input: '<td.Composition.input \'x\'>',
        tdb.Composition(name='x').output: '<td.Composition.output \'x\'>',
        tdb.Composition(name='x'): '<td.Composition \'x\'>',

        tdb.Pipe(): '<td.Pipe>',
        tdb.Pipe(tdb.Scalar(), tdb.Identity()): '<td.Pipe>',

        tdb.Record({}, name='x'): '<td.Record \'x\' ordered=False>',
        tdb.Record((), name='x'): '<td.Record \'x\' ordered=True>',

        tdb.AllOf(): '<td.AllOf>',
        tdb.AllOf(tdb.Identity()): '<td.AllOf>',
        tdb.AllOf(tdb.Identity(), tdb.Identity()): '<td.AllOf>',

        tdb.AllOf(name='x'): '<td.AllOf \'x\'>',
        tdb.AllOf(tdb.Identity(), name='x'): '<td.AllOf \'x\'>',
        tdb.AllOf(tdb.Identity(), tdb.Identity(), name='x'): '<td.AllOf \'x\'>',

        tdb.Map(tdb.Scalar(), name='x'):
        '<td.Map \'x\' element_block=<td.Scalar dtype=\'float32\'>>',

        tdb.Fold(tdb.Function(tf.add), tf.ones([]), name='x'):
        '<td.Fold \'x\' combine_block=<td.Function tf_fn=\'add\'> '
        'start_block=<td.FromTensor \'ones:0\'>>',

        tdb.RNN(tdl.ScopedLayer(tf.contrib.rnn.GRUCell(num_units=8))):
        '<td.RNN>',
        tdb.RNN(tdl.ScopedLayer(tf.contrib.rnn.GRUCell(num_units=8)), name='x'):
        '<td.RNN \'x\'>',
        tdb.RNN(tdl.ScopedLayer(tf.contrib.rnn.GRUCell(num_units=8)),
                initial_state=tf.ones(8)):
        '<td.RNN>',
        tdb.RNN(tdl.ScopedLayer(tf.contrib.rnn.GRUCell(num_units=8)),
                initial_state=tf.ones(8), name='x'):
        '<td.RNN \'x\'>',

        tdb.Reduce(tdb.Function(tf.add), name='x'):
        '<td.Reduce \'x\' combine_block=<td.Function tf_fn=\'add\'>>',

        tdb.Sum(name='foo'):
        '<td.Sum \'foo\' combine_block=<td.Function tf_fn=\'add\'>>',

        tdb.Min(name='foo'):
        '<td.Min \'foo\' combine_block=<td.Function tf_fn=\'minimum\'>>',

        tdb.Max(name='foo'):
        '<td.Max \'foo\' combine_block=<td.Function tf_fn=\'maximum\'>>',

        tdb.Mean(name='foo'): '<td.Mean \'foo\'>',

        tdb.OneOf(ord, (tdb.Scalar(), tdb.Scalar()), name='x'):
        '<td.OneOf \'x\'>',

        tdb.Optional(tdb.Scalar(), name='foo'):
        '<td.Optional \'foo\' some_case_block=<td.Scalar dtype=\'float32\'>>',

        tdb.Concat(1, True, 'x'):
        '<td.Concat \'x\' concat_dim=1 flatten=True>',

        tdb.Broadcast(name='x'): '<td.Broadcast \'x\'>',

        tdb.Zip(name='x'): '<td.Zip \'x\'>',

        tdb.NGrams(n=42, name='x'): '<td.NGrams \'x\' n=42>',

        tdb.OneHot(2, 3, name='x'):
        '<td.OneHot \'x\' dtype=\'float32\' start=2 stop=3>',
        tdb.OneHot(3): '<td.OneHot dtype=\'float32\' start=0 stop=3>',

        tdb.OneHotFromList(['a', 'b']): '<td.OneHotFromList>',
        tdb.OneHotFromList(['a', 'b'], name='foo'):
        '<td.OneHotFromList \'foo\'>',

        tdb.Nth(name='x'): '<td.Nth \'x\'>',

        tdb.Zeros([], 'x'): '<td.Zeros \'x\'>',

        tdb.Void(): '<td.Void>',
        tdb.Void('foo'): '<td.Void \'foo\'>',

        tdm.Metric('foo'): '<td.Metric \'foo\'>'}
    for block, expected_repr in sorted(six.iteritems(goldens),
                                       key=lambda kv: kv[1]):
      self.assertEqual(repr(block), expected_repr)
Example 14
 def test_eval_metrics(self):
   b = tdb.Map(tdb.Scalar() >> tdb.AllOf(tdm.Metric('x'), tdb.Identity()))
   self.assertBuilds(([(None, 1.), (None, 2.)], {'x': [1., 2.]}), b, [1, 2,])