Example #1
    def testSplitDictOfTensorsAssert(self):
        t1 = tf.constant([[1], [7], [8]])
        t2 = tf.constant([[5], [9], [10]])
        t3 = tf.constant([[13], [14]])

        tensor_dict = {'a': t1, 'b': t2, 'c': t3}
        num_splits = 2

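        # 'a' and 'b' have 3 rows, which split into pieces of size [2, 1], but
        # 'c' has only 2 rows, so the same split sizes cannot be applied to it.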
        with self.assertRaisesRegex(
                ValueError,
                r"can't split axis of size 2 into pieces of size \[2,1\]"):
            splits = input_generator_helper.SplitDictOfTensors(
                tensor_dict, num_splits)
Example #2
    def testDecoderFPropSplitBatch(self, dtype=tf.float32):
        with self.session(use_gpu=True) as sess:
            tf.set_random_seed(_TF_RANDOM_SEED)
            p = self._DecoderParams(dtype=dtype)
            dec = decoder.TransformerDecoder(p)

            src_enc, src_paddings, targets = self._Inputs(dtype=dtype)
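            # Split the source encodings and paddings in half along the batch
            # dimension (axis 1).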
            src_enc1, src_enc2 = tf.split(src_enc, 2, 1)
            src_paddings1, src_paddings2 = tf.split(src_paddings, 2, 1)

            # source idx <-> target idx:
            # 0 <-> (0, 4), 1 <-> (1, 5), 2 <-> (2, 6), 3 <-> (3, 7)
            tgts = ig_helper.SplitDictOfTensors(targets, 4)
            targets1 = py_utils.NestedMap({
                'ids':
                tf.concat([tgts[0]['ids'], tgts[2]['ids']], 0),
                'labels':
                tf.concat([tgts[0]['labels'], tgts[2]['labels']], 0),
                'weights':
                tf.concat([tgts[0]['weights'], tgts[2]['weights']], 0),
                'paddings':
                tf.concat([tgts[0]['paddings'], tgts[2]['paddings']], 0)
            })
            targets2 = py_utils.NestedMap({
                'ids':
                tf.concat([tgts[1]['ids'], tgts[3]['ids']], 0),
                'labels':
                tf.concat([tgts[1]['labels'], tgts[3]['labels']], 0),
                'weights':
                tf.concat([tgts[1]['weights'], tgts[3]['weights']], 0),
                'paddings':
                tf.concat([tgts[1]['paddings'], tgts[3]['paddings']], 0)
            })

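            # The loss over the full batch should match the mean of the losses
            # over the two half batches assembled above.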
            loss, _ = dec.FPropDefaultTheta(src_enc, src_paddings, targets,
                                            None)['loss']
            loss1, _ = dec.FPropDefaultTheta(src_enc1, src_paddings1, targets1,
                                             None)['loss']
            loss2, _ = dec.FPropDefaultTheta(src_enc2, src_paddings2, targets2,
                                             None)['loss']

            tf.global_variables_initializer().run()
            actual_loss, actual_loss1, actual_loss2 = sess.run(
                [loss, loss1, loss2])
            print('actual loss = ', actual_loss)
            print('actual loss1 = ', actual_loss1)
            print('actual loss2 = ', actual_loss2)
            self.assertAlmostEqual(actual_loss,
                                   np.mean([actual_loss1, actual_loss2]),
                                   delta=0.0001)

    def testSplitDictOfTensorsUneven(self):
        with self.session(use_gpu=False) as sess:
            t1 = tf.constant([[1], [4], [8]])
            t2 = tf.constant([[5], [9], [10]])
            t3 = tf.constant([[13], [14], [11]])

            tensor_dict = {'a': t1, 'b': t2, 'c': t3}
            num_splits = 2
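            # Each tensor has 3 rows, so splitting in two yields piece sizes
            # [2, 1]: the first split gets two rows, the second gets one.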
            splits = input_generator_helper.SplitDictOfTensors(
                tensor_dict, num_splits)
            expected = [{
                'a': np.array([[1], [4]]),
                'b': np.array([[5], [9]]),
                'c': np.array([[13], [14]])
            }, {
                'a': np.array([[8]]),
                'b': np.array([[10]]),
                'c': np.array([[11]])
            }]

            actual = sess.run(splits)
            self._assertListOfDictsEqual(actual, expected)
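
Taken together, these tests pin down the contract of SplitDictOfTensors: every
tensor in the dict must share the same leading dimension, that dimension is
split into num_splits contiguous pieces along axis 0 (larger pieces first when
it does not divide evenly), and the result is one dict per piece. Below is a
minimal NumPy sketch of that contract, for illustration only; the function name
split_dict_of_arrays and its error message are placeholders, not the lingvo
implementation.

import numpy as np


def split_dict_of_arrays(array_dict, num_splits):
    """Illustrative NumPy analogue of the splitting behavior tested above."""
    lengths = {key: value.shape[0] for key, value in array_dict.items()}
    expected = next(iter(lengths.values()))
    for key, length in lengths.items():
        # Mirrors the failure in testSplitDictOfTensorsAssert: an array whose
        # leading dimension differs cannot take the same split sizes.
        if length != expected:
            raise ValueError('cannot split axis of size %d the same way as '
                             'an axis of size %d' % (length, expected))
    # np.array_split puts the larger pieces first, e.g. 3 rows -> sizes [2, 1],
    # matching the expected output of testSplitDictOfTensorsUneven.
    pieces = {
        key: np.array_split(value, num_splits, axis=0)
        for key, value in array_dict.items()
    }
    return [{key: pieces[key][i] for key in array_dict}
            for i in range(num_splits)]


splits = split_dict_of_arrays(
    {'a': np.array([[1], [4], [8]]),
     'b': np.array([[5], [9], [10]])}, num_splits=2)
# splits[0]['a'] == [[1], [4]], splits[1]['a'] == [[8]]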