def testOptimizationDifferentOrderOptionsCompareEqual(self):
  """Serializing the same optimizations in a different order yields equal graphs."""
  with ops.Graph().as_default() as graph_a:
    ds = dataset_ops.Dataset.from_tensors(0)
    dataset_ops._OptimizeDataset(
        ds, ["map_and_batch_fusion", "noop_elimination"], [], [])
  with ops.Graph().as_default() as graph_b:
    ds = dataset_ops.Dataset.from_tensors(0)
    dataset_ops._OptimizeDataset(
        ds, ["noop_elimination", "map_and_batch_fusion"], [], [])
  # The optimization list should be canonicalized, so both graphs match.
  self.assertEqual(graph_a.as_graph_def(), graph_b.as_graph_def())
def testOptimizationNonSerializableAsDirectInput(self):
  """Tests that a non-serializable dataset can feed OptimizeDataset directly."""
  ds = dataset_ops.Dataset.from_tensors(0).apply(
      optimization.non_serializable())
  optimized = dataset_ops._OptimizeDataset(ds, ["noop_elimination"])
  self.assertDatasetProduces(optimized, expected_output=[0])
def testOptimizationNonSerializableAsDirectInput(self):
  """A non-serializable dataset is accepted as OptimizeDataset's input."""
  base = dataset_ops.Dataset.from_tensors(0)
  non_serializable = base.apply(optimization.non_serializable())
  result = dataset_ops._OptimizeDataset(non_serializable, ["noop_elimination"])
  self.assertDatasetProduces(result, expected_output=[0])
def testOptimizationStatefulFunction(self):
  """A stateful map function survives optimization and still executes."""
  ds = dataset_ops.Dataset.range(10)
  ds = ds.map(lambda _: random_ops.random_uniform([]))
  ds = ds.batch(10)
  optimized = dataset_ops._OptimizeDataset(ds, [])
  next_element = optimized.make_one_shot_iterator().get_next()
  with self.cached_session() as session:
    session.run(next_element)
def testOptimizationStatefulFunction(self):
  """Optimizing a pipeline with a stateful (random) map must not fail."""
  stateful = dataset_ops.Dataset.range(10).map(
      lambda _: random_ops.random_uniform([])).batch(10)
  optimized = dataset_ops._OptimizeDataset(stateful, [])
  it = optimized.make_one_shot_iterator()
  fetch = it.get_next()
  with self.cached_session() as session:
    session.run(fetch)
def testOptimizationNonSerializable(self):
  """Noop elimination stops at the non-serializable dataset boundary."""
  ds = dataset_ops.Dataset.from_tensors(0)
  ds = ds.apply(optimization.assert_next(["FiniteSkip"]))
  # Should not be removed by noop elimination
  ds = ds.skip(0)
  ds = ds.apply(optimization.non_serializable())
  ds = ds.apply(optimization.assert_next(["MemoryCacheImpl"]))
  # Should be removed by noop elimination
  ds = ds.skip(0)
  ds = ds.cache()
  optimized = dataset_ops._OptimizeDataset(ds, ["noop_elimination"])
  self.assertDatasetProduces(optimized, expected_output=[0])
def testOptimizationNonSerializable(self):
  """Only the skip(0) after the non-serializable stage is elided."""
  pipeline = dataset_ops.Dataset.from_tensors(0)
  pipeline = pipeline.apply(optimization.assert_next(["FiniteSkip"]))
  pipeline = pipeline.skip(0)  # Should not be removed by noop elimination
  pipeline = pipeline.apply(optimization.non_serializable())
  pipeline = pipeline.apply(optimization.assert_next(["MemoryCacheImpl"]))
  pipeline = pipeline.skip(0)  # Should be removed by noop elimination
  pipeline = pipeline.cache()
  pipeline = dataset_ops._OptimizeDataset(pipeline, ["noop_elimination"])
  self.assertDatasetProduces(pipeline, expected_output=[0])
def testOptimizationDoubleOptimizeDatasetNested(self):
  """Two stacked _OptimizeDataset passes also optimize nested functions."""

  def make_nested(_):
    # Inner dataset: after both passes, the map+batch pair should appear
    # as a single fused MapAndBatch node.
    inner = dataset_ops.Dataset.from_tensors(0)
    inner = inner.apply(testing.assert_next(["MapAndBatch"]))
    inner = inner.skip(0)  # Should be fused by map and batch fusion
    inner = inner.map(lambda x: x)
    return inner.batch(1)

  ds = dataset_ops.Dataset.from_tensors(0).flat_map(make_nested)
  ds = dataset_ops._OptimizeDataset(ds, ["map_and_batch_fusion"], [], [])
  ds = dataset_ops._OptimizeDataset(ds, ["noop_elimination"], [], [])
  self.assertDatasetProduces(ds, expected_output=[[0]])
def testOptimizationLargeInputFromTensorSlices(self):
  """Optimization handles a very large placeholder-fed input tensor."""
  input_t = array_ops.placeholder(dtypes.int32, (None, None, None, None))
  ds = dataset_ops.Dataset.from_tensor_slices(input_t)
  ds = dataset_ops._OptimizeDataset(ds, [])
  it = ds.make_initializable_iterator()
  next_element = it.get_next()
  with self.cached_session() as session:
    session.run(it.initializer,
                {input_t: np.ones([1, 512, 1024, 1025], np.int32)})
    session.run(next_element)
def testSkipEagerOptimizationLargeInputFromTensor(self):
  """Graph-mode only: optimization handles a large from_tensors input."""
  input_t = array_ops.placeholder(dtypes.int32, (None, None, None))
  ds = dataset_ops.Dataset.from_tensors(input_t)
  ds = dataset_ops._OptimizeDataset(ds, [])
  it = dataset_ops.make_initializable_iterator(ds)
  next_element = it.get_next()
  with self.cached_session() as session:
    session.run(it.initializer,
                {input_t: np.ones([512, 1024, 1025], np.int32)})
    self.evaluate(next_element)
def testOptimizationThreadPoolDataset(self):
  """A dataset with a private thread pool can still be optimized."""
  ds = dataset_ops.Dataset.range(10).batch(10)
  pool = threadpool.PrivateThreadPool(
      2, display_name="private_thread_pool_%d" % 2)
  ds = threadpool.override_threadpool(ds, pool)
  ds = dataset_ops._OptimizeDataset(ds, [])
  self.assertDatasetProduces(
      ds, expected_output=[list(range(10))], requires_initialization=True)
def testOptimizationNonSerializableAsDirectInput(self):
  """Tests that non-serializable dataset can be OptimizeDataset's input."""
  dataset = dataset_ops.Dataset.from_tensors(0)
  dataset = dataset.apply(optimization.non_serializable())
  dataset = dataset_ops._OptimizeDataset(dataset, ["noop_elimination"])
  iterator = dataset.make_one_shot_iterator()
  get_next = iterator.get_next()
  with self.cached_session() as sess:
    # Fixed: `assertEquals` is a deprecated unittest alias; use `assertEqual`.
    self.assertEqual(0, sess.run(get_next))
    # The single-element dataset must then be exhausted.
    with self.assertRaises(errors.OutOfRangeError):
      sess.run(get_next)
def testOptimizationNestedDataset(self):
  """Noop elimination applies inside a flat_map's nested dataset."""

  def make_nested(_):
    inner = dataset_ops.Dataset.from_tensors(0)
    inner = inner.apply(optimization.assert_next(["MemoryCacheImpl"]))
    inner = inner.skip(0)  # Should be removed by noop elimination
    return inner.cache()

  ds = dataset_ops.Dataset.range(1).flat_map(make_nested)
  ds = dataset_ops._OptimizeDataset(ds, ["noop_elimination"])
  self.assertDatasetProduces(ds, expected_output=[0])
def testOptimizationNestedDataset(self):
  """Nested (flat_map) datasets are rewritten by noop elimination too."""

  def inner_fn(_):
    nested = dataset_ops.Dataset.from_tensors(0)
    nested = nested.apply(optimization.assert_next(["MemoryCacheImpl"]))
    # skip(0) is a no-op and should be elided inside the nested function.
    nested = nested.skip(0)
    return nested.cache()

  outer = dataset_ops.Dataset.range(1).flat_map(inner_fn)
  outer = dataset_ops._OptimizeDataset(outer, ["noop_elimination"])
  self.assertDatasetProduces(outer, expected_output=[0])
def testOptimizationThreadPoolDataset(self):
  """Optimizing a thread-pool-overridden dataset keeps its output intact."""
  batched = dataset_ops.Dataset.range(10).batch(10)
  with_pool = threadpool.override_threadpool(
      batched,
      threadpool.PrivateThreadPool(
          2, display_name="private_thread_pool_%d" % 2))
  optimized = dataset_ops._OptimizeDataset(with_pool, [])
  self.assertDatasetProduces(
      optimized,
      expected_output=[list(range(10))],
      requires_initialization=True)
def testOptimizationNestedDatasetWithModifiedRetval(self):
  """Map-and-batch fusion rewrites the nested function's return dataset."""

  def make_nested(_):
    inner = dataset_ops.Dataset.from_tensors(0)
    inner = inner.apply(optimization.assert_next(["MapAndBatch"]))
    # Should be fused by map and batch fusion
    inner = inner.map(lambda x: x)
    return inner.batch(1)

  ds = dataset_ops.Dataset.range(1).flat_map(make_nested)
  ds = dataset_ops._OptimizeDataset(ds, ["map_and_batch_fusion"])
  self.assertDatasetProduces(ds, expected_output=[[0]])
def testOptimizationNonSerializable(self):
  """Noop elimination stops at a non-serializable dataset boundary."""
  dataset = dataset_ops.Dataset.from_tensors(0)
  dataset = dataset.apply(optimization.assert_next(["FiniteSkip"]))
  dataset = dataset.skip(0)  # Should not be removed by noop elimination
  dataset = dataset.apply(optimization.non_serializable())
  dataset = dataset.apply(optimization.assert_next(["MemoryCacheImpl"]))
  dataset = dataset.skip(0)  # Should be removed by noop elimination
  dataset = dataset.cache()
  dataset = dataset_ops._OptimizeDataset(dataset, ["noop_elimination"])
  iterator = dataset.make_one_shot_iterator()
  get_next = iterator.get_next()
  with self.cached_session() as sess:
    # Fixed: `assertEquals` is a deprecated unittest alias; use `assertEqual`.
    self.assertEqual(0, sess.run(get_next))
    with self.assertRaises(errors.OutOfRangeError):
      sess.run(get_next)
def testOptimizationThreadPoolDataset(self):
  """Thread-pool override composes with _OptimizeDataset in graph mode."""
  ds = dataset_ops.Dataset.range(10).batch(10)
  ds = threadpool.override_threadpool(
      ds,
      threadpool.PrivateThreadPool(
          2, display_name="private_thread_pool_%d" % 2))
  ds = dataset_ops._OptimizeDataset(ds, [])
  it = ds.make_initializable_iterator()
  next_element = it.get_next()
  with self.cached_session() as session:
    session.run(it.initializer)
    self.assertAllEqual(list(range(10)), session.run(next_element))
    with self.assertRaises(errors.OutOfRangeError):
      session.run(next_element)
def testOptimizationThreadPoolDataset(self):
  """Optimization preserves the single batched output of the range dataset."""
  pool = threadpool.PrivateThreadPool(
      2, display_name="private_thread_pool_%d" % 2)
  pipeline = dataset_ops.Dataset.range(10).batch(10)
  pipeline = threadpool.override_threadpool(pipeline, pool)
  pipeline = dataset_ops._OptimizeDataset(pipeline, [])
  iterator = pipeline.make_initializable_iterator()
  fetch = iterator.get_next()
  with self.cached_session() as session:
    session.run(iterator.initializer)
    self.assertAllEqual(list(range(10)), session.run(fetch))
    with self.assertRaises(errors.OutOfRangeError):
      session.run(fetch)
def testOptimizationNestedDataset(self):
  """Noop elimination applies inside a flat_map's nested dataset."""

  def flat_map_fn(_):
    dataset = dataset_ops.Dataset.from_tensors(0)
    dataset = dataset.apply(optimization.assert_next(["MemoryCacheImpl"]))
    dataset = dataset.skip(0)  # Should be removed by noop elimination
    dataset = dataset.cache()
    return dataset

  dataset = dataset_ops.Dataset.range(1)
  dataset = dataset.flat_map(flat_map_fn)
  dataset = dataset_ops._OptimizeDataset(dataset, ["noop_elimination"])
  iterator = dataset.make_one_shot_iterator()
  get_next = iterator.get_next()
  with self.cached_session() as sess:
    # Fixed: `assertEquals` is a deprecated unittest alias; use `assertEqual`.
    self.assertEqual(0, sess.run(get_next))
    with self.assertRaises(errors.OutOfRangeError):
      sess.run(get_next)
def testOptimizationNestedDataset(self):
  """Nested (flat_map) datasets are rewritten by noop elimination too."""

  def flat_map_fn(_):
    dataset = dataset_ops.Dataset.from_tensors(0)
    dataset = dataset.apply(
        optimization.assert_next(["MemoryCacheImpl"]))
    dataset = dataset.skip(0)  # Should be removed by noop elimination
    dataset = dataset.cache()
    return dataset

  dataset = dataset_ops.Dataset.range(1)
  dataset = dataset.flat_map(flat_map_fn)
  dataset = dataset_ops._OptimizeDataset(dataset, ["noop_elimination"])
  iterator = dataset.make_one_shot_iterator()
  get_next = iterator.get_next()
  with self.cached_session() as sess:
    # Fixed: `assertEquals` is a deprecated unittest alias; use `assertEqual`.
    self.assertEqual(0, sess.run(get_next))
    with self.assertRaises(errors.OutOfRangeError):
      sess.run(get_next)
def testOptimizationStatefulFunction(self):
  """A stateful (random) map function survives optimization (eager path)."""
  ds = dataset_ops.Dataset.range(10)
  ds = ds.map(lambda _: random_ops.random_uniform([]))
  ds = ds.batch(10)
  ds = dataset_ops._OptimizeDataset(ds, [])
  next_fn = self.getNext(ds)
  self.evaluate(next_fn())
def _apply_fn(dataset):
  """Function from `Dataset` to `Dataset` applying the optimization list."""
  optimized = dataset_ops._OptimizeDataset(dataset, optimizations)  # pylint: disable=protected-access
  return optimized
def testOptimizationStatefulFunction(self):
  """Optimizing a pipeline with a random map must still yield a batch."""
  stateful = dataset_ops.Dataset.range(10).map(
      lambda _: random_ops.random_uniform([])).batch(10)
  optimized = dataset_ops._OptimizeDataset(stateful, [])
  element_fn = self.getNext(optimized)
  self.evaluate(element_fn())