def testLatencyStatsOptimizationV2(self):
  """Checks LatencyStats nodes are interleaved into the optimized graph.

  Builds tensors -> map -> prefetch with a stats aggregator attached,
  asserts the rewrite inserts a LatencyStats op around each stage, then
  verifies (via a graph-mode session) that each stage recorded exactly
  one latency sample in the aggregator's summary.
  """
  stats_agg = stats_aggregator.StatsAggregator()
  opts = dataset_ops.Options()
  opts.experimental_stats = stats_options.StatsOptions(stats_agg)
  ds = dataset_ops.Dataset.from_tensors(1).apply(
      optimization.assert_next([
          "LatencyStats", "Map", "LatencyStats", "Prefetch", "LatencyStats"
      ])).map(lambda x: x * x).prefetch(1).with_options(opts)
  it = ds.make_initializable_iterator()
  next_elem = it.get_next()
  summary_op = stats_agg.get_summary()
  with self.cached_session() as session:
    session.run(it.initializer)
    # Single-element dataset: one successful fetch, then end of sequence.
    self.assertEqual(1, session.run(next_elem))
    with self.assertRaises(errors.OutOfRangeError):
      session.run(next_elem)
    summary = session.run(summary_op)
    self._assertSummaryHasCount(summary, "record_latency_TensorDataset/_1", 1)
    self._assertSummaryHasCount(summary, "record_latency_MapDataset/_4", 1)
    self._assertSummaryHasCount(summary, "record_latency_PrefetchDataset/_6",
                                1)
def function_apply_options(dataset, aggregator, prefix="", counter_prefix=""):
  """Returns `dataset` with stats collection configured on its options.

  Args:
    dataset: A `tf.data.Dataset` to attach stats options to.
    aggregator: The `StatsAggregator` that will collect the statistics.
    prefix: Prefix applied to statistic names.
    counter_prefix: Prefix applied to counter names.

  Returns:
    The dataset with the configured options applied. Note that
    `latency_all_edges` is explicitly disabled.
  """
  # Build the stats configuration first, then hang it off a fresh Options.
  stats = stats_options.StatsOptions()
  stats.aggregator = aggregator
  stats.prefix = prefix
  stats.counter_prefix = counter_prefix
  stats.latency_all_edges = False
  opts = dataset_ops.Options()
  opts.experimental_stats = stats
  return dataset.with_options(opts)
def testOptionsHaveDefaults(self):
  """Two fresh Options objects hold distinct but equal default sub-options."""
  first = dataset_ops.Options()
  second = dataset_ops.Options()
  # Each Options instance must own its own sub-option objects (no sharing).
  for accessor in (lambda o: o.experimental_optimization,
                   lambda o: o.experimental_stats,
                   lambda o: o.experimental_threading):
    self.assertIsNot(accessor(first), accessor(second))
  # Defaults compare equal to freshly constructed sub-option objects.
  self.assertEqual(first.experimental_optimization,
                   optimization_options.OptimizationOptions())
  self.assertEqual(first.experimental_stats, stats_options.StatsOptions())
  self.assertEqual(first.experimental_threading,
                   threading_options.ThreadingOptions())
def testLatencyStatsOptimizationV2(self):
  """Checks LatencyStats nodes are interleaved into the optimized graph.

  Builds tensors -> map -> prefetch with a stats aggregator attached,
  asserts the rewrite inserts a LatencyStats op around each stage, then
  verifies (via assertDatasetProduces) that each stage recorded exactly
  one latency sample in the aggregator's summary.
  """
  stats_agg = stats_aggregator.StatsAggregator()
  opts = dataset_ops.Options()
  opts.experimental_stats = stats_options.StatsOptions(stats_agg)
  ds = dataset_ops.Dataset.from_tensors(1).apply(
      optimization.assert_next([
          "LatencyStats", "Map", "LatencyStats", "Prefetch", "LatencyStats"
      ])).map(lambda x: x * x).prefetch(1).with_options(opts)
  # Single-element dataset: drain it once, which also populates the stats.
  self.assertDatasetProduces(
      ds,
      expected_output=[1],
      requires_initialization=True,
      num_test_iterations=1)
  summary = self.evaluate(stats_agg.get_summary())
  self._assertSummaryHasCount(summary, "record_latency_TensorDataset/_1", 1)
  self._assertSummaryHasCount(summary, "record_latency_MapDataset/_4", 1)
  self._assertSummaryHasCount(summary, "record_latency_PrefetchDataset/_6", 1)