  def testBytesProduced(self):
    stats_aggregator = stats_ops.StatsAggregator()
    dataset = dataset_ops.Dataset.range(100).map(
        lambda x: array_ops.tile([x], ops.convert_to_tensor([x]))).apply(
            stats_ops.bytes_produced_stats("bytes_produced")).apply(
                stats_ops.set_stats_aggregator(stats_aggregator))
    iterator = dataset.make_initializable_iterator()
    next_element = iterator.get_next()
    summary_t = stats_aggregator.get_summary()

    with self.test_session() as sess:
      sess.run(iterator.initializer)
      expected_sum = 0.0
      for i in range(100):
        self.assertAllEqual(
            np.array([i] * i, dtype=np.int64), sess.run(next_element))
        summary_str = sess.run(summary_t)
        self._assertSummaryHasCount(summary_str, "bytes_produced", float(i + 1))
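        # Element i is a vector of i int64 values, so it adds i * 8 bytes.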
        expected_sum += i * 8.0
        self._assertSummaryHasSum(summary_str, "bytes_produced", expected_sum)
      with self.assertRaises(errors.OutOfRangeError):
        sess.run(next_element)
      summary_str = sess.run(summary_t)
      self._assertSummaryHasCount(summary_str, "bytes_produced", 100.0)
      self._assertSummaryHasSum(summary_str, "bytes_produced", expected_sum)
  def testBytesProduced(self):
    dataset = dataset_ops.Dataset.range(100).map(
        lambda x: array_ops.tile([x], ops.convert_to_tensor([x]))).apply(
            stats_ops.bytes_produced_stats("bytes_produced"))
    iterator = dataset.make_initializable_iterator()
    stats_aggregator = stats_ops.StatsAggregator()
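    # In this older API variant, subscribe() returns an op that is run
    # alongside the iterator initializer below to attach the aggregator.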
    stats_aggregator_subscriber = stats_aggregator.subscribe(iterator)
    next_element = iterator.get_next()
    summary_t = stats_aggregator.get_summary()

    with self.test_session() as sess:
      sess.run([iterator.initializer, stats_aggregator_subscriber])
      expected_sum = 0.0
      for i in range(100):
        self.assertAllEqual(
            np.array([i] * i, dtype=np.int64), sess.run(next_element))
        summary_str = sess.run(summary_t)
        self._assertSummaryHasCount(summary_str, "bytes_produced", float(i + 1))
        expected_sum += i * 8.0
        self._assertSummaryHasSum(summary_str, "bytes_produced", expected_sum)
      with self.assertRaises(errors.OutOfRangeError):
        sess.run(next_element)
      summary_str = sess.run(summary_t)
      self._assertSummaryHasCount(summary_str, "bytes_produced", 100.0)
      self._assertSummaryHasSum(summary_str, "bytes_produced", expected_sum)
 def test_bytes_produced_stats_invalid_tag_shape(self):
   with self.assertRaisesRegexp(
       ValueError, 'Shape must be rank 0 but is rank 1'):
     self.run_core_tests(
         lambda: dataset_ops.Dataset.range(100).apply(
             stats_ops.bytes_produced_stats(["bytes_produced"])),
         None, 100)
 def _build_dataset_bytes_stats(self, num_elements):
   return dataset_ops.Dataset.range(num_elements).map(
       lambda x: array_ops.tile([x], ops.convert_to_tensor([x]))).apply(
           stats_ops.bytes_produced_stats("bytes_produced"))
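The snippets above are method excerpts and rely on the imports and test base classes of the TensorFlow test files they come from. For context, here is a minimal, self-contained sketch (not taken from those files) of how bytes_produced_stats and StatsAggregator could be wired into an ordinary TF 1.x input pipeline; the import path, the tag name "bytes_produced", and the log directory are illustrative assumptions and may differ between TensorFlow versions.

import tensorflow as tf

# Assumed contrib-era import path; later 1.x releases moved these ops to
# tf.data.experimental.
from tensorflow.contrib.data.python.ops import stats_ops


def demo_bytes_produced(log_dir="/tmp/bytes_produced_demo"):
  """Runs a small pipeline and exports the aggregated byte statistics."""
  aggregator = stats_ops.StatsAggregator()
  dataset = (tf.data.Dataset.range(10)
             .apply(stats_ops.bytes_produced_stats("bytes_produced"))
             .apply(stats_ops.set_stats_aggregator(aggregator)))
  iterator = dataset.make_initializable_iterator()
  next_element = iterator.get_next()
  summary_t = aggregator.get_summary()

  writer = tf.summary.FileWriter(log_dir)
  with tf.Session() as sess:
    sess.run(iterator.initializer)
    step = 0
    while True:
      try:
        sess.run(next_element)
        # Export the running "bytes_produced" count/sum after every element.
        writer.add_summary(sess.run(summary_t), global_step=step)
        step += 1
      except tf.errors.OutOfRangeError:
        break
  writer.close()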