def testBytesProduced(self, dataset_transformation):
        """Verifies bytes_produced_stats records a running count and byte sum."""
        aggregator = stats_aggregator.StatsAggregator()
        base = dataset_ops.Dataset.range(100).map(
            lambda x: array_ops.tile([x], ops.convert_to_tensor([x])))
        dataset = base.apply(
            stats_ops.bytes_produced_stats("bytes_produced"))
        dataset = dataset_transformation(dataset, aggregator)
        iterator = dataset.make_initializable_iterator()
        next_element = iterator.get_next()
        summary_tensor = aggregator.get_summary()

        with self.cached_session() as sess:
            self.evaluate(iterator.initializer)
            running_total = 0.0
            for step in range(100):
                # Element `step` consists of `step` copies of the value `step`.
                self.assertAllEqual(
                    np.array([step] * step, dtype=np.int64),
                    sess.run(next_element))
                summary = self.evaluate(summary_tensor)
                self._assertSummaryHasCount(
                    summary, "bytes_produced", float(step + 1))
                running_total += step * 8.0  # `step` int64 values, 8 bytes each.
                self._assertSummaryHasSum(
                    summary, "bytes_produced", running_total)
            with self.assertRaises(errors.OutOfRangeError):
                sess.run(next_element)
            summary = self.evaluate(summary_tensor)
            self._assertSummaryHasCount(summary, "bytes_produced", 100.0)
            self._assertSummaryHasSum(summary, "bytes_produced", running_total)
  def testBytesProduced(self, dataset_transformation):
    """Checks byte statistics emitted for each element of a tiled dataset."""
    aggregator = stats_aggregator.StatsAggregator()
    dataset = dataset_ops.Dataset.range(100).map(
        lambda x: array_ops.tile([x], ops.convert_to_tensor([x])))
    dataset = dataset.apply(
        stats_ops.bytes_produced_stats("bytes_produced"))
    dataset = dataset_transformation(dataset, aggregator)
    iterator = dataset_ops.make_initializable_iterator(dataset)
    get_next = iterator.get_next()
    summary_op = aggregator.get_summary()

    with self.cached_session():
      self.evaluate(iterator.initializer)
      total_bytes = 0.0
      for idx in range(100):
        expected = np.array([idx] * idx, dtype=np.int64)
        self.assertAllEqual(expected, self.evaluate(get_next))
        summary = self.evaluate(summary_op)
        self._assertSummaryHasCount(summary, "bytes_produced", float(idx + 1))
        total_bytes += idx * 8.0  # idx int64 elements at 8 bytes apiece.
        self._assertSummaryHasSum(summary, "bytes_produced", total_bytes)
      with self.assertRaises(errors.OutOfRangeError):
        self.evaluate(get_next)
      summary = self.evaluate(summary_op)
      self._assertSummaryHasCount(summary, "bytes_produced", 100.0)
      self._assertSummaryHasSum(summary, "bytes_produced", total_bytes)
 def test_bytes_produced_stats_invalid_tag_shape(self):
   """A rank-1 (list) tag is invalid; the op must raise ValueError."""
   # pylint: disable=g-long-lambda
   make_dataset = lambda: dataset_ops.Dataset.range(100).apply(
       stats_ops.bytes_produced_stats(["bytes_produced"]))
   with self.assertRaisesRegexp(
       ValueError, "Shape must be rank 0 but is rank 1"):
     self.run_core_tests(make_dataset, 100)
# Example #4 (0)
    def testBytesProduced(self, dataset_transformation):
        """Checks running count/sum byte stats for each produced element."""
        aggregator = stats_aggregator.StatsAggregator()
        dataset = dataset_ops.Dataset.range(100).map(
            lambda x: array_ops.tile([x], ops.convert_to_tensor([x])))
        dataset = dataset.apply(
            stats_ops.bytes_produced_stats("bytes_produced"))
        dataset = dataset_transformation(dataset, aggregator)
        next_element = self.getNext(dataset, requires_initialization=True)
        summary_t = aggregator.get_summary()

        total = 0.0
        for n in range(100):
            self.assertAllEqual(
                np.array([n] * n, dtype=np.int64),
                self.evaluate(next_element()))
            summary_str = self.evaluate(aggregator.get_summary())
            self._assertSummaryHasCount(
                summary_str, "bytes_produced", float(n + 1))
            total += n * 8.0  # n int64 values, 8 bytes each.
            self._assertSummaryHasSum(summary_str, "bytes_produced", total)
        with self.assertRaises(errors.OutOfRangeError):
            self.evaluate(next_element())
        # TODO(shivaniagrawal): intentional breaking case.
        summary_str = self.evaluate(summary_t)
        self._assertSummaryHasCount(summary_str, "bytes_produced", 100.0)
        self._assertSummaryHasSum(summary_str, "bytes_produced", total)
 def test_bytes_produced_stats_invalid_tag_shape(self):
   """bytes_produced_stats requires a scalar tag; a list must be rejected."""
   # pylint: disable=g-long-lambda
   build_dataset = lambda: dataset_ops.Dataset.range(100).apply(
       stats_ops.bytes_produced_stats(["bytes_produced"]))
   with self.assertRaisesRegexp(
       ValueError, "Shape must be rank 0 but is rank 1"):
     self.run_core_tests(build_dataset, None, 100)
  def testBytesProduced(self):
    """Validates byte statistics collected through the stats aggregator."""
    aggregator = stats_aggregator.StatsAggregator()
    dataset = dataset_ops.Dataset.range(100).map(
        lambda x: array_ops.tile([x], ops.convert_to_tensor([x])))
    dataset = dataset.apply(
        stats_ops.bytes_produced_stats("bytes_produced"))
    dataset = self.datasetExperimentalStats(dataset, aggregator)
    next_element = self.getNext(dataset, requires_initialization=True)

    bytes_so_far = 0.0
    for step in range(100):
      self.assertAllEqual(
          np.array([step] * step, dtype=np.int64),
          self.evaluate(next_element()))
      handle = self.getHandle(aggregator)
      self.assertStatisticsHasCount(
          handle, "bytes_produced", float(step + 1), step + 2)
      bytes_so_far += step * 8.0  # step int64 values at 8 bytes each.
      self.assertStatisticsHasSum(
          handle, "bytes_produced", bytes_so_far, step + 2)
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(next_element())
    handle = self.getHandle(aggregator)
    self.assertStatisticsHasCount(handle, "bytes_produced", 100.0, 101)
    self.assertStatisticsHasSum(handle, "bytes_produced", bytes_so_far, 101)
  def testBytesProduced(self):
    """Element i is i int64s; the stats track element count and byte sum."""
    aggregator = stats_aggregator.StatsAggregator()
    pipeline = dataset_ops.Dataset.range(100).map(
        lambda x: array_ops.tile([x], ops.convert_to_tensor([x]))).apply(
            stats_ops.bytes_produced_stats("bytes_produced"))
    pipeline = self.datasetExperimentalStats(pipeline, aggregator)
    next_fn = self.getNext(pipeline, requires_initialization=True)

    accumulated = 0.0
    for i in range(100):
      element = self.evaluate(next_fn())
      self.assertAllEqual(np.array([i] * i, dtype=np.int64), element)
      handle = self.getHandle(aggregator)
      self.assertStatisticsHasCount(handle, "bytes_produced", float(i + 1),
                                    i + 2)
      accumulated += i * 8.0  # i int64 values at 8 bytes each.
      self.assertStatisticsHasSum(handle, "bytes_produced", accumulated, i + 2)
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(next_fn())
    handle = self.getHandle(aggregator)
    self.assertStatisticsHasCount(handle, "bytes_produced", 100.0, 101)
    self.assertStatisticsHasSum(handle, "bytes_produced", accumulated, 101)
  def testBytesProduced(self, dataset_transformation):
    """Confirms bytes_produced summaries update after every element."""
    aggregator = stats_aggregator.StatsAggregator()
    dataset = dataset_transformation(
        dataset_ops.Dataset.range(100).map(
            lambda x: array_ops.tile([x], ops.convert_to_tensor([x]))).apply(
                stats_ops.bytes_produced_stats("bytes_produced")),
        aggregator)
    next_element = self.getNext(dataset, requires_initialization=True)
    summary_t = aggregator.get_summary()

    running_sum = 0.0
    for count in range(100):
      self.assertAllEqual(
          np.array([count] * count, dtype=np.int64),
          self.evaluate(next_element()))
      summary_str = self.evaluate(aggregator.get_summary())
      self._assertSummaryHasCount(
          summary_str, "bytes_produced", float(count + 1))
      running_sum += count * 8.0  # count int64 values, 8 bytes each.
      self._assertSummaryHasSum(summary_str, "bytes_produced", running_sum)
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(next_element())
    # TODO(shivaniagrawal): intentional breaking case.
    summary_str = self.evaluate(summary_t)
    self._assertSummaryHasCount(summary_str, "bytes_produced", 100.0)
    self._assertSummaryHasSum(summary_str, "bytes_produced", running_sum)
 def _build_dataset_bytes_stats(self, num_elements):
   """Range dataset whose i-th element is i int64s, with byte stats attached."""
   tiled = dataset_ops.Dataset.range(num_elements).map(
       lambda x: array_ops.tile([x], ops.convert_to_tensor([x])))
   return tiled.apply(stats_ops.bytes_produced_stats("bytes_produced"))
 def _build_dataset_bytes_stats(self, num_elements):
   """Builds the byte-stats test pipeline over range(num_elements)."""
   def _tile(x):
     # Repeat the scalar x, x times, so element sizes grow with the index.
     return array_ops.tile([x], ops.convert_to_tensor([x]))

   dataset = dataset_ops.Dataset.range(num_elements)
   return dataset.map(_tile).apply(
       stats_ops.bytes_produced_stats("bytes_produced"))