def _initialize_atomic(self, name, root, real_name=None, count=1):
    """Attach a fresh statistics node for atomic action *name* under *root*.

    :param name: key of the new node inside *root*
    :param root: mapping of atomic-action nodes to extend
    :param real_name: display name; falls back to *name* when falsy
    :param count: number of times the action runs per iteration
    """
    real_name = real_name or name

    def _as_percent(stream, has_result):
        # Success ratio rendered as a percentage; "n/a" before any data.
        return ("%.1f%%" % (stream.result() * 100)) if has_result else "n/a"

    def _raw(stream, has_result):
        return stream.result()

    iterations = self.iters_num
    # Pairs of [streaming algorithm, optional renderer].
    algorithms = [
        [streaming.MinComputation(), None],
        [streaming.PercentileComputation(0.5, iterations), None],
        [streaming.PercentileComputation(0.9, iterations), None],
        [streaming.PercentileComputation(0.95, iterations), None],
        [streaming.MaxComputation(), None],
        [streaming.MeanComputation(), None],
        [streaming.MeanComputation(), _as_percent],
        [streaming.IncrementComputation(), _raw],
    ]
    root[name] = {
        "sa": algorithms,  # streaming algorithms
        "children": collections.OrderedDict(),
        "real_name": real_name,
        "count_per_iteration": count,
    }
def test_merge(self):
    """Merging ten partial MaxComputations matches one fed all 100 values."""
    reference = algo.MaxComputation()
    for value in six.moves.range(100):
        reference.add(value)

    # Ten computations, each covering a disjoint decade of 0..99.
    partials = [algo.MaxComputation() for _ in six.moves.range(10)]
    for index, partial in enumerate(partials):
        for value in six.moves.range(index * 10, (index + 1) * 10):
            partial.add(value)

    combined = partials[0]
    for partial in partials[1:]:
        combined.merge(partial)

    self.assertEqual(reference._value, combined._value)
    self.assertEqual(reference.result(), combined.result())
def _init_columns(self):
    """Return an ordered mapping of column titles to fresh computations."""
    columns = costilius.OrderedDict()
    columns["Min (sec)"] = streaming.MinComputation()
    columns["Median (sec)"] = streaming.PercentileComputation(50)
    columns["90%ile (sec)"] = streaming.PercentileComputation(90)
    columns["95%ile (sec)"] = streaming.PercentileComputation(95)
    columns["Max (sec)"] = streaming.MaxComputation()
    columns["Avg (sec)"] = streaming.MeanComputation()
    columns["Success"] = streaming.ProgressComputation(self.base_size)
    columns["Count"] = streaming.IncrementComputation()
    return columns
def add_iteration(self, iteration):
    """Route every (name, value) pair of *iteration* into running stats."""
    for name, value in self._map_iteration_values(iteration):
        if name not in self._data:
            # Lazily create the bundle of streaming computations:
            # raw points, counter, min, max, mean.
            self._data[name] = [
                streaming.PointsSaver(),
                streaming.IncrementComputation(),
                streaming.MinComputation(),
                streaming.MaxComputation(),
                streaming.MeanComputation(),
            ]
        bundle = self._data[name]
        bundle[1].add()  # the counter takes no value
        for computation in (bundle[0], bundle[2], bundle[3], bundle[4]):
            computation.add(value)
def __init__(self, *args, **kwargs):
    """Pre-create a row of streaming computations per atomic action."""
    super(MainStatsTable, self).__init__(*args, **kwargs)
    iters_num = self._workload["total_iteration_count"]

    def percent_or_na(stream, has_result):
        # Success ratio as a percentage; "n/a" before any data.
        return ("%.1f%%" % (stream.result() * 100)) if has_result else "n/a"

    def raw_result(stream, has_result):
        return stream.result()

    def make_row():
        # Pairs of [streaming algorithm, optional renderer].
        return [
            [streaming.MinComputation(), None],
            [streaming.PercentileComputation(0.5, iters_num), None],
            [streaming.PercentileComputation(0.9, iters_num), None],
            [streaming.PercentileComputation(0.95, iters_num), None],
            [streaming.MaxComputation(), None],
            [streaming.MeanComputation(), None],
            [streaming.MeanComputation(), percent_or_na],
            [streaming.IncrementComputation(), raw_result],
        ]

    for name in self._get_atomic_names() + ["total"]:
        self._data[name] = make_row()
def add_iteration(self, iteration):
    """Record one iteration's values into the per-name streaming rows.

    Each row is a list of [algorithm, renderer] pairs, created lazily
    on first sight of a name.
    """
    for name, value in self._map_iteration_values(iteration):
        if name not in self._data:
            iters_num = self._workload["total_iteration_count"]
            self._data[name] = [
                [streaming.MinComputation(), None],
                [streaming.PercentileComputation(0.5, iters_num), None],
                [streaming.PercentileComputation(0.9, iters_num), None],
                [streaming.PercentileComputation(0.95, iters_num), None],
                [streaming.MaxComputation(), None],
                [streaming.MeanComputation(), None],
                [streaming.IncrementComputation(),
                 lambda v, na: v.result()]]
        # Bump the trailing IncrementComputation; its add() ignores the arg.
        self._data[name][-1][0].add(None)
        # NOTE(review): the MeanComputation at index -2 receives this add(1)
        # AND add(value) from the loop below (the [:-1] slice still includes
        # index -2) — confirm the double feed is intentional.
        self._data[name][-2][0].add(1)
        # Feed the value into every algorithm except the counter.
        for idx, dummy in enumerate(self._data[name][:-1]):
            self._data[name][idx][0].add(value)
def _initialize_atomic(self, name, root, real_name=None, count=1):
    """Attach a fresh statistics node for atomic action *name* to *root*.

    :param name: key of the new node inside *root*
    :param root: mapping of atomic-action nodes to extend
    :param real_name: display name; falls back to *name* when falsy
    :param count: number of times the action runs per iteration
    """
    real_name = real_name or name
    computations = [
        streaming.PointsSaver(),
        streaming.MinComputation(),
        streaming.MaxComputation(),
        streaming.MeanComputation(),
        streaming.MeanComputation(),
        streaming.IncrementComputation(),
    ]
    root[name] = {
        "sa": computations,  # streaming algorithms
        "children": collections.OrderedDict(),
        "real_name": real_name,
        "count_per_iteration": count,
    }
def _init_row(self, name, iterations_count):
    """Build one stats-table row: a name cell plus computed columns.

    :param name: value for the leading "Action" cell
    :param iterations_count: expected sample count, used to size the
        percentile computations
    :returns: list of cells; the first is ("Action", name), the rest are
        (column title, streaming computation, render function) triples
        where the renderer is called as renderer(stream, no_result)
    """
    def round_3(stream, no_result):
        # Render to 3 decimal places, or "n/a" when no data arrived.
        if no_result:
            return "n/a"
        return round(stream.result(), 3)

    def percent(stream, no_result):
        # Bug fix: the previous renderer ignored no_result and computed
        # stream.result() * 100 unconditionally, raising TypeError on an
        # empty stream. Mirror the "n/a" behavior of round_3.
        if no_result:
            return "n/a"
        return "%.1f%%" % (stream.result() * 100)

    def count(stream, no_result):
        return stream.result()

    return [
        ("Action", name),
        ("Min (sec)", streaming.MinComputation(), round_3),
        ("Median (sec)",
         streaming.PercentileComputation(0.5, iterations_count), round_3),
        ("90%ile (sec)",
         streaming.PercentileComputation(0.9, iterations_count), round_3),
        ("95%ile (sec)",
         streaming.PercentileComputation(0.95, iterations_count), round_3),
        ("Max (sec)", streaming.MaxComputation(), round_3),
        ("Avg (sec)", streaming.MeanComputation(), round_3),
        ("Success", streaming.MeanComputation(), percent),
        ("Count", streaming.IncrementComputation(), count),
    ]
def test_result_empty(self):
    """result() takes no arguments and yields None before any add()."""
    computation = algo.MaxComputation()
    self.assertRaises(TypeError, computation.result, 1)
    self.assertIsNone(computation.result())
def test_add_raises(self):
    """add() requires exactly one numeric argument."""
    computation = algo.MaxComputation()
    # No argument, a None, and a string must all be rejected.
    for bad_args in ((), (None,), ("str",)):
        self.assertRaises(TypeError, computation.add, *bad_args)
def test_add_and_result(self):
    """MaxComputation tracks the maximum of a mixed int/float stream."""
    comp = algo.MaxComputation()
    # Idiom fix: the original ran a list comprehension purely for its
    # side effects, building and discarding a throwaway list; a plain
    # loop expresses the intent.
    for value in (3, 5.2, 2, -1, 1, 8, 33.4, 0, -3, 42, -2):
        comp.add(value)
    self.assertEqual(42, comp.result())
def test_result_raises(self):
    """result() rejects positional args and raises ValueError when empty."""
    empty = algo.MaxComputation()
    self.assertRaises(TypeError, empty.result, 1)
    self.assertRaises(ValueError, empty.result)
def __init__(self):
    """Prepare accumulators for timestamp and error-window tracking."""
    # Bookkeeping for the most recent error window.
    # NOTE(review): mttr presumably accumulates mean-time-to-repair
    # durations — confirm against the code that updates it.
    self.mttr = 0
    self.last_error_duration = 0
    self.last_iteration = None
    # Streaming min/max over observed timestamps.
    self.min_timestamp = streaming.MinComputation()
    self.max_timestamp = streaming.MaxComputation()