def _initialize_atomic(self, name, root, real_name=None, count=1):
    """Register a stats node for atomic action *name* under *root*.

    Each node carries a list of (streaming computation, formatter) pairs;
    a formatter of None means the renderer uses its default formatting.
    """
    def _percent(st, has_result):
        # Mean rendered as a percentage, or "n/a" when nothing was recorded.
        if has_result:
            return "%.1f%%" % (st.result() * 100)
        return "n/a"

    def _count(st, has_result):
        return st.result()

    computations = [
        [streaming.MinComputation(), None],
        [streaming.PercentileComputation(0.5, self.iters_num), None],
        [streaming.PercentileComputation(0.9, self.iters_num), None],
        [streaming.PercentileComputation(0.95, self.iters_num), None],
        [streaming.MaxComputation(), None],
        [streaming.MeanComputation(), None],
        [streaming.MeanComputation(), _percent],
        [streaming.IncrementComputation(), _count],
    ]
    root[name] = {
        "sa": computations,  # streaming algorithms
        "children": collections.OrderedDict(),
        "real_name": real_name or name,
        "count_per_iteration": count,
    }
def test_merge(self):
    """Merging ten partial counters must equal one counter fed everything."""
    # Reference: a single computation fed all values 0..99.
    reference = algo.IncrementComputation()
    for val in six.moves.range(100):
        reference.add(val)

    # Ten computations, each fed a disjoint slice of ten values.
    partials = [algo.IncrementComputation() for _ in six.moves.range(10)]
    for idx, partial in enumerate(partials):
        for val in six.moves.range(idx * 10, (idx + 1) * 10):
            partial.add(val)

    # Fold every remaining partial into the first one.
    combined = partials[0]
    for partial in partials[1:]:
        combined.merge(partial)

    self.assertEqual(reference._count, combined._count)
    self.assertEqual(reference.result(), combined.result())
def _init_columns(self):
    """Return the ordered mapping of column titles to fresh computations."""
    columns = (
        ("Min (sec)", streaming.MinComputation()),
        ("Median (sec)", streaming.PercentileComputation(50)),
        ("90%ile (sec)", streaming.PercentileComputation(90)),
        ("95%ile (sec)", streaming.PercentileComputation(95)),
        ("Max (sec)", streaming.MaxComputation()),
        ("Avg (sec)", streaming.MeanComputation()),
        ("Success", streaming.ProgressComputation(self.base_size)),
        ("Count", streaming.IncrementComputation()),
    )
    return costilius.OrderedDict(columns)
def add_iteration(self, iteration):
    """Feed one iteration's values into the per-name streaming stats."""
    for name, value in self._map_iteration_values(iteration):
        if name not in self._data:
            # Lazily create the bundle of computations for this name.
            self._data[name] = [
                streaming.PointsSaver(),
                streaming.IncrementComputation(),
                streaming.MinComputation(),
                streaming.MaxComputation(),
                streaming.MeanComputation(),
            ]
        points, counter, minimum, maximum, mean = self._data[name]
        # The counter tracks occurrences only; every other computation
        # consumes the measured value itself.
        counter.add()
        for computation in (points, minimum, maximum, mean):
            computation.add(value)
def __init__(self, *args, **kwargs):
    """Prepare streaming computations for each atomic action and "total"."""
    super(MainStatsTable, self).__init__(*args, **kwargs)
    iters_num = self._workload["total_iteration_count"]

    def _as_percent(st, has_result):
        # Mean rendered as a percentage, "n/a" when no data was seen.
        return ("%.1f%%" % (st.result() * 100)) if has_result else "n/a"

    def _as_count(st, has_result):
        return st.result()

    for name in self._get_atomic_names() + ["total"]:
        # (computation, formatter) pairs; None means default formatting.
        self._data[name] = [
            [streaming.MinComputation(), None],
            [streaming.PercentileComputation(0.5, iters_num), None],
            [streaming.PercentileComputation(0.9, iters_num), None],
            [streaming.PercentileComputation(0.95, iters_num), None],
            [streaming.MaxComputation(), None],
            [streaming.MeanComputation(), None],
            [streaming.MeanComputation(), _as_percent],
            [streaming.IncrementComputation(), _as_count],
        ]
def add_iteration(self, iteration):
    # Fold one iteration's measurements into the per-name streaming table.
    for name, value in self._map_iteration_values(iteration):
        if name not in self._data:
            iters_num = self._workload["total_iteration_count"]
            # Columns: min, median, 90%ile, 95%ile, max, mean, count.
            self._data[name] = [
                [streaming.MinComputation(), None],
                [streaming.PercentileComputation(0.5, iters_num), None],
                [streaming.PercentileComputation(0.9, iters_num), None],
                [streaming.PercentileComputation(0.95, iters_num), None],
                [streaming.MaxComputation(), None],
                [streaming.MeanComputation(), None],
                [streaming.IncrementComputation(), lambda v, na: v.result()]]
        # [-1] is the IncrementComputation: bump the per-name count.
        self._data[name][-1][0].add(None)
        # [-2] is the MeanComputation; it is fed the constant 1 here.
        self._data[name][-2][0].add(1)
        # NOTE(review): the slice [:-1] covers indices 0..-2 inclusive, so
        # the MeanComputation at [-2] also receives *value* right after the
        # add(1) above — it accumulates both. That looks suspicious (a
        # success ratio would want only the 1s, an average only the values);
        # confirm the intended column semantics before changing.
        for idx, dummy in enumerate(self._data[name][:-1]):
            self._data[name][idx][0].add(value)
def _initialize_atomic(self, name, root, real_name=None, count=1):
    """Create a fresh stats node for atomic action *name* under *root*."""
    node = {
        # streaming algorithms
        "sa": [
            streaming.PointsSaver(),
            streaming.MinComputation(),
            streaming.MaxComputation(),
            streaming.MeanComputation(),
            streaming.MeanComputation(),
            streaming.IncrementComputation(),
        ],
        "children": collections.OrderedDict(),
        "real_name": real_name or name,
        "count_per_iteration": count,
    }
    root[name] = node
def _init_row(self, name, iterations_count):
    """Build one stats row: an ("Action", name) head cell followed by
    (column title, streaming computation, formatter) triples.

    :param name: value for the leading "Action" cell
    :param iterations_count: total number of iterations, passed to the
        percentile computations so they can size their storage
    :returns: list of cells as described above
    """
    def round_3(stream, no_result):
        # Shared numeric formatter: 3-decimal rounding or "n/a".
        if no_result:
            return "n/a"
        return round(stream.result(), 3)

    def format_percent(stream, no_result):
        # Fix: the previous lambda ignored *no_result*, so an empty
        # stream crashed on "%.1f%%" % (None * 100) instead of rendering
        # "n/a" like every other column does.
        if no_result:
            return "n/a"
        return "%.1f%%" % (stream.result() * 100)

    def format_count(stream, no_result):
        return stream.result()

    return [
        ("Action", name),
        ("Min (sec)", streaming.MinComputation(), round_3),
        ("Median (sec)",
         streaming.PercentileComputation(0.5, iterations_count), round_3),
        ("90%ile (sec)",
         streaming.PercentileComputation(0.9, iterations_count), round_3),
        ("95%ile (sec)",
         streaming.PercentileComputation(0.95, iterations_count), round_3),
        ("Max (sec)", streaming.MaxComputation(), round_3),
        ("Avg (sec)", streaming.MeanComputation(), round_3),
        ("Success", streaming.MeanComputation(), format_percent),
        ("Count", streaming.IncrementComputation(), format_count),
    ]
def test_add_and_result(self):
    """The counter grows by exactly one per add(), regardless of value."""
    counter = algo.IncrementComputation()
    for expected in range(1, 100):
        # Before the add the count is one behind; after it, it matches.
        self.assertEqual(expected - 1, counter.result())
        counter.add(42)
        self.assertEqual(expected, counter.result())