Example #1
    def rules(self, output, benchmarks, bm_suite_args):
        # derive the benchmark name from the benchmark file path (requires "import os" at module level)
        bench_name = os.path.basename(os.path.splitext(benchmarks[0])[0])
        arg = " ".join(self._benchmarks[bench_name])
        return [
            # warmup curves
            StdOutRule(
                r"^### iteration=(?P<iteration>[0-9]+), name=(?P<benchmark>[a-zA-Z0-9.\-]+), duration=(?P<time>[0-9]+(\.[0-9]+)?)$",  # pylint: disable=line-too-long
                {
                    "benchmark": '{}.{}'.format(self._name, bench_name),
                    "metric.name": "warmup",
                    "metric.iteration": ("<iteration>", int),
                    "metric.type": "numeric",
                    "metric.value": ("<time>", float),
                    "metric.unit": "s",
                    "metric.score-function": "id",
                    "metric.better": "lower",
                    "config.run-flags": arg,  # arg is already a single string
                }),
            # no warmups
            StdOutRule(
                r"^@@@ name=(?P<benchmark>[a-zA-Z0-9.\-]+), duration=(?P<time>[0-9]+(\.[0-9]+)?)$",  # pylint: disable=line-too-long
                {
                    "benchmark": '{}.{}'.format(self._name, bench_name),
                    "metric.name": "time",
                    "metric.iteration": 0,
                    "metric.type": "numeric",
                    "metric.value": ("<time>", float),
                    "metric.unit": "s",
                    "metric.score-function": "id",
                    "metric.better": "lower",
                    "config.run-flags": arg,
                }),
        ]
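
For context, the warmup rule above only fires on stdout lines that match its pattern exactly. Below is a minimal, self-contained sketch of what the rule extracts; the sample output line is hypothetical, constructed from the pattern itself rather than taken from a real benchmark run:

import re

# Same pattern as the warmup StdOutRule above.
WARMUP_RE = re.compile(
    r"^### iteration=(?P<iteration>[0-9]+), name=(?P<benchmark>[a-zA-Z0-9.\-]+), "
    r"duration=(?P<time>[0-9]+(\.[0-9]+)?)$"
)

# Hypothetical line a benchmark harness might print after each iteration.
line = "### iteration=3, name=richards, duration=0.512"

m = WARMUP_RE.match(line)
assert m is not None
# The rule's dict applies these conversions to the named groups:
iteration = int(m.group("iteration"))  # becomes metric.iteration
seconds = float(m.group("time"))       # becomes metric.value (metric.unit is "s")
print(iteration, m.group("benchmark"), seconds)  # 3 richards 0.512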
Example #2
    def rules(self, output, benchmarks, bm_suite_args):
        bench_name = self.get_bench_name(benchmarks)
        arg = self.get_arg(self.runArgs(bm_suite_args), bench_name)

        return [
            # warmup curves
            StdOutRule(
                r"^### iteration=(?P<iteration>[0-9]+), name=(?P<benchmark>[a-zA-Z0-9._\-]+), duration=(?P<time>[0-9]+(\.[0-9]+)?$)",  # pylint: disable=line-too-long
                self.with_branch_and_commit_dict({
                    "benchmark": '{}.{}'.format(self._name, bench_name),
                    "metric.name": "warmup",
                    "metric.iteration": ("<iteration>", int),
                    "metric.type": "numeric",
                    "metric.value": ("<time>", float),
                    "metric.unit": "s",
                    "metric.score-function": "id",
                    "metric.better": "lower",
                    "config.run-flags": "".join(arg),
                })
            ),
            # secondary metric(s)
            StdOutRule(
                r"### WARMUP detected at iteration: (?P<endOfWarmup>[0-9]+$)",
                self.with_branch_and_commit_dict({
                    "benchmark": '{}.{}'.format(self._name, bench_name),
                    "metric.name": "end-of-warmup",
                    "metric.iteration": 0,
                    "metric.type": "numeric",
                    "metric.value": ("<endOfWarmup>", int),
                    "metric.unit": "s",
                    "metric.score-function": "id",
                    "metric.better": "lower",
                    "config.run-flags": "".join(arg),
                })
            ),

            # no warmups
            StdOutRule(
                r"^@@@ name=(?P<benchmark>[a-zA-Z0-9._\-]+), duration=(?P<time>[0-9]+(\.[0-9]+)?$)",  # pylint: disable=line-too-long
                self.with_branch_and_commit_dict({
                    "benchmark": '{}.{}'.format(self._name, bench_name),
                    "metric.name": "time",
                    "metric.iteration": 0,
                    "metric.type": "numeric",
                    "metric.value": ("<time>", float),
                    "metric.unit": "s",
                    "metric.score-function": "id",
                    "metric.better": "lower",
                    "config.run-flags": "".join(arg),
                })
            ),
        ]
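
Two details distinguish this variant from Example #1: every datapoint is passed through with_branch_and_commit_dict (a suite helper that presumably attaches branch and commit metadata to the dict), and a second rule captures a single end-of-warmup scalar per run instead of a curve. A minimal sketch of that second extraction, again with a hypothetical output line:

import re

END_OF_WARMUP_RE = re.compile(r"### WARMUP detected at iteration: (?P<endOfWarmup>[0-9]+)$")

# Hypothetical line printed once, when the harness decides warmup is over.
line = "### WARMUP detected at iteration: 17"

m = END_OF_WARMUP_RE.search(line)
assert m is not None
# The captured value is an iteration index, not a duration, even though
# the rule above tags the metric with unit "s".
print(int(m.group("endOfWarmup")))  # 17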
Example #3
    def rules(self, output, benchmarks, bm_suite_args):
        bench_name = self.get_bench_name(benchmarks)
        arg = self.get_arg(bm_suite_args, bench_name)

        return [
            # startup (time from VM start to the end of the first iteration)
            StdOutRule(
                r"### STARTUP +at iteration: (?P<iteration>[0-9]+), +duration: (?P<time>[0-9]+(\.[0-9]+)?$)",
                self.with_branch_and_commit_dict({
                    "benchmark": '{}.{}'.format(self._name, bench_name),
                    "metric.name": "startup",
                    "metric.iteration": ("<iteration>", int),
                    "metric.type": "numeric",
                    "metric.value": ("<time>", float),
                    "metric.unit": "s",
                    "metric.score-function": "id",
                    "metric.better": "lower",
                    "config.run-flags": "".join(arg),
                })
            ),

            StdOutRule(
                r"### EARLY WARMUP +at iteration: (?P<iteration>[0-9]+), +duration: (?P<time>[0-9]+(\.[0-9]+)?$)",
                self.with_branch_and_commit_dict({
                    "benchmark": '{}.{}'.format(self._name, bench_name),
                    "metric.name": "early-warmup",
                    "metric.iteration": ("<iteration>", int),
                    "metric.type": "numeric",
                    "metric.value": ("<time>", float),
                    "metric.unit": "s",
                    "metric.score-function": "id",
                    "metric.better": "lower",
                    "config.run-flags": "".join(arg),
                })
            ),

            StdOutRule(
                r"### LATE WARMUP +at iteration: (?P<iteration>[0-9]+), +duration: (?P<time>[0-9]+(\.[0-9]+)?$)",
                self.with_branch_and_commit_dict({
                    "benchmark": '{}.{}'.format(self._name, bench_name),
                    "metric.name": "late-warmup",
                    "metric.iteration": ("<iteration>", int),
                    "metric.type": "numeric",
                    "metric.value": ("<time>", float),
                    "metric.unit": "s",
                    "metric.score-function": "id",
                    "metric.better": "lower",
                    "config.run-flags": "".join(arg),
                })
            ),
        ]
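
The three rules in this example differ only in their marker text and metric name, so the list can be generated from a small table instead of repeated by hand. A sketch of that refactor, under the assumption that StdOutRule and with_branch_and_commit_dict behave exactly as in the examples above:

    def rules(self, output, benchmarks, bm_suite_args):
        bench_name = self.get_bench_name(benchmarks)
        arg = self.get_arg(bm_suite_args, bench_name)

        # (marker in the output, metric.name) for each measured phase
        phases = [
            ("STARTUP", "startup"),
            ("EARLY WARMUP", "early-warmup"),
            ("LATE WARMUP", "late-warmup"),
        ]
        return [
            StdOutRule(
                r"### " + marker + r" +at iteration: (?P<iteration>[0-9]+), +duration: (?P<time>[0-9]+(\.[0-9]+)?)$",
                self.with_branch_and_commit_dict({
                    "benchmark": '{}.{}'.format(self._name, bench_name),
                    "metric.name": metric_name,
                    "metric.iteration": ("<iteration>", int),
                    "metric.type": "numeric",
                    "metric.value": ("<time>", float),
                    "metric.unit": "s",
                    "metric.score-function": "id",
                    "metric.better": "lower",
                    "config.run-flags": "".join(arg),
                })
            )
            for marker, metric_name in phases
        ]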