Exemplo n.º 1
0
def test_capital_ell():
    """A function name starting with a capital L must not be mistaken for
    the alternative ``;L<line>`` sample format marker."""
    expected = Sample(
        1,
        "0x7f546684",
        Metric(MetricType.TIME, 10),
        [
            Frame("foo", "foo_module.py", 10),
            Frame("Loo", "loo_module.py", 20),
        ],
    )
    parsed = Sample.parse(
        "P1;T0x7f546684;foo_module.py:foo:10;loo_module.py:Loo:20 10",
        MetricType.TIME,
    )
    assert parsed[0] == expected
Exemplo n.º 2
0
def test_dump():
    """Dumping the collected statistics emits the aggregated samples."""
    stats = AustinStats(AustinStatsType.WALL)

    # Same sample sequence as before: foo twice, then foo;bar, then an
    # empty (frame-less) sample.
    raw_samples = [
        "P42;T0x7f45645646;foo_module.py:foo:10 150",
        "P42;T0x7f45645646;foo_module.py:foo:10 150",
        "P42;T0x7f45645646;foo_module.py:foo:10;bar_sample.py:bar:20 1000",
        "P42;T0x7f45645646 1",
    ]
    for raw in raw_samples:
        stats.update(Sample.parse(raw, MetricType.TIME)[0])

    buffer = io.StringIO()
    stats.dump(buffer)
    assert buffer.getvalue() == DUMP_LOAD_SAMPLES.format(
        "P42;T0x7f45645646 1\n")
Exemplo n.º 3
0
    def parse(text: str) -> "WebFrame":
        """Create a frame from an Austin collapsed sample."""
        def build_frame(frames: List[Frame]) -> "WebFrame":
            # Recursively turn the flat frame list into a linked chain of
            # WebFrame objects, each carrying the sample's time metric.
            # Closes over `sample` from the enclosing scope.
            first, *tail = frames
            frame = WebFrame(str(first), sample.metrics.time)
            if tail:
                child = build_frame(tail)
                frame.add_child(child)
                # Height grows by one per nesting level of the stack.
                frame.height = child.height + 1
            else:
                # Leaf frame: the sample's time is its own value.
                frame.value = sample.metrics.time
            return frame

        # NOTE(review): Sample.parse is called without a metric type and its
        # result is used as a single sample, whereas elsewhere in the project
        # it returns a list — confirm this matches the signature in use here.
        sample = Sample.parse(text)
        process_frame = WebFrame(sample.pid, sample.metrics.time)
        thread_frame = WebFrame(sample.thread, sample.metrics.time)

        if sample.frames:
            thread_frame.add_child(build_frame(sample.frames))

        # Height bookkeeping: thread < process < root, one level apart each.
        thread_frame.height = len(sample.frames) + 1

        process_frame.add_child(thread_frame)
        process_frame.height = thread_frame.height + 1

        root = WebFrame.new_root()
        root.add_child(process_frame)
        root.value = process_frame.value
        root.height = process_frame.height + 1

        return root
Exemplo n.º 4
0
def test_deepcopy():
    """A deep copy of the stats compares equal but is a distinct object."""
    stats = AustinStats(stats_type=AustinStatsType.WALL)

    sample = Sample.parse(
        "P42;T0x7f45645646;foo_module.py:foo:10 152", MetricType.TIME
    )[0]
    stats.update(sample)

    clone = deepcopy(stats)

    assert clone == stats
    assert clone is not stats
Exemplo n.º 5
0
 def get_frames(text: str) -> List[Tuple[FoldedStack, Metric]]:
     """Get the folded stacks and metrics from a string of samples.

     Lines that do not parse as valid Austin samples (metadata, blanks,
     garbage) are silently skipped.
     """
     frames: List[Tuple[FoldedStack, Metric]] = []
     for line in text.splitlines(keepends=False):
         try:
             sample = Sample.parse(line)
         except InvalidSample:
             continue
         # Append outside the try so only parsing errors are swallowed.
         frames.append((sample.frames, sample.metrics))
     return frames
Exemplo n.º 6
0
        def _dump_pprof():
            """Serialise every valid collected sample into a pprof file."""
            pprof = PProf()

            for raw_sample in self.data:
                try:
                    pprof.add_sample(Sample.parse(raw_sample))
                except InvalidSample:
                    # Skip lines that are not valid samples.
                    continue

            # Microsecond-timestamped name avoids clobbering earlier dumps.
            _dump(f".austin_{int((time() * 1e6) % 1e14)}.pprof", stream, pprof)
Exemplo n.º 7
0
def test_sample_alt_format():
    """Samples using the alternative ``;L<line>`` format parse correctly."""
    raw = (
        "P1;T7fdf1b437700;/usr/lib/python3.6/threading.py:_bootstrap;L884;"
        "/usr/lib/python3.6/threading.py:_bootstrap_inner;L916;"
        "/usr/lib/python3.6/threading.py:run;L864;"
        "test/target34.py:keep_cpu_busy;L31 "
        "10085"
    )
    expected_frames = [
        Frame.parse("/usr/lib/python3.6/threading.py:_bootstrap:884"),
        Frame.parse("/usr/lib/python3.6/threading.py:_bootstrap_inner:916"),
        Frame.parse("/usr/lib/python3.6/threading.py:run:864"),
        Frame.parse("test/target34.py:keep_cpu_busy:31"),
    ]
    assert Sample.parse(raw, MetricType.TIME)[0] == Sample(
        1,
        "7fdf1b437700",
        Metric(MetricType.TIME, 10085),
        expected_frames,
    )
Exemplo n.º 8
0
        def _dump_speedscope():
            """Serialise every valid collected sample into a Speedscope file."""
            name = f"austin_{int((time() * 1e6) % 1e14)}"
            speedscope = Speedscope(name)

            for raw_sample in self.data:
                try:
                    speedscope.add_sample(Sample.parse(raw_sample))
                except InvalidSample:
                    # Skip lines that are not valid samples.
                    continue

            _dump(f".{name}.json", stream, speedscope)
Exemplo n.º 9
0
def main() -> None:
    """austin2speedscope entry point.

    Reads an Austin sample file, converts it to the Speedscope JSON format
    and writes the result to the requested output file. Exits with status 1
    if the input file does not exist.
    """
    from argparse import ArgumentParser
    import os
    import sys

    arg_parser = ArgumentParser(
        prog="austin2speedscope",
        description=
        ("Convert Austin generated profiles to the Speedscope JSON format "
         "accepted by https://speedscope.app. The output will contain a profile "
         "for each thread and metric included in the input file."),
    )

    arg_parser.add_argument(
        "input",
        type=str,
        help="The input file containing Austin samples in normal format.",
    )
    arg_parser.add_argument(
        "output",
        type=str,
        help="The name of the output Speedscope JSON file.")
    arg_parser.add_argument(
        "--indent",
        type=int,
        help="Give a non-null value to prettify the JSON output.")

    arg_parser.add_argument("-V",
                            "--version",
                            action="version",
                            version=__version__)

    args = arg_parser.parse_args()

    try:
        with AustinFileReader(args.input) as fin:
            mode = fin.metadata["mode"]
            speedscope = Speedscope(os.path.basename(args.input), mode,
                                    args.indent)
            for line in fin:
                try:
                    speedscope.add_samples(
                        Sample.parse(line, MetricType.from_mode(mode)))
                except InvalidSample:
                    # Skip non-sample lines (metadata, blanks, garbage).
                    continue

    except FileNotFoundError:
        # Report on stderr and use sys.exit; the site builtin exit() is not
        # guaranteed to be available when run as an installed script.
        print(f"No such input file: {args.input}", file=sys.stderr)
        sys.exit(1)

    with open(args.output, "w") as fout:
        speedscope.dump(fout)
Exemplo n.º 10
0
def test_austin_file_reader(datapath):
    """The reader exposes header metadata, samples, then footer metadata."""
    header = {
        "austin": "3.0.0",
        "interval": "10000",
        "mode": "wall",
    }

    with AustinFileReader(datapath / "austin.out") as fr:
        # Before iterating, only the header metadata is visible.
        assert fr.metadata == header

        count = sum(bool(Sample.parse(line, MetricType.TIME)) for line in fr)
        assert count == 73

        # After iterating, the footer metadata has been merged in.
        assert fr.metadata == {**header, "duration": "383010"}
Exemplo n.º 11
0
def test_pprof(datapath):
    """Converting the reference Austin file reproduces the reference pprof."""
    with AustinFileReader(datapath / "austin.out") as austin:
        mode = austin.metadata["mode"]
        prof = PProf(mode)
        metric_type = MetricType.from_mode(mode)

        for raw in austin:
            try:
                prof.add_samples(Sample.parse(raw, metric_type))
            except InvalidSample:
                continue

        bstream = io.BytesIO()
        prof.dump(bstream)

        with open(datapath / "austin.pprof", "rb") as pprof:
            expected = pprof.read()
        assert expected == bstream.getvalue()
Exemplo n.º 12
0
 def update(self, raw_sample: str) -> None:
     """Update current statistics with a new sample."""
     # Pick the metric type according to the profiling mode.
     metric_type = (
         MetricType.MEMORY
         if self.mode is AustinProfileMode.MEMORY
         else MetricType.TIME
     )
     try:
         (sample, ) = Sample.parse(raw_sample, metric_type)
         self._stats.update(sample)
         self._stats.timestamp = time()
         key = f"{sample.pid}:{sample.thread}"
         self._last_stack[key] = sample
         self._threads.add(key)
     except InvalidSample:
         # Malformed samples are counted but otherwise ignored.
         self._invalids += 1
     finally:
         # The total sample count includes the invalid ones.
         self._samples += 1
Exemplo n.º 13
0
    def check_tests(self) -> int:
        """Check all the registered tests against the collected statistics.

        Returns the number of failed checks.

        Raises:
            RuntimeError: if Austin is still running.
        """
        if self.is_running():
            raise RuntimeError("Austin is still running.")

        if not self.data:
            # No data collected means nothing to check, hence no failures.
            # (A bare `return` here previously returned None, violating the
            # declared -> int return type.)
            return 0

        # Prepare stats
        for sample in self.data:
            try:
                self.stats.update(Sample.parse(sample))
            except InvalidSample:
                pass

        for function, modules in self.tests.items():
            for module, markers in modules.items():
                test_stats = self._find_test(function, module)
                if test_stats is None:
                    # The test was not found. Either there is no such test or
                    # Austin did not collect any statistics for it.
                    continue

                total_test_time = sum(fs.total.time for fs in test_stats)
                # NOTE(review): using `fs.total.time` when mode == "-m"
                # (memory mode) looks inverted — confirm against the intended
                # semantics before changing.
                total_test_malloc = sum(fs.total.time if self.mode ==
                                        "-m" else fs.total.memory_alloc
                                        for fs in test_stats)
                total_test_dealloc = (sum(
                    fs.total.memory_dealloc
                    for fs in test_stats) if self.mode == "-f" else 0)

                for marker in markers:
                    outcome = marker(
                        test_stats,
                        total_test_time,
                        total_test_malloc,
                        total_test_dealloc,
                    )
                    self.report.append((function, module, outcome))

        return sum(1 for outcome in self.report if not outcome[2])
Exemplo n.º 14
0
def main() -> None:
    """austin2pprof entry point.

    Reads an Austin sample file, converts it to the pprof protobuf format
    and writes the result to the requested output file. Exits with status 1
    if the input file does not exist.
    """
    import sys

    arg_parser = ArgumentParser(
        prog="austin2pprof",
        description=(
            "Convert Austin generated profiles to the pprof protobuf format. "
            "See https://github.com/google/pprof for more details."),
    )

    arg_parser.add_argument(
        "input",
        type=str,
        help="The input file containing Austin samples.",
    )
    arg_parser.add_argument("output",
                            type=str,
                            help="The name of the output pprof file.")

    # NOTE(review): version is hardcoded here while the sibling converter
    # uses __version__ — confirm whether these should agree.
    arg_parser.add_argument("-V",
                            "--version",
                            action="version",
                            version="0.1.0")

    args = arg_parser.parse_args()

    try:
        with AustinFileReader(args.input) as fin:
            mode = fin.metadata["mode"]
            pprof = PProf(mode=mode)

            # Read samples, skipping non-sample lines.
            for line in fin:
                try:
                    pprof.add_samples(
                        Sample.parse(line, MetricType.from_mode(mode)))
                except InvalidSample:
                    continue

    except FileNotFoundError:
        # Report on stderr and use sys.exit; the site builtin exit() is not
        # guaranteed to be available when run as an installed script.
        print(f"No such input file: {args.input}", file=sys.stderr)
        sys.exit(1)

    with open(args.output, "wb") as fout:
        pprof.dump(fout)
Exemplo n.º 15
0
def test_speedscope_wall_metrics_only(datapath):
    """A wall-mode Austin file converts to the reference Speedscope JSON."""
    with AustinFileReader(datapath / "austin.out") as austin:
        mode = austin.metadata["mode"]
        assert Mode.from_metadata(mode) == Mode.WALL

        speedscope = Speedscope("austin.out", mode, indent=2)
        metric_type = MetricType.from_mode(mode)

        for raw in austin:
            try:
                speedscope.add_samples(Sample.parse(raw, metric_type))
            except InvalidSample:
                continue

        buffer = io.StringIO()
        speedscope.dump(buffer)

        with open(datapath / "austin.json", "r") as sprof:
            assert buffer.getvalue() == sprof.read()
Exemplo n.º 16
0
def get_folded_stacks(
        text: str,
        threshold: float = DEFAULT_THRESHOLD
) -> List[Tuple[FoldedStack, Metric]]:
    """Get the folded stacks and Metric from a string of samples.

    Samples whose metric value, relative to the largest observed value,
    does not exceed ``threshold`` are filtered out. Invalid lines are
    skipped.
    """
    stacks = []
    # Start at 1 to avoid division by zero if every sample is invalid.
    max_value = 1
    for line in track(
            text.splitlines(keepends=False),
            description="Extracting frames",
            console=CONSOLE,
            transient=True,
    ):
        try:
            (sample, ) = Sample.parse(line, MetricType.TIME)
        except InvalidSample:
            continue
        # Hoist the repeated attribute access; previously `_` was used as a
        # meaningful loop variable (and reused in the comprehension below).
        value = sample.metric.value
        if value > max_value:
            max_value = value
        stacks.append((sample.frames, value))
    return [entry for entry in stacks if entry[1] / max_value > threshold]
Exemplo n.º 17
0
def test_austin_stats_single_process():
    """Incremental AustinStats updates for a single process.

    Each stage feeds one more collapsed sample and asserts the complete
    expected state of the statistics afterwards.
    """
    stats = AustinStats(stats_type=AustinStatsType.WALL)

    # Stage 1: first sample creates thread 0x7f45645646 with one frame;
    # all 152 units belong to the foo frame, the thread has no own time.
    stats.update(
        Sample.parse("P42;T0x7f45645646;foo_module.py:foo:10 152",
                     MetricType.TIME)[0])
    assert stats == AustinStats(
        stats_type=AustinStatsType.WALL,
        processes={
            42:
            ProcessStats(
                pid=42,
                threads={
                    "0x7f45645646":
                    ThreadStats(
                        label="0x7f45645646",
                        own=Metric(MetricType.TIME, 0),
                        total=Metric(MetricType.TIME, 152),
                        children={
                            Frame("foo", "foo_module.py", 10):
                            FrameStats(
                                label=Frame("foo", "foo_module.py", 10),
                                own=Metric(MetricType.TIME, 152),
                                total=Metric(MetricType.TIME, 152),
                            )
                        },
                    )
                },
            )
        },
    )

    # Stage 2: a frame-less sample adds 148 units of own time to the
    # thread; the frame stats are unchanged.
    stats.update(Sample.parse("P42;T0x7f45645646 148", MetricType.TIME)[0])
    assert stats == AustinStats(
        stats_type=AustinStatsType.WALL,
        processes={
            42:
            ProcessStats(
                pid=42,
                threads={
                    "0x7f45645646":
                    ThreadStats(
                        label="0x7f45645646",
                        total=Metric(MetricType.TIME, 300),
                        own=Metric(MetricType.TIME, 148),
                        children={
                            Frame("foo", "foo_module.py", 10):
                            FrameStats(
                                label=Frame("foo", "foo_module.py", 10),
                                own=Metric(MetricType.TIME, 152),
                                total=Metric(MetricType.TIME, 152),
                            )
                        },
                    )
                },
            )
        },
    )

    # Stage 3: a repeated foo sample accumulates into the existing
    # FrameStats (152 + 100 = 252).
    stats.update(
        Sample.parse("P42;T0x7f45645646;foo_module.py:foo:10 100",
                     MetricType.TIME)[0])
    assert stats == AustinStats(
        stats_type=AustinStatsType.WALL,
        processes={
            42:
            ProcessStats(
                pid=42,
                threads={
                    "0x7f45645646":
                    ThreadStats(
                        label="0x7f45645646",
                        total=Metric(MetricType.TIME, 400),
                        own=Metric(MetricType.TIME, 148),
                        children={
                            Frame("foo", "foo_module.py", 10):
                            FrameStats(
                                label=Frame("foo", "foo_module.py", 10),
                                own=Metric(MetricType.TIME, 252),
                                total=Metric(MetricType.TIME, 252),
                            )
                        },
                    )
                },
            )
        },
    )

    # Stage 4: a new top-level frame (bar) appears alongside foo.
    stats.update(
        Sample.parse("P42;T0x7f45645646;foo_module.py:bar:35 400",
                     MetricType.TIME)[0])
    assert stats == AustinStats(
        stats_type=AustinStatsType.WALL,
        processes={
            42:
            ProcessStats(
                pid=42,
                threads={
                    "0x7f45645646":
                    ThreadStats(
                        label="0x7f45645646",
                        total=Metric(MetricType.TIME, 800),
                        own=Metric(MetricType.TIME, 148),
                        children={
                            Frame("foo", "foo_module.py", 10):
                            FrameStats(
                                label=Frame("foo", "foo_module.py", 10),
                                own=Metric(MetricType.TIME, 252),
                                total=Metric(MetricType.TIME, 252),
                            ),
                            Frame("bar", "foo_module.py", 35):
                            FrameStats(
                                label=Frame("bar", "foo_module.py", 35),
                                own=Metric(MetricType.TIME, 400),
                                total=Metric(MetricType.TIME, 400),
                            ),
                        },
                    )
                },
            )
        },
    )

    # Stage 5: a sample from a different thread id creates a second
    # ThreadStats under the same process.
    stats.update(
        Sample.parse("P42;T0x7f45645664;foo_module.py:foo:10 152",
                     MetricType.TIME)[0])
    assert stats == AustinStats(
        stats_type=AustinStatsType.WALL,
        processes={
            42:
            ProcessStats(
                pid=42,
                threads={
                    "0x7f45645664":
                    ThreadStats(
                        label="0x7f45645664",
                        own=Metric(MetricType.TIME, 0),
                        total=Metric(MetricType.TIME, 152),
                        children={
                            Frame("foo", "foo_module.py", 10):
                            FrameStats(
                                label=Frame("foo", "foo_module.py", 10),
                                own=Metric(MetricType.TIME, 152),
                                total=Metric(MetricType.TIME, 152),
                            )
                        },
                    ),
                    "0x7f45645646":
                    ThreadStats(
                        label="0x7f45645646",
                        total=Metric(MetricType.TIME, 800),
                        own=Metric(MetricType.TIME, 148),
                        children={
                            Frame("foo", "foo_module.py", 10):
                            FrameStats(
                                label=Frame("foo", "foo_module.py", 10),
                                own=Metric(MetricType.TIME, 252),
                                total=Metric(MetricType.TIME, 252),
                            ),
                            Frame("bar", "foo_module.py", 35):
                            FrameStats(
                                label=Frame("bar", "foo_module.py", 35),
                                own=Metric(MetricType.TIME, 400),
                                total=Metric(MetricType.TIME, 400),
                            ),
                        },
                    ),
                },
            )
        },
    )
Exemplo n.º 18
0
def test_sample_parser_invalid():
    """Malformed collapsed samples raise InvalidSample."""
    # Each entry is (raw sample, extra positional args for Sample.parse).
    bogus_samples = [
        # Empty
        ("", ()),
        # Missing Thread
        ("foo_module.py:foo:10;bar_module.py:bar:20 42,43,-44", ()),
        # With PID but missing Thread
        ("P123;foo_module.py:foo:10;bar_module.py:bar:20 42,43,-44", ()),
        # Completely bonkers
        ("snafu", ()),
        # no metrics
        ("P1;T0x7f546684;foo_module.py:foo:10;bar_module.py:bar:20", ()),
        # invalid frame
        ("P1;T0x7f546684;foo_module.py:foo:10;snafu 10", (MetricType.TIME, )),
        # Invalid number of metrics
        ("P1;T0x7f546684;foo_module.py:foo:10 10,20", ()),
        # Too many metrics
        ("P1;T0x7f546684;foo_module.py:foo:10 10,20,30,40", ()),
    ]
    for raw, extra_args in bogus_samples:
        with raises(InvalidSample):
            Sample.parse(raw, *extra_args)
Exemplo n.º 19
0
def test_sample_parser_valid():
    """Well-formed collapsed samples parse into the expected objects."""
    # Single explicit TIME metric with a multi-digit PID.
    assert Sample.parse(
        "P123;T0x7f546684;foo_module.py:foo:10;bar_module.py:bar:20 42",
        MetricType.TIME)[0] == Sample(
            123,
            "0x7f546684",
            Metric(MetricType.TIME, 42),
            [
                Frame("foo", "foo_module.py", 10),
                Frame("bar", "bar_module.py", 20)
            ],
        )

    # Same sample shape with a single-digit PID.
    assert Sample.parse(
        "P1;T0x7f546684;foo_module.py:foo:10;bar_module.py:bar:20 42",
        MetricType.TIME)[0] == Sample(
            1,
            "0x7f546684",
            Metric(MetricType.TIME, 42),
            [
                Frame("foo", "foo_module.py", 10),
                Frame("bar", "bar_module.py", 20)
            ],
        )

    # With no metric type and a metric triple, parse expands the sample
    # into four samples: CPU time, wall time, allocation, deallocation.
    assert Sample.parse(
        "P123;T0x7f546684;foo_module.py:foo:10;bar_module.py:bar:20 42,1,-44",
    ) == [
        Sample(
            123,
            "0x7f546684",
            Metric(MetricType.TIME, 0),
            [
                Frame("foo", "foo_module.py", 10),
                Frame("bar", "bar_module.py", 20)
            ],
        ),
        Sample(
            123,
            "0x7f546684",
            Metric(MetricType.TIME, 42),
            [
                Frame("foo", "foo_module.py", 10),
                Frame("bar", "bar_module.py", 20)
            ],
        ),
        Sample(
            123,
            "0x7f546684",
            Metric(MetricType.MEMORY, 0),
            [
                Frame("foo", "foo_module.py", 10),
                Frame("bar", "bar_module.py", 20)
            ],
        ),
        Sample(
            123,
            "0x7f546684",
            Metric(MetricType.MEMORY, 44),
            [
                Frame("foo", "foo_module.py", 10),
                Frame("bar", "bar_module.py", 20)
            ],
        ),
    ]

    # Frame-less sample with a metric triple; idle flag 0 keeps the CPU
    # time equal to the wall time, and the positive third value maps to
    # the allocation metric.
    assert Sample.parse("P1;T0x7f546684 42,0,44") == [
        Sample(
            1,
            "0x7f546684",
            Metric(MetricType.TIME, 42),
            [],
        ),
        Sample(
            1,
            "0x7f546684",
            Metric(MetricType.TIME, 42),
            [],
        ),
        Sample(
            1,
            "0x7f546684",
            Metric(MetricType.MEMORY, 44),
            [],
        ),
        Sample(
            1,
            "0x7f546684",
            Metric(MetricType.MEMORY, 0),
            [],
        ),
    ]
Exemplo n.º 20
0
def test_speedscope_full_metrics():
    """Full-metrics samples produce one Speedscope profile per metric."""
    speedscope = Speedscope("austin_full_metrics", "full")
    # Two samples for the same frame: the first is non-idle with a
    # deallocation (-30); the second is idle with an allocation (20).
    for sample in [
            "P42;T123;foo_module.py:foo:10 10,0,-30",
            "P42;T123;foo_module.py:foo:10 10,1,20",
    ]:
        speedscope.add_samples(Sample.parse(sample))

    speedscope_data = speedscope.asdict()
    for file_field in _SPEEDSCOPE_FILE_FIELDS:
        assert file_field in speedscope_data

    assert speedscope_data["$schema"] == _SPEEDSCOPE_SCHEMA_URL
    assert speedscope_data["name"] == "austin_full_metrics"
    assert "Austin2Speedscope Converter" in speedscope_data["exporter"]

    # Both samples reference the same frame, so only one shared frame.
    sframe_list = speedscope_data["shared"]["frames"]
    assert len(sframe_list) == 1
    for sframe in sframe_list:
        for field in _SPEEDSCOPE_FRAME_FIELDS:
            assert field in sframe

    assert sframe_list[0]["name"] == "foo"
    assert sframe_list[0]["file"] == "foo_module.py"
    assert sframe_list[0]["line"] == 10

    sprofile_list = speedscope_data["profiles"]
    assert len(sprofile_list) == 4
    for sprofile in sprofile_list:
        for field in _SPEEDSCOPE_PROFILE_FIELDS:
            assert field in sprofile
        assert sprofile["type"] == "sampled"

    # See the comments in the test above for a discussion of why
    # Speedscope profiles appear in the order tested below.
    assert sprofile_list[0]["name"] == "CPU time profile for 42:123"
    assert sprofile_list[0]["endValue"] == 10
    assert sprofile_list[0]["unit"] == "microseconds"

    assert len(sprofile_list[0]["samples"]) == 1
    assert sprofile_list[0]["samples"] == [[0]]
    assert len(sprofile_list[0]["weights"]) == 1
    assert sprofile_list[0]["weights"] == [10]

    assert sprofile_list[1]["name"] == "Wall time profile for 42:123"
    assert sprofile_list[1]["endValue"] == 20
    assert sprofile_list[1]["unit"] == "microseconds"

    assert len(sprofile_list[1]["samples"]) == 2
    assert sprofile_list[1]["samples"] == [[0], [0]]
    assert len(sprofile_list[1]["weights"]) == 2
    assert sprofile_list[1]["weights"] == [10, 10]

    assert sprofile_list[2]["name"] == "Memory deallocation profile for 42:123"
    assert sprofile_list[2]["endValue"] == 30
    assert sprofile_list[2]["unit"] == "bytes"

    assert len(sprofile_list[2]["samples"]) == 1
    assert sprofile_list[2]["samples"] == [[0]]
    assert len(sprofile_list[2]["weights"]) == 1
    assert sprofile_list[2]["weights"] == [30]

    assert sprofile_list[3]["name"] == "Memory allocation profile for 42:123"
    assert sprofile_list[3]["endValue"] == 20
    assert sprofile_list[3]["unit"] == "bytes"

    assert len(sprofile_list[3]["samples"]) == 1
    assert sprofile_list[3]["samples"] == [[0]]
    assert len(sprofile_list[3]["weights"]) == 1
    assert sprofile_list[3]["weights"] == [20]
Exemplo n.º 21
0
def test_speedscope_full_metrics_idle():
    """Idle samples produce no CPU time profile (only 3 profiles)."""
    speedscope = Speedscope("austin_full_metrics", "full")

    # The format for each line of this array can be found in
    # austin.stats.Sample.parse.
    for sample in [
            "P42;T123;foo_module.py:foo:10 10,1,-30",
            "P42;T123;foo_module.py:foo:10 10,1,20",
    ]:
        speedscope.add_samples(Sample.parse(sample))

    speedscope_data = speedscope.asdict()
    for file_field in _SPEEDSCOPE_FILE_FIELDS:
        assert file_field in speedscope_data

    assert speedscope_data["$schema"] == _SPEEDSCOPE_SCHEMA_URL
    assert speedscope_data["name"] == "austin_full_metrics"
    assert "Austin2Speedscope Converter" in speedscope_data["exporter"]

    sframe_list = speedscope_data["shared"]["frames"]
    assert len(sframe_list) == 1
    for sframe in sframe_list:
        for field in _SPEEDSCOPE_FRAME_FIELDS:
            assert field in sframe

    assert sframe_list[0]["name"] == "foo"
    assert sframe_list[0]["file"] == "foo_module.py"
    assert sframe_list[0]["line"] == 10

    sprofile_list = speedscope_data["profiles"]
    assert len(sprofile_list) == 3
    for sprofile in sprofile_list:
        for field in _SPEEDSCOPE_PROFILE_FIELDS:
            assert field in sprofile
        assert sprofile["type"] == "sampled"
        assert sprofile["startValue"] == 0

    # The sort in SpeedscopeProfile.asdict() returns profiles in insertion
    # order because the keys for all profiles are the same: "42:123",
    # which refers to the process ID and thread ID, respectively. The
    # part of each line that controls insertion order in this particular
    # data set is the triple of numbers at the end of each line, which
    # is passed to austin.stats.Metric.parse. For consistency,
    # the entries of this triple will be referred to positionally using
    # Python indexing conventions.
    #
    # Data from this triple is passed into a four element list of
    # metrics as follows:
    #
    # metric[0] is CPU time; it is zero if triple[1] is nonzero, otherwise
    # it equals triple[0]
    #
    # metric[1] is wall clock time; it equals triple[0]
    #
    # metric[2] is memory allocation in bytes; it equals triple[2] if
    # triple[2] > 0, otherwise it equals zero
    #
    # metric[3] is memory deallocation in bytes; it equals -triple[2]
    # if triple[2] < 0, otherwise it equals 0.
    #
    # Insertions into weight arrays are attempted in the following
    # order: (CPU time, wall clock time, memory allocation, memory
    # deallocation). Insertion logic can be found in
    # austin.format.Speedscope.add_samples and
    # austin.format.Speedscope.get_profile.
    #
    # If the value of a particular metric is zero, then nothing is inserted.
    #
    # Consequently, the insertion order is as follows: wall clock
    # time, deallocation, allocation. Weight information is never
    # inserted into the CPU time profile weight arrays in this
    # test. Weight information is, however, inserted into the CPU time
    # profile weight array in the other test in this file.
    assert sprofile_list[0]["name"] == "Wall time profile for 42:123"
    assert sprofile_list[0]["endValue"] == 20
    assert sprofile_list[0]["unit"] == "microseconds"

    assert len(sprofile_list[0]["samples"]) == 2
    assert sprofile_list[0]["samples"] == [[0], [0]]
    assert len(sprofile_list[0]["weights"]) == 2
    assert sprofile_list[0]["weights"] == [10, 10]

    assert sprofile_list[1]["name"] == "Memory deallocation profile for 42:123"
    assert sprofile_list[1]["endValue"] == 30
    assert sprofile_list[1]["unit"] == "bytes"

    assert len(sprofile_list[1]["samples"]) == 1
    assert sprofile_list[1]["samples"] == [[0]]
    assert len(sprofile_list[1]["weights"]) == 1
    assert sprofile_list[1]["weights"] == [30]

    assert sprofile_list[2]["name"] == "Memory allocation profile for 42:123"
    assert sprofile_list[2]["endValue"] == 20
    assert sprofile_list[2]["unit"] == "bytes"

    assert len(sprofile_list[2]["samples"]) == 1
    assert sprofile_list[2]["samples"] == [[0]]
    assert len(sprofile_list[2]["weights"]) == 1
    assert sprofile_list[2]["weights"] == [20]