Example #1
def test_deepcopy():
    stats = AustinStats(stats_type=AustinStatsType.WALL)

    stats.update(
        Sample.parse("P42;T0x7f45645646;foo_module.py:foo:10 152",
                     MetricType.TIME)[0])
    cloned_stats = deepcopy(stats)
    assert cloned_stats == stats
    assert cloned_stats is not stats
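
All of these examples feed Sample.parse with Austin's collapsed sample format: a process token, a thread token, zero or more file:function:line frames, and a trailing metric value. A minimal sketch of how such a line breaks down (plain string handling for illustration only; Sample.parse is the real parser):

raw = "P42;T0x7f45645646;foo_module.py:foo:10 152"
head, _, metric = raw.rpartition(" ")   # split off the trailing metric value
pid, thread, *frames = head.split(";")  # process, thread, then the stack
assert (pid, thread) == ("P42", "T0x7f45645646")
assert frames == ["foo_module.py:foo:10"] and metric == "152"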
Example #2
def test_load():
    buffer = io.StringIO(DUMP_LOAD_SAMPLES.format(""))
    stats = AustinStats.load(buffer)
    assert stats[AustinStatsType.WALL] == AustinStats(
        stats_type=AustinStatsType.WALL,
        processes={
            42:
            ProcessStats(
                pid=42,
                threads={
                    "0x7f45645646":
                    ThreadStats(
                        label="0x7f45645646",
                        own=Metric(MetricType.TIME, 0),
                        total=Metric(MetricType.TIME, 1300),
                        children={
                            Frame(function="foo",
                                  filename="foo_module.py",
                                  line=10):
                            FrameStats(
                                label=Frame(function="foo",
                                            filename="foo_module.py",
                                            line=10),
                                own=Metric(MetricType.TIME, 300),
                                total=Metric(MetricType.TIME, 1300),
                                children={
                                    Frame(
                                        function="bar",
                                        filename="bar_sample.py",
                                        line=20,
                                    ):
                                    FrameStats(
                                        label=Frame(
                                            function="bar",
                                            filename="bar_sample.py",
                                            line=20,
                                        ),
                                        own=Metric(MetricType.TIME, 1000),
                                        total=Metric(MetricType.TIME, 1000),
                                        children={},
                                        height=1,
                                    )
                                },
                                height=0,
                            )
                        },
                    )
                },
            )
        },
    )
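
The nested literal above mirrors the statistics hierarchy: AustinStats maps PIDs to ProcessStats, which map thread labels to ThreadStats, which root a tree of FrameStats. A short sketch of walking that hierarchy, continuing from the test above and assuming only the attributes exercised by its assertion:

def walk(node, depth=0):
    # Recursively visit the FrameStats tree rooted at the given node.
    for frame, child in node.children.items():
        print("  " * depth, frame.function, child.own, child.total)
        walk(child, depth + 1)

wall = stats[AustinStatsType.WALL]
for process in wall.processes.values():
    for thread in process.threads.values():
        walk(thread)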
Example #3
def test_dump():
    stats = AustinStats(AustinStatsType.WALL)

    EMPTY_SAMPLE = "P42;T0x7f45645646 1"
    FOO_SAMPLE = "P42;T0x7f45645646;foo_module.py:foo:10 150"
    BAR_SAMPLE = "P42;T0x7f45645646;foo_module.py:foo:10;bar_sample.py:bar:20 1000"

    stats.update(Sample.parse(FOO_SAMPLE, MetricType.TIME)[0])
    stats.update(Sample.parse(FOO_SAMPLE, MetricType.TIME)[0])
    stats.update(Sample.parse(BAR_SAMPLE, MetricType.TIME)[0])
    stats.update(Sample.parse(EMPTY_SAMPLE, MetricType.TIME)[0])

    buffer = io.StringIO()
    stats.dump(buffer)
    assert buffer.getvalue() == DUMP_LOAD_SAMPLES.format(
        "P42;T0x7f45645646 1\n")
Example #4
class PyTestAustin(ThreadedAustin):
    """pytest implementation of Austin."""
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)

        self.ready = Event()
        self.stats = AustinStats()
        self.interval: str = "100"
        self.children = False
        self.mode: Optional[str] = None
        self.data: List[str] = []
        self.global_stats: Optional[str] = None
        self.austinfile = None
        self.tests = {}
        self.report = []
        self.report_level = "minimal"
        self.format = "austin"

    def on_ready(self, process: Process, child_process: Process,
                 command_line: str) -> None:
        """Ready callback."""
        self.ready.set()

    def on_sample_received(self, sample: str) -> None:
        """Sample received callback."""
        # We collect all the samples and only parse them at the end for
        # performance
        self.data.append(sample)

    def on_terminate(self, stats: str) -> None:
        """Terminate callback."""
        self.global_stats = stats
        self.ready.set()

    def wait_ready(self, timeout: Optional[int] = None):
        """Wait for Austin to enter the ready state."""
        self.ready.wait(timeout)

    def dump(self, stream: Optional[TextIO] = None) -> None:
        """Dump the collected statistics to the given IO stream.

        If no stream is given, the data is dumped to a file whose name starts
        with ``.austin_`` followed by a truncated timestamp, within the pytest
        rootdir.
        """
        if not self.data:
            return

        def _dump(filename, stream, dumper):
            if stream is None:
                with open(filename,
                          "wb" if filename.endswith("pprof") else "w") as fout:
                    dumper.dump(fout)
                    self.austinfile = os.path.join(os.getcwd(), filename)
            else:
                dumper.dump(stream)

        def _dump_austin():
            _dump(f".austin_{int((time() * 1e6) % 1e14)}.aprof", stream,
                  self.stats)

        def _dump_pprof():
            pprof = PProf()

            for line in self.data:
                try:
                    pprof.add_sample(Sample.parse(line))
                except InvalidSample:
                    continue

            _dump(f".austin_{int((time() * 1e6) % 1e14)}.pprof", stream, pprof)

        def _dump_speedscope():
            name = f"austin_{int((time() * 1e6) % 1e14)}"
            speedscope = Speedscope(name)

            for line in self.data:
                try:
                    speedscope.add_sample(Sample.parse(line))
                except InvalidSample:
                    continue

            _dump(f".{name}.json", stream, speedscope)

        {
            "austin": _dump_austin,
            "pprof": _dump_pprof,
            "speedscope": _dump_speedscope
        }[self.format]()

    @lru_cache()
    def _index(self) -> Dict[str, Dict[str, List[FrameStats]]]:
        # TODO: This code can be optimised. If we collect all the test items
        # we can index up to the test functions. Then we keep indexing
        # whenever we check each marked test.

        def _add_child_stats(
                stats: FrameStats,
                index: Dict[str, Dict[str, List[FrameStats]]]) -> None:
            """Recursively index all the functions in all the modules."""
            for frame, child_stats in stats.children.items():
                index.setdefault(frame.function,
                                 {}).setdefault(frame.filename,
                                                []).append(child_stats)

                _add_child_stats(child_stats, index)

        index = {}

        for _, process in self.stats.processes.items():
            for _, thread in process.threads.items():
                _add_child_stats(thread, index)

        return index

    def register_test(self, function: str, module: str,
                      markers: Iterator) -> None:
        """Register a test with pytest-austin.

        We pass the test item name and module together with any markers.
        """
        for marker in markers:
            try:
                marker_function = getattr(_markers, marker.name)
            except AttributeError:
                continue

            arg_names = marker_function.__code__.co_varnames[
                1:marker_function.__code__.co_argcount]
            defaults = marker_function.__defaults__ or []

            marker_args = dict(zip(arg_names[-len(defaults):], defaults))
            marker_args.update(marker.kwargs)
            marker_args.update(dict(zip(arg_names, marker.args)))

            self.tests.setdefault(function, {}).setdefault(module, []).append(
                marker_function((self, function, module), **marker_args))

    def _find_test(self, function: str,
                   module: str) -> Optional[List[FrameStats]]:
        # We expect to find at most one test
        # TODO: Match function by regex
        module_map = self._index().get(function, None)
        if module_map is None:
            return None

        matches = [module_map[k]
                   for k in module_map if k.endswith(module)] + [None]
        if len(matches) > 2:
            raise RuntimeError(
                f"Test item {function} occurs in multiple matching modules.")

        return matches[0]

    def check_tests(self) -> int:
        """Check all the registered tests against the collected statistics.

        Returns the number of failed checks.
        """
        if self.is_running():
            raise RuntimeError("Austin is still running.")

        if not self.data:
            return 0

        # Prepare stats
        for sample in self.data:
            try:
                self.stats.update(Sample.parse(sample))
            except InvalidSample:
                pass

        for function, modules in self.tests.items():
            for module, markers in modules.items():
                test_stats = self._find_test(function, module)
                if test_stats is None:
                    # The test was not found. Either there is no such test or
                    # Austin did not collect any statistics for it.
                    continue

                total_test_time = sum(fs.total.time for fs in test_stats)
                total_test_malloc = sum(fs.total.time if self.mode ==
                                        "-m" else fs.total.memory_alloc
                                        for fs in test_stats)
                total_test_dealloc = (sum(
                    fs.total.memory_dealloc
                    for fs in test_stats) if self.mode == "-f" else 0)

                for marker in markers:
                    outcome = marker(
                        test_stats,
                        total_test_time,
                        total_test_malloc,
                        total_test_dealloc,
                    )
                    self.report.append((function, module, outcome))

        return sum(1 for outcome in self.report if not outcome[2])

    def start(self) -> None:
        """Start Austin."""
        args = ["-t", "10", "-i", self.interval, "-p", str(os.getpid())]
        if self.mode:
            args.append(self.mode)
        if self.children:
            args.append("-C")

        super().start(args)
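
The introspection in register_test merges three layers of marker arguments: the marker function's declared defaults first, then the keyword arguments given on the marker, then the positional ones. A standalone sketch of that layering, using a hypothetical marker function total_time:

def total_time(item, time_ms=100, cpu=False):  # hypothetical marker function
    ...

arg_names = total_time.__code__.co_varnames[1:total_time.__code__.co_argcount]
defaults = total_time.__defaults__ or []

marker_args = dict(zip(arg_names[-len(defaults):], defaults))  # declared defaults
marker_args.update({"cpu": True})                              # marker kwargs override
marker_args.update(dict(zip(arg_names, (50,))))                # positional args win last
assert marker_args == {"time_ms": 50, "cpu": True}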
Example #5
def test_austin_stats_single_process():
    stats = AustinStats(stats_type=AustinStatsType.WALL)

    stats.update(
        Sample.parse("P42;T0x7f45645646;foo_module.py:foo:10 152",
                     MetricType.TIME)[0])
    assert stats == AustinStats(
        stats_type=AustinStatsType.WALL,
        processes={
            42:
            ProcessStats(
                pid=42,
                threads={
                    "0x7f45645646":
                    ThreadStats(
                        label="0x7f45645646",
                        own=Metric(MetricType.TIME, 0),
                        total=Metric(MetricType.TIME, 152),
                        children={
                            Frame("foo", "foo_module.py", 10):
                            FrameStats(
                                label=Frame("foo", "foo_module.py", 10),
                                own=Metric(MetricType.TIME, 152),
                                total=Metric(MetricType.TIME, 152),
                            )
                        },
                    )
                },
            )
        },
    )

    stats.update(Sample.parse("P42;T0x7f45645646 148", MetricType.TIME)[0])
    assert stats == AustinStats(
        stats_type=AustinStatsType.WALL,
        processes={
            42:
            ProcessStats(
                pid=42,
                threads={
                    "0x7f45645646":
                    ThreadStats(
                        label="0x7f45645646",
                        total=Metric(MetricType.TIME, 300),
                        own=Metric(MetricType.TIME, 148),
                        children={
                            Frame("foo", "foo_module.py", 10):
                            FrameStats(
                                label=Frame("foo", "foo_module.py", 10),
                                own=Metric(MetricType.TIME, 152),
                                total=Metric(MetricType.TIME, 152),
                            )
                        },
                    )
                },
            )
        },
    )

    stats.update(
        Sample.parse("P42;T0x7f45645646;foo_module.py:foo:10 100",
                     MetricType.TIME)[0])
    assert stats == AustinStats(
        stats_type=AustinStatsType.WALL,
        processes={
            42:
            ProcessStats(
                pid=42,
                threads={
                    "0x7f45645646":
                    ThreadStats(
                        label="0x7f45645646",
                        total=Metric(MetricType.TIME, 400),
                        own=Metric(MetricType.TIME, 148),
                        children={
                            Frame("foo", "foo_module.py", 10):
                            FrameStats(
                                label=Frame("foo", "foo_module.py", 10),
                                own=Metric(MetricType.TIME, 252),
                                total=Metric(MetricType.TIME, 252),
                            )
                        },
                    )
                },
            )
        },
    )

    stats.update(
        Sample.parse("P42;T0x7f45645646;foo_module.py:bar:35 400",
                     MetricType.TIME)[0])
    assert stats == AustinStats(
        stats_type=AustinStatsType.WALL,
        processes={
            42:
            ProcessStats(
                pid=42,
                threads={
                    "0x7f45645646":
                    ThreadStats(
                        label="0x7f45645646",
                        total=Metric(MetricType.TIME, 800),
                        own=Metric(MetricType.TIME, 148),
                        children={
                            Frame("foo", "foo_module.py", 10):
                            FrameStats(
                                label=Frame("foo", "foo_module.py", 10),
                                own=Metric(MetricType.TIME, 252),
                                total=Metric(MetricType.TIME, 252),
                            ),
                            Frame("bar", "foo_module.py", 35):
                            FrameStats(
                                label=Frame("bar", "foo_module.py", 35),
                                own=Metric(MetricType.TIME, 400),
                                total=Metric(MetricType.TIME, 400),
                            ),
                        },
                    )
                },
            )
        },
    )

    stats.update(
        Sample.parse("P42;T0x7f45645664;foo_module.py:foo:10 152",
                     MetricType.TIME)[0])
    assert stats == AustinStats(
        stats_type=AustinStatsType.WALL,
        processes={
            42:
            ProcessStats(
                pid=42,
                threads={
                    "0x7f45645664":
                    ThreadStats(
                        label="0x7f45645664",
                        own=Metric(MetricType.TIME, 0),
                        total=Metric(MetricType.TIME, 152),
                        children={
                            Frame("foo", "foo_module.py", 10):
                            FrameStats(
                                label=Frame("foo", "foo_module.py", 10),
                                own=Metric(MetricType.TIME, 152),
                                total=Metric(MetricType.TIME, 152),
                            )
                        },
                    ),
                    "0x7f45645646":
                    ThreadStats(
                        label="0x7f45645646",
                        total=Metric(MetricType.TIME, 800),
                        own=Metric(MetricType.TIME, 148),
                        children={
                            Frame("foo", "foo_module.py", 10):
                            FrameStats(
                                label=Frame("foo", "foo_module.py", 10),
                                own=Metric(MetricType.TIME, 252),
                                total=Metric(MetricType.TIME, 252),
                            ),
                            Frame("bar", "foo_module.py", 35):
                            FrameStats(
                                label=Frame("bar", "foo_module.py", 35),
                                own=Metric(MetricType.TIME, 400),
                                total=Metric(MetricType.TIME, 400),
                            ),
                        },
                    ),
                },
            )
        },
    )
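
The totals asserted above follow a simple accumulation rule: every sample adds its metric to the thread total and to the total of each frame on its stack, while the own metric goes to the topmost frame (or to the thread itself when the stack is empty). A sketch of that rule, using the four samples that hit thread 0x7f45645646 in this test:

samples = [(("foo",), 152), ((), 148), (("foo",), 100), (("bar",), 400)]

own, total = {}, {"<thread>": 0}
for stack, value in samples:
    total["<thread>"] += value               # thread total grows with every sample
    leaf = stack[-1] if stack else "<thread>"
    own[leaf] = own.get(leaf, 0) + value     # the leaf owns the sample
    for frame in stack:
        total[frame] = total.get(frame, 0) + value

assert total["<thread>"] == 800 and own["<thread>"] == 148
assert own["foo"] == total["foo"] == 252
assert own["bar"] == total["bar"] == 400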
Example #6
class AustinModel(Model):
    """Austin model.

    This is a borg.
    """

    __borg__: Dict[str, Any] = {}

    def __init__(self, mode: AustinProfileMode) -> None:
        self.__dict__ = self.__borg__

        self.mode = mode

        self._samples = 0
        self._invalids = 0
        self._last_stack: Dict[str, Sample] = {}
        self._stats = AustinStats(
            AustinStatsType.MEMORY
            if mode is AustinProfileMode.MEMORY else AustinStatsType.WALL)
        self._stats.timestamp = time()

        self._austin_version: Optional[str] = None
        self._python_version: Optional[str] = None

        self._threads = OrderedSet()

    def get_versions(self) -> Tuple[Optional[str], Optional[str]]:
        """Get Austin and Python versions."""
        return self._austin_version, self._python_version

    def set_versions(self, austin_version: str, python_version: str) -> None:
        """Set Austin and Python versions."""
        self._austin_version = austin_version
        self._python_version = python_version

    def update(self, raw_sample: str) -> None:
        """Update current statistics with a new sample."""
        try:
            (sample, ) = Sample.parse(
                raw_sample,
                MetricType.MEMORY
                if self.mode is AustinProfileMode.MEMORY else MetricType.TIME,
            )
            self._stats.update(sample)
            self._stats.timestamp = time()
            thread_key = f"{sample.pid}:{sample.thread}"
            self._last_stack[thread_key] = sample
            self._threads.add(thread_key)
        except InvalidSample:
            self._invalids += 1
        finally:
            self._samples += 1

    def get_last_stack(self, thread_key: str) -> Sample:
        """Get the last seen stack for the given thread."""
        return self._last_stack[thread_key]

    @property
    def stats(self) -> AustinStats:
        """The current Austin statistics."""
        return self._stats

    @property
    def threads(self) -> OrderedSet:
        """The seen threads as ordered set."""
        return self._threads

    @property
    def samples_count(self) -> int:
        """Get the sample count."""
        return self._samples

    @property
    def error_rate(self) -> float:
        """Get the error rate."""
        # Avoid division by zero before any sample has been received.
        return self._invalids / self._samples if self._samples else 0.0
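
The "borg" idiom noted in the class docstring makes all instances share one attribute namespace by pointing each instance's __dict__ at a class-level dict. A minimal, self-contained sketch of the pattern:

class Borg:
    __shared__ = {}  # single state dict for the whole class

    def __init__(self) -> None:
        self.__dict__ = self.__shared__  # every instance aliases the shared state

a, b = Borg(), Borg()
a.x = 42
assert b.x == 42   # state is shared across instances...
assert a is not b  # ...even though the objects are distinct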