Example 1
    def try_multiprocessing_code(
        self, code, expected_out, the_module, nprocs, concurrency="multiprocessing", args=""
    ):
        """Run code using multiprocessing, it should produce `expected_out`."""
        self.make_file("multi.py", code)
        self.make_file(".coveragerc", """\
            [run]
            concurrency = %s
            source = .
            """ % concurrency)

        for start_method in ["fork", "spawn"]:
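            # Skip start methods this platform doesn't support (for example,
            # "fork" is unavailable on Windows).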
            if start_method and start_method not in multiprocessing.get_all_start_methods():
                continue

            remove_files(".coverage", ".coverage.*")
            cmd = "coverage run {args} multi.py {start_method}".format(
                args=args, start_method=start_method,
            )
            out = self.run_command(cmd)
            expected_cant_trace = cant_trace_msg(concurrency, the_module)

            if expected_cant_trace is not None:
                assert out == expected_cant_trace
            else:
                assert out.rstrip() == expected_out
                assert len(glob.glob(".coverage.*")) == nprocs + 1

                out = self.run_command("coverage combine")
                assert out == ""
                out = self.run_command("coverage report -m")

                last_line = self.squeezed_lines(out)[-1]
                assert re.search(r"TOTAL \d+ 0 100%", last_line)
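For context, a minimal usage sketch follows. It is an assumption, not a test copied from the original suite: the test name, the inlined program, the expected output, and the process count are hypothetical, and it relies on `make_file` dedenting the snippet the same way it does for the `.coveragerc` text above.

    def test_multiprocessing_squares(self):
        # Hypothetical test: three pool workers plus the parent process
        # should leave nprocs + 1 = 4 data files before combining.
        code = """\
            import multiprocessing, sys

            def square(x):
                return x * x

            if __name__ == "__main__":
                if len(sys.argv) > 1 and sys.argv[1]:
                    multiprocessing.set_start_method(sys.argv[1])
                with multiprocessing.Pool(3) as pool:
                    total = sum(pool.map(square, range(10)))
                print("total = %d" % total)
            """
        # Passing `multiprocessing` as `the_module` is a guess; the real
        # suite may hand `cant_trace_msg` a different concurrency module.
        self.try_multiprocessing_code(code, "total = 285", multiprocessing, nprocs=3)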
Example 2
    def clean_local_file_imports(self):
        """Clean up the results of calls to `import_local_file`.

        Use this if you need to `import_local_file` the same file twice in
        one test.

        """
        # So that we can re-import files, clean them out first.
        self._sys_module_saver.restore()

        # Also have to clean out the .pyc file, since the timestamp
        # resolution is only one second, a changed file might not be
        # picked up.
        remove_files("*.pyc", "*$py.class")
        remove_tree("__pycache__")
        importlib.invalidate_caches()
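A minimal sketch of the situation the docstring describes, re-importing the same local file twice in one test. The test name and module contents are hypothetical, and `import_local_file` is shown as the module-level helper the docstring refers to; these are assumptions rather than code from the suite.

    def test_reimport_changed_module(self):
        # Hypothetical test: import a local module, rewrite it, then clean
        # the import state so the second import sees the new contents.
        self.make_file("mod.py", "ANSWER = 1\n")
        mod = import_local_file("mod")
        assert mod.ANSWER == 1

        self.make_file("mod.py", "ANSWER = 2\n")
        self.clean_local_file_imports()
        mod = import_local_file("mod")
        assert mod.ANSWER == 2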
    def try_multiprocessing_code(self,
                                 code,
                                 expected_out,
                                 the_module,
                                 nprocs,
                                 concurrency="multiprocessing",
                                 args=""):
        """Run code using multiprocessing, it should produce `expected_out`."""
        self.make_file("multi.py", code)
        self.make_file(
            ".coveragerc", """\
            [run]
            concurrency = %s
            source = .
            """ % concurrency)

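        # Explicit start methods exist only on Python 3.4+; older versions
        # fall back to a single pass with the platform default.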
        if env.PYVERSION >= (3, 4):
            start_methods = ['fork', 'spawn']
        else:
            start_methods = ['']

        for start_method in start_methods:
            if start_method and start_method not in multiprocessing.get_all_start_methods():
                continue

            remove_files(".coverage", ".coverage.*")
            cmd = "coverage run {args} multi.py {start_method}".format(
                args=args,
                start_method=start_method,
            )
            out = self.run_command(cmd)
            expected_cant_trace = cant_trace_msg(concurrency, the_module)

            if expected_cant_trace is not None:
                self.assertEqual(out, expected_cant_trace)
            else:
                self.assertEqual(out.rstrip(), expected_out)
                self.assertEqual(len(glob.glob(".coverage.*")), nprocs + 1)

                out = self.run_command("coverage combine")
                self.assertEqual(out, "")
                out = self.run_command("coverage report -m")

                last_line = self.squeezed_lines(out)[-1]
                self.assertRegex(last_line, r"multi.py \d+ 0 100%")