Example #1
    def test_erasing_parallel(self):
        self.make_file("datafile.1")
        self.make_file("datafile.2")
        self.make_file(".coverage")
        data_files = CoverageDataFiles("datafile")
        data_files.erase(parallel=True)
        self.assert_file_count("datafile.*", 0)
        self.assert_exists(".coverage")
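The erase behaviour shown above can also be exercised outside the test harness. A minimal standalone sketch, assuming the coverage 4.x coverage.data module and the same hypothetical "datafile" base name:

    from coverage.data import CoverageDataFiles

    data_files = CoverageDataFiles("datafile")
    # With parallel=True, the base file plus any suffixed "datafile.*" files
    # left by parallel runs are removed; the unrelated ".coverage" file in
    # the test above is untouched because it does not match the base name.
    data_files.erase(parallel=True)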
Example #2
    def test_debug_data_with_no_data(self):
        data_files = CoverageDataFiles()
        self.command_line("debug data")
        self.assertMultiLineEqual(self.stdout(), textwrap.dedent("""\
            -- data ------------------------------------------------------
            path: FILENAME
            No data collected
            """).replace("FILENAME", data_files.filename))
Example #3
    def test_writing_to_other_file(self):
        data_files = CoverageDataFiles(".otherfile")
        covdata = CoverageData()
        covdata.add_lines(LINES_1)
        data_files.write(covdata)
        self.assert_doesnt_exist(".coverage")
        self.assert_exists(".otherfile")

        data_files.write(covdata, suffix="extra")
        self.assert_exists(".otherfile.extra")
        self.assert_doesnt_exist(".coverage")
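Data written through CoverageDataFiles can be read back with the CoverageData API used in the other examples. A minimal sketch that follows on from the test above (".otherfile" is the same name used there, LINES_1 is the test fixture's line data):

    # Read the file written above back into a fresh CoverageData object.
    readback = CoverageData()
    readback.read_file(".otherfile")
    print(readback.measured_files())  # the files recorded in LINES_1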
Example #4
    def test_debug_data(self):
        data = CoverageData()
        data.add_lines({
            "file1.py": dict.fromkeys(range(1, 18)),
            "file2.py": dict.fromkeys(range(1, 24)),
        })
        data.add_file_tracers({"file1.py": "a_plugin"})
        data_files = CoverageDataFiles()
        data_files.write(data)

        self.command_line("debug data")
        self.assertMultiLineEqual(self.stdout(), textwrap.dedent("""\
            -- data ------------------------------------------------------
            path: FILENAME
            has_arcs: False

            2 files:
            file1.py: 17 lines [a_plugin]
            file2.py: 23 lines
            """).replace("FILENAME", data_files.filename))
Example #5
def combine(data_paths, output_file):
    try:
        if CoverageData.combine_parallel_data:
            # Use the old API (coverage 3.6 and 3.7.x).
            data = CoverageData(output_file)
            data = coverage3x_combine(data_paths, data)
            data.write()
    except AttributeError:
        # Newer versions have better support for combining files,
        # and the combine_parallel_data() method has been moved
        # to the new CoverageDataFiles class.
        # See https://bitbucket.org/ned/coveragepy/pull-requests/62
        from coverage.data import CoverageDataFiles

        data = CoverageData()

        dataf = CoverageDataFiles()
        dataf.combine_parallel_data(data, data_paths=data_paths)

        data.write_file(output_file)
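A usage sketch for the helper above, with hypothetical input paths; combine() picks the 3.x or 4.x coverage API at runtime and writes the merged result to output_file (coverage3x_combine is a helper from the same project, not shown here):

    # Merge two parallel data files into a single .coverage file.
    combine(
        data_paths=[".coverage.worker1", ".coverage.worker2"],  # hypothetical inputs
        output_file=".coverage",
    )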
Example #6
    def _init(self):
        """Set all the initial state.

        This is called by the public methods to initialize state. This lets us
        construct a :class:`Coverage` object, then tweak its state before this
        function is called.

        """
        if self._inited:
            return

        # Create and configure the debugging controller. COVERAGE_DEBUG_FILE
        # is an environment variable, the name of a file to append debug logs
        # to.
        if self._debug_file is None:
            debug_file_name = os.environ.get("COVERAGE_DEBUG_FILE")
            if debug_file_name:
                self._debug_file = open(debug_file_name, "a")
            else:
                self._debug_file = sys.stderr
        self.debug = DebugControl(self.config.debug, self._debug_file)

        # Load plugins
        self.plugins = Plugins.load_plugins(self.config.plugins, self.config,
                                            self.debug)

        # _exclude_re is a dict that maps exclusion list names to compiled
        # regexes.
        self._exclude_re = {}
        self._exclude_regex_stale()

        files.set_relative_directory()

        # The source argument can be directories or package names.
        self.source = []
        self.source_pkgs = []
        for src in self.config.source or []:
            if os.path.exists(src):
                self.source.append(files.canonical_filename(src))
            else:
                self.source_pkgs.append(src)

        self.omit = prep_patterns(self.config.omit)
        self.include = prep_patterns(self.config.include)

        concurrency = self.config.concurrency
        if concurrency == "multiprocessing":
            patch_multiprocessing()
            concurrency = None

        self.collector = Collector(
            should_trace=self._should_trace,
            check_include=self._check_include_omit_etc,
            timid=self.config.timid,
            branch=self.config.branch,
            warn=self._warn,
            concurrency=concurrency,
        )

        # Early warning if we aren't going to be able to support plugins.
        if self.plugins.file_tracers and not self.collector.supports_plugins:
            self._warn("Plugin file tracers (%s) aren't supported with %s" % (
                ", ".join(plugin._coverage_plugin_name
                          for plugin in self.plugins.file_tracers),
                self.collector.tracer_name(),
            ))
            for plugin in self.plugins.file_tracers:
                plugin._coverage_enabled = False

        # Suffixes are a bit tricky.  We want to use the data suffix only when
        # collecting data, not when combining data.  So we save it as
        # `self.run_suffix` now, and promote it to `self.data_suffix` if we
        # find that we are collecting data later.
        if self._data_suffix or self.config.parallel:
            if not isinstance(self._data_suffix, string_class):
                # if data_suffix=True, use .machinename.pid.random
                self._data_suffix = True
        else:
            self._data_suffix = None
        self.data_suffix = None
        self.run_suffix = self._data_suffix

        # Create the data file.  We do this at construction time so that the
        # data file will be written into the directory where the process
        # started rather than wherever the process eventually chdir'd to.
        self.data = CoverageData(debug=self.debug)
        self.data_files = CoverageDataFiles(basename=self.config.data_file)

        # The directories for files considered "installed with the interpreter".
        self.pylib_dirs = set()
        if not self.config.cover_pylib:
            # Look at where some standard modules are located. That's the
            # indication for "installed with the interpreter". In some
            # environments (virtualenv, for example), these modules may be
            # spread across a few locations. Look at all the candidate modules
            # we've imported, and take all the different ones.
            for m in (atexit, inspect, os, platform, _structseq, traceback):
                if m is not None and hasattr(m, "__file__"):
                    self.pylib_dirs.add(self._canonical_dir(m))
            if _structseq and not hasattr(_structseq, '__file__'):
                # PyPy 2.4 has no __file__ in the builtin modules, but the code
                # objects still have the file names.  So dig into one to find
                # the path to exclude.
                structseq_new = _structseq.structseq_new
                try:
                    structseq_file = structseq_new.func_code.co_filename
                except AttributeError:
                    structseq_file = structseq_new.__code__.co_filename
                self.pylib_dirs.add(self._canonical_dir(structseq_file))

        # To avoid tracing the coverage.py code itself, we skip anything
        # located where we are.
        self.cover_dirs = [self._canonical_dir(__file__)]
        if env.TESTING:
            # When testing, we use PyContracts, which should be considered
            # part of coverage.py, and it uses six. Exclude those directories
            # just as we exclude ourselves.
            import contracts, six
            for mod in [contracts, six]:
                self.cover_dirs.append(self._canonical_dir(mod))

        # Set the reporting precision.
        Numbers.set_precision(self.config.precision)

        atexit.register(self._atexit)

        self._inited = True

        # Create the matchers we need for _should_trace
        if self.source or self.source_pkgs:
            self.source_match = TreeMatcher(self.source)
            self.source_pkgs_match = ModuleMatcher(self.source_pkgs)
        else:
            if self.cover_dirs:
                self.cover_match = TreeMatcher(self.cover_dirs)
            if self.pylib_dirs:
                self.pylib_match = TreeMatcher(self.pylib_dirs)
        if self.include:
            self.include_match = FnmatchMatcher(self.include)
        if self.omit:
            self.omit_match = FnmatchMatcher(self.omit)

        # The user may want to debug things, show info if desired.
        wrote_any = False
        if self.debug.should('config'):
            config_info = sorted(self.config.__dict__.items())
            self.debug.write_formatted_info("config", config_info)
            wrote_any = True

        if self.debug.should('sys'):
            self.debug.write_formatted_info("sys", self.sys_info())
            for plugin in self.plugins:
                header = "sys: " + plugin._coverage_plugin_name
                info = plugin.sys_info()
                self.debug.write_formatted_info(header, info)
            wrote_any = True

        if wrote_any:
            self.debug.write_formatted_info("end", ())
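The suffix handling described in the comments above ("if data_suffix=True, use .machinename.pid.random") is ultimately applied by CoverageDataFiles.write(). A minimal sketch of that promotion, assuming the coverage 4.x API and a hypothetical "mod.py" entry:

    covdata = CoverageData()
    covdata.add_lines({"mod.py": {1: None}})  # hypothetical line data

    data_files = CoverageDataFiles()
    # suffix=True asks write() to generate a hostname.pid.random suffix,
    # producing a file such as ".coverage.myhost.1234.567890".
    data_files.write(covdata, suffix=True)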
Example #7
    def _init(self):
        """Set all the initial state.

        This is called by the public methods to initialize state. This lets us
        construct a :class:`Coverage` object, then tweak its state before this
        function is called.

        """
        if self._inited:
            return

        self._inited = True

        # Create and configure the debugging controller. COVERAGE_DEBUG_FILE
        # is an environment variable, the name of a file to append debug logs
        # to.
        if self._debug_file is None:
            debug_file_name = os.environ.get("COVERAGE_DEBUG_FILE")
            if debug_file_name:
                self._debug_file = open(debug_file_name, "a")
            else:
                self._debug_file = sys.stderr
        self._debug = DebugControl(self.config.debug, self._debug_file)

        # _exclude_re is a dict that maps exclusion list names to compiled regexes.
        self._exclude_re = {}

        set_relative_directory()

        # Load plugins
        self._plugins = Plugins.load_plugins(self.config.plugins, self.config,
                                             self._debug)

        # Run configuring plugins.
        for plugin in self._plugins.configurers:
            # We need an object with set_option and get_option. Either self or
            # self.config will do. Choosing randomly stops people from doing
            # other things with those objects, against the public API.  Yes,
            # this is a bit childish. :)
            plugin.configure([self, self.config][int(time.time()) % 2])

        concurrency = self.config.concurrency or []
        if "multiprocessing" in concurrency:
            if not patch_multiprocessing:
                raise CoverageException(  # pragma: only jython
                    "multiprocessing is not supported on this Python")
            patch_multiprocessing(rcfile=self.config.config_file)
            # Multi-processing uses parallel for the subprocesses, so also use
            # it for the main process.
            self.config.parallel = True

        self._collector = Collector(
            should_trace=self._should_trace,
            check_include=self._check_include_omit_etc,
            timid=self.config.timid,
            branch=self.config.branch,
            warn=self._warn,
            concurrency=concurrency,
        )

        # Early warning if we aren't going to be able to support plugins.
        if self._plugins.file_tracers and not self._collector.supports_plugins:
            self._warn("Plugin file tracers (%s) aren't supported with %s" % (
                ", ".join(plugin._coverage_plugin_name
                          for plugin in self._plugins.file_tracers),
                self._collector.tracer_name(),
            ))
            for plugin in self._plugins.file_tracers:
                plugin._coverage_enabled = False

        # Create the file classifying substructure.
        self._inorout = self._inorout_class(warn=self._warn)
        self._inorout.configure(self.config)
        self._inorout.plugins = self._plugins
        self._inorout.disp_class = self._collector.file_disposition_class

        # Suffixes are a bit tricky.  We want to use the data suffix only when
        # collecting data, not when combining data.  So we save it as
        # `self._run_suffix` now, and promote it to `self._data_suffix` if we
        # find that we are collecting data later.
        if self._data_suffix_specified or self.config.parallel:
            if not isinstance(self._data_suffix_specified, string_class):
                # if data_suffix=True, use .machinename.pid.random
                self._data_suffix_specified = True
        else:
            self._data_suffix_specified = None
        self._data_suffix = None
        self._run_suffix = self._data_suffix_specified

        # Create the data file.  We do this at construction time so that the
        # data file will be written into the directory where the process
        # started rather than wherever the process eventually chdir'd to.
        self._data = CoverageData(debug=self._debug)
        self._data_files = CoverageDataFiles(
            basename=self.config.data_file,
            warn=self._warn,
            debug=self._debug,
        )

        # Set the reporting precision.
        Numbers.set_precision(self.config.precision)

        atexit.register(self._atexit)

        # The user may want to debug things, show info if desired.
        self._write_startup_debug()
Example #8
    def setUp(self):
        super(CoverageDataFilesTest, self).setUp()
        self.data_files = CoverageDataFiles()
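The fixture above is typically exercised with a write/read round trip. A minimal sketch of such a test, with a hypothetical "mod.py" module, assuming the coverage 4.x CoverageData API used in the other examples:

    def test_write_then_read(self):
        covdata = CoverageData()
        covdata.add_lines({"mod.py": dict.fromkeys(range(1, 5))})  # hypothetical data
        self.data_files.write(covdata)
        self.assert_exists(".coverage")

        # Read the default data file back into a fresh CoverageData object.
        readback = CoverageData()
        self.data_files.read(readback)
        self.assertEqual(readback.measured_files(), ["mod.py"])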