Example #1
    def add_file_tracers(self, file_tracers):
        """Add per-file plugin information.

        `file_tracers` is { filename: plugin_name, ... }

        """
        if self._debug.should('dataop'):
            self._debug.write("Adding file tracers: %d files" %
                              (len(file_tracers), ))
        if not file_tracers:
            return
        self._start_using()
        with self._connect() as con:
            for filename, plugin_name in file_tracers.items():
                file_id = self._file_id(filename)
                if file_id is None:
                    raise CoverageException(
                        f"Can't add file tracer data for unmeasured file '{filename}'"
                    )

                existing_plugin = self.file_tracer(filename)
                if existing_plugin:
                    if existing_plugin != plugin_name:
                        raise CoverageException(
                            "Conflicting file tracer name for '{}': {!r} vs {!r}"
                            .format(
                                filename,
                                existing_plugin,
                                plugin_name,
                            ))
                elif plugin_name:
                    con.execute(
                        "insert into tracer (file_id, tracer) values (?, ?)",
                        (file_id, plugin_name))
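
A minimal usage sketch for add_file_tracers(). The file path, plugin name, and line data below are illustrative assumptions; the file must already be present in the data (here via add_lines()), otherwise the method raises CoverageException:

from coverage import CoverageData

covdata = CoverageData(no_disk=True)   # in-memory data, a coverage 5.x option
# Measure the file first; the exact shape of the line data may vary by version.
covdata.add_lines({"/src/mypkg/mod.py": {1: None, 2: None}})
covdata.add_file_tracers({"/src/mypkg/mod.py": "mypkg.coverage_plugin"})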
Example #2
    def get_option(self, option_name):
        """Get an option from the configuration.

        `option_name` is a colon-separated string indicating the section and
        option name.  For example, the ``branch`` option in the ``[run]``
        section of the config file would be indicated with `"run:branch"`.

        Returns the value of the option.

        """
        # Special-cased options.
        if option_name == "paths":
            return self.paths

        # Check all the hard-coded options.
        for option_spec in self.CONFIG_FILE_OPTIONS:
            attr, where = option_spec[:2]
            if where == option_name:
                return getattr(self, attr)

        # See if it's a plugin option.
        plugin_name, _, key = option_name.partition(":")
        if key and plugin_name in self.plugins:
            return self.plugin_options.get(plugin_name, {}).get(key)

        # If we get here, we didn't find the option.
        raise CoverageException(f"No such option: {option_name!r}")
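
The docstring above documents the colon-separated form; the public Coverage object exposes the same lookup, for example:

import coverage

cov = coverage.Coverage()
branch = cov.get_option("run:branch")   # the "branch" option from the [run] section
paths = cov.get_option("paths")         # special-cased: returns the [paths] mapping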
Example #3
    def report(self, morfs):
        """Generate an HTML report for `morfs`.

        `morfs` is a list of modules or file names.

        """
        # Read the status data and check that this run used the same
        # global data as the last run.
        self.incr.read()
        self.incr.check_global_data(self.config, self.pyfile_html_source)

        # Process all the files.
        for fr, analysis in get_analysis_to_report(self.coverage, morfs):
            self.html_file(fr, analysis)

        if not self.all_files_nums:
            raise CoverageException("No data to report.")

        self.totals = sum(self.all_files_nums)

        # Write the index file.
        self.index_file()

        self.make_local_static_report_files()
        return self.totals.n_statements and self.totals.pc_covered
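
In normal use this reporter is driven through the public API rather than called directly; a rough sketch, with the measured module and output directory as illustrative assumptions:

import coverage

cov = coverage.Coverage()
cov.start()
import mypkg.main          # hypothetical code under measurement
cov.stop()
cov.save()
pct = cov.html_report(directory="htmlcov")   # returns the total percent covered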
Example #4
    def _connect(self):
        """Connect to the db and do universal initialization."""
        if self.con is not None:
            return

        # It can happen that Python switches threads while the tracer writes
        # data. The second thread will also try to write to the data,
        # effectively causing a nested context. However, given the idempotent
        # nature of the tracer operations, sharing a connection among threads
        # is not a problem.
        if self.debug:
            self.debug.write(f"Connecting to {self.filename!r}")
        try:
            self.con = sqlite3.connect(self.filename, check_same_thread=False)
        except sqlite3.Error as exc:
            raise CoverageException(
                f"Couldn't use data file {self.filename!r}: {exc}") from exc

        self.con.create_function("REGEXP", 2, _regexp)

        # This pragma makes writing faster. It disables rollbacks, but we never need them.
        # PyPy needs the .close() calls here, or sqlite gets twisted up:
        # https://bitbucket.org/pypy/pypy/issues/2872/default-isolation-mode-is-different-on
        self.execute("pragma journal_mode=off").close()
        # This pragma makes writing faster.
        self.execute("pragma synchronous=off").close()
Example #5
    def load_plugins(cls, modules, config, debug=None):
        """Load plugins from `modules`.

        Returns a Plugins object with the loaded and configured plugins.

        """
        plugins = cls()
        plugins.debug = debug

        for module in modules:
            plugins.current_module = module
            __import__(module)
            mod = sys.modules[module]

            coverage_init = getattr(mod, "coverage_init", None)
            if not coverage_init:
                raise CoverageException(
                    "Plugin module %r didn't define a coverage_init function" %
                    module)

            options = config.get_plugin_options(module)
            coverage_init(plugins, options)

        plugins.current_module = None
        return plugins
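
Each module named in `modules` must define a coverage_init function; a minimal sketch of such a plugin module, following the documented plugin protocol (module and class names are illustrative):

# mypkg/coverage_plugin.py -- a hypothetical module passed in `modules`
import coverage.plugin

class MyFileTracerPlugin(coverage.plugin.CoveragePlugin):
    """A do-nothing plugin; real plugins override file_tracer(), etc."""

def coverage_init(reg, options):
    # `reg` is the Plugins registry; `options` comes from config.get_plugin_options().
    reg.add_file_tracer(MyFileTracerPlugin())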
Example #6
def data_filename(fname, pkgdir=""):
    """Return the path to a data file of ours.

    The file is searched for on `STATIC_PATH`, and the first place it's found
    is returned.

    Each directory in `STATIC_PATH` is searched as-is, and also, if `pkgdir`
    is provided, at that sub-directory.

    """
    tried = []
    for static_dir in STATIC_PATH:
        static_filename = os.path.join(static_dir, fname)
        if os.path.exists(static_filename):
            return static_filename
        else:
            tried.append(static_filename)
        if pkgdir:
            static_filename = os.path.join(static_dir, pkgdir, fname)
            if os.path.exists(static_filename):
                return static_filename
            else:
                tried.append(static_filename)
    raise CoverageException(
        f"Couldn't find static file {fname!r} from {os.getcwd()!r}, tried: {tried!r}"
    )
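
A short usage sketch; the file and package-directory names are illustrative and not guaranteed to exist on disk:

# Look for "style.css" in each STATIC_PATH directory, then in its "htmlfiles"
# sub-directory, raising CoverageException if it is found nowhere.
css_path = data_filename("style.css", pkgdir="htmlfiles")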
Example #7
    def _start_tracer(self):
        """Start a new Tracer object, and store it in self.tracers."""
        tracer = self._trace_class()
        tracer.data = self.data
        tracer.trace_arcs = self.branch
        tracer.should_trace = self.should_trace
        tracer.should_trace_cache = self.should_trace_cache
        tracer.warn = self.warn

        if hasattr(tracer, 'concur_id_func'):
            tracer.concur_id_func = self.concur_id_func
        elif self.concur_id_func:
            raise CoverageException(
                "Can't support concurrency={} with {}, only threads are supported"
                .format(
                    self.concurrency,
                    self.tracer_name(),
                ))

        if hasattr(tracer, 'file_tracers'):
            tracer.file_tracers = self.file_tracers
        if hasattr(tracer, 'threading'):
            tracer.threading = self.threading
        if hasattr(tracer, 'check_include'):
            tracer.check_include = self.check_include
        if hasattr(tracer, 'should_start_context'):
            tracer.should_start_context = self.should_start_context
            tracer.switch_context = self.switch_context
        if hasattr(tracer, 'disable_plugin'):
            tracer.disable_plugin = self.disable_plugin

        fn = tracer.start()
        self.tracers.append(tracer)

        return fn
Example #8
    def execute(self, sql, parameters=()):
        """Same as :meth:`python:sqlite3.Connection.execute`."""
        if self.debug:
            tail = f" with {parameters!r}" if parameters else ""
            self.debug.write(f"Executing {sql!r}{tail}")
        try:
            try:
                return self.con.execute(sql, parameters)
            except Exception:
                # In some cases, an error might happen that isn't really an
                # error.  Try again immediately.
                # https://github.com/nedbat/coveragepy/issues/1010
                return self.con.execute(sql, parameters)
        except sqlite3.Error as exc:
            msg = str(exc)
            try:
                # `execute` is the first thing we do with the database, so try
                # hard to provide useful hints if something goes wrong now.
                with open(self.filename, "rb") as bad_file:
                    cov4_sig = b"!coverage.py: This is a private format"
                    if bad_file.read(len(cov4_sig)) == cov4_sig:
                        msg = ("Looks like a coverage 4.x data file. "
                               "Are you mixing versions of coverage?")
            except Exception:
                pass
            if self.debug:
                self.debug.write(f"EXCEPTION from execute: {msg}")
            raise CoverageException(
                f"Couldn't use data file {self.filename!r}: {msg}") from exc
Example #9
    def loads(self, data):
        """Deserialize data from :meth:`dumps`.

        Use with a newly-created empty :class:`CoverageData` object.  It's
        undefined what happens if the object already has data in it.

        Note that this is not for reading data from a coverage data file.  It
        is only for use on data you produced with :meth:`dumps`.

        Arguments:
            data: A byte string of serialized data produced by :meth:`dumps`.

        .. versionadded:: 5.0

        """
        if self._debug.should('dataio'):
            self._debug.write(
                f"Loading data into data file {self._filename!r}")
        if data[:1] != b'z':
            raise CoverageException(
                f"Unrecognized serialization: {data[:40]!r} (head of {len(data)} bytes)"
            )
        script = zlib.decompress(data[1:]).decode("utf8")
        self._dbs[threading.get_ident()] = db = SqliteDb(
            self._filename, self._debug)
        with db:
            db.executescript(script)
        self._read_db()
        self._have_used = True
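
A round-trip sketch pairing loads() with dumps(), assuming the no_disk constructor option available in coverage 5.x; the file name and line numbers are illustrative:

from coverage import CoverageData

original = CoverageData(no_disk=True)
original.add_lines({"/src/mod.py": {1: None, 3: None}})   # illustrative data
blob = original.dumps()                # bytes: b'z' plus zlib-compressed SQL

clone = CoverageData(no_disk=True)     # must be newly created and empty
clone.loads(blob)
print(clone.lines("/src/mod.py"))      # expected: [1, 3]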
Example #10
    def _choose_lines_or_arcs(self, lines=False, arcs=False):
        """Force the data file to choose between lines and arcs."""
        assert lines or arcs
        assert not (lines and arcs)
        if lines and self._has_arcs:
            raise CoverageException(
                "Can't add line measurements to existing branch data")
        if arcs and self._has_lines:
            raise CoverageException(
                "Can't add branch measurements to existing line data")
        if not self._has_arcs and not self._has_lines:
            self._has_lines = lines
            self._has_arcs = arcs
            with self._connect() as con:
                con.execute("insert into meta (key, value) values (?, ?)",
                            ('has_arcs', str(int(arcs))))
Example #11
    def read(self, filenames):
        # RawConfigParser takes a filename or list of filenames, but we only
        # ever call this with a single filename.
        assert isinstance(filenames, (bytes, str, os.PathLike))
        filename = os.fspath(filenames)

        try:
            with open(filename, encoding='utf-8') as fp:
                toml_text = fp.read()
        except OSError:
            return []
        if tomli is not None:
            toml_text = substitute_variables(toml_text, os.environ)
            try:
                self.data = tomli.loads(toml_text)
            except tomli.TOMLDecodeError as err:
                raise TomlDecodeError(str(err)) from err
            return [filename]
        else:
            has_toml = re.search(r"^\[tool\.coverage\.", toml_text, flags=re.MULTILINE)
            if self.our_file or has_toml:
                # Looks like they meant to read TOML, but we can't read it.
                msg = "Can't read {!r} without TOML support. Install with [toml] extra"
                raise CoverageException(msg.format(filename))
            return []
Example #12
    def getregexlist(self, section, option):
        name, values = self._get(section, option)
        self._check_type(name, option, values, list, "a list")
        for value in values:
            value = value.strip()
            try:
                re.compile(value)
            except re.error as e:
                raise CoverageException(f"Invalid [{name}].{option} value {value!r}: {e}") from e
        return values
Example #13
    def __exit__(self, exc_type, exc_value, traceback):
        self.nest -= 1
        if self.nest == 0:
            try:
                self.con.__exit__(exc_type, exc_value, traceback)
                self.close()
            except Exception as exc:
                if self.debug:
                    self.debug.write(f"EXCEPTION from __exit__: {exc}")
                raise CoverageException(
                    f"Couldn't end data file {self.filename!r}: {exc}"
                ) from exc
Example #14
    def dollar_replace(match):
        """Called for each $replacement."""
        # Only one of the groups will have matched, just get its text.
        word = next(g for g in match.group('dollar', 'word1', 'word2') if g)
        if word == "$":
            return "$"
        elif word in variables:
            return variables[word]
        elif match.group('strict'):
            msg = f"Variable {word} is undefined: {text!r}"
            raise CoverageException(msg)
        else:
            return match.group('defval')
Example #15
def source_for_morf(morf):
    """Get the source filename for the module-or-file `morf`."""
    if hasattr(morf, '__file__') and morf.__file__:
        filename = morf.__file__
    elif isinstance(morf, types.ModuleType):
        # A module should have had .__file__, otherwise we can't use it.
        # This could be a PEP-420 namespace package.
        raise CoverageException(f"Module {morf} has no file")
    else:
        filename = morf

    filename = source_for_file(filename)
    return filename
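
A short sketch of both accepted argument kinds; the file path is illustrative:

import re

source_for_morf(re)                    # a module object: returns its .py source file
source_for_morf("src/mypkg/mod.py")    # a file name: passed through source_for_file()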
Example #16
    def _read_db(self):
        """Read the metadata from a database so that we are ready to use it."""
        with self._dbs[threading.get_ident()] as db:
            try:
                schema_version, = db.execute_one(
                    "select version from coverage_schema")
            except Exception as exc:
                raise CoverageException(
                    "Data file {!r} doesn't seem to be a coverage data file: {}"
                    .format(self._filename, exc)) from exc
            else:
                if schema_version != SCHEMA_VERSION:
                    raise CoverageException(
                        "Couldn't use data file {!r}: wrong schema: {} instead of {}"
                        .format(self._filename, schema_version,
                                SCHEMA_VERSION))

            for row in db.execute(
                    "select value from meta where key = 'has_arcs'"):
                self._has_arcs = bool(int(row[0]))
                self._has_lines = not self._has_arcs

            for path, file_id in db.execute("select path, id from file"):
                self._file_map[path] = file_id
Example #17
def read_coverage_config(config_file, warn, **kwargs):
    """Read the coverage.py configuration.

    Arguments:
        config_file: a boolean or string, see the `Coverage` class for the
            tricky details.
        warn: a function to issue warnings.
        all others: keyword arguments from the `Coverage` class, used for
            setting values in the configuration.

    Returns:
        config:
            config is a CoverageConfig object read from the appropriate
            configuration file.

    """
    # Build the configuration from a number of sources:
    # 1) defaults:
    config = CoverageConfig()

    # 2) from a file:
    if config_file:
        files_to_try = config_files_to_try(config_file)

        for fname, our_file, specified_file in files_to_try:
            config_read = config.from_file(fname, warn, our_file=our_file)
            if config_read:
                break
            if specified_file:
                raise CoverageException(f"Couldn't read {fname!r} as a config file")

    # $set_env.py: COVERAGE_DEBUG - Options for --debug.
    # 3) from environment variables:
    env_data_file = os.environ.get('COVERAGE_FILE')
    if env_data_file:
        config.data_file = env_data_file
    debugs = os.environ.get('COVERAGE_DEBUG')
    if debugs:
        config.debug.extend(d.strip() for d in debugs.split(","))

    # 4) from constructor arguments:
    config.from_args(**kwargs)

    # Once all the config has been collected, there's a little post-processing
    # to do.
    config.post_process()

    return config
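
A hedged sketch of the precedence described above: the environment overrides the config file, and keyword arguments override both. The warn callable and values here are stand-ins:

import os

os.environ["COVERAGE_FILE"] = ".coverage.ci"   # step 3 overrides data_file
config = read_coverage_config(
    True,                              # search the default config files
    lambda msg, **kwargs: print(msg),  # stand-in warn function
    branch=True,                       # step 4: constructor-style keyword override
)
# config.data_file is now ".coverage.ci", and config.branch is True.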
Example #18
def get_analysis_to_report(coverage, morfs):
    """Get the files to report on.

    For each morf in `morfs`, if it should be reported on (based on the omit
    and include configuration options), yield a pair, the `FileReporter` and
    `Analysis` for the morf.

    """
    file_reporters = coverage._get_file_reporters(morfs)
    config = coverage.config

    if config.report_include:
        matcher = FnmatchMatcher(prep_patterns(config.report_include),
                                 "report_include")
        file_reporters = [
            fr for fr in file_reporters if matcher.match(fr.filename)
        ]

    if config.report_omit:
        matcher = FnmatchMatcher(prep_patterns(config.report_omit),
                                 "report_omit")
        file_reporters = [
            fr for fr in file_reporters if not matcher.match(fr.filename)
        ]

    if not file_reporters:
        raise CoverageException("No data to report.")

    for fr in sorted(file_reporters):
        try:
            analysis = coverage._analyze(fr)
        except NoSource:
            if not config.ignore_errors:
                raise
        except NotPython:
            # Only report errors for .py files, and only if we didn't
            # explicitly suppress those errors.
            # NotPython is only raised by PythonFileReporter, which has a
            # should_be_python() method.
            if fr.should_be_python():
                if config.ignore_errors:
                    msg = f"Couldn't parse Python file '{fr.filename}'"
                    coverage._warn(msg, slug="couldnt-parse")
                else:
                    raise
        else:
            yield (fr, analysis)
Example #19
    def execute_one(self, sql, parameters=()):
        """Execute a statement and return the one row that results.

        This is like execute(sql, parameters).fetchone(), except it is
        correct in reading the entire result set.  This will raise an
        exception if more than one row results.

        Returns a row, or None if there were no rows.
        """
        rows = list(self.execute(sql, parameters))
        if len(rows) == 0:
            return None
        elif len(rows) == 1:
            return rows[0]
        else:
            raise CoverageException(
                f"Sql {sql!r} shouldn't return {len(rows)} rows")
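
A small sketch of the three possible outcomes, reusing the schema query seen elsewhere in this file; `db` stands for an open SqliteDb instance:

row = db.execute_one("select version from coverage_schema")
if row is None:
    print("no schema row at all")
else:
    (schema_version,) = row    # exactly one row: unpack its single column
# A query matching several rows would raise CoverageException instead:
# db.execute_one("select path from file")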
Example #20
    def touch_files(self, filenames, plugin_name=""):
        """Ensure that `filenames` appear in the data, empty if needed.

        `plugin_name` is the name of the plugin responsible for these files. It is used
        to associate the right filereporter, etc.
        """
        if self._debug.should('dataop'):
            self._debug.write(f"Touching {filenames!r}")
        self._start_using()
        with self._connect():  # Use this to get one transaction.
            if not self._has_arcs and not self._has_lines:
                raise CoverageException(
                    "Can't touch files in an empty CoverageData")

            for filename in filenames:
                self._file_id(filename, add=True)
                if plugin_name:
                    # Set the tracer for this file
                    self.add_file_tracers({filename: plugin_name})
Example #21
def should_fail_under(total, fail_under, precision):
    """Determine if a total should fail due to fail-under.

    `total` is a float, the coverage measurement total. `fail_under` is the
    fail_under setting to compare with. `precision` is the number of digits
    to consider after the decimal point.

    Returns True if the total should fail.

    """
    # We can never achieve higher than 100% coverage, or less than zero.
    if not (0 <= fail_under <= 100.0):
        msg = f"fail_under={fail_under} is invalid. Must be between 0 and 100."
        raise CoverageException(msg)

    # Special case for fail_under=100, it must really be 100.
    if fail_under == 100.0 and total != 100.0:
        return True

    return round(total, precision) < fail_under
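
A few concrete checks of the rounding behavior, using only the logic shown above:

assert should_fail_under(84.994, 85.0, 2) is True    # rounds to 84.99, under 85
assert should_fail_under(84.996, 85.0, 2) is False   # rounds to 85.00, passes
assert should_fail_under(99.99, 100.0, 2) is True    # 100 must really be 100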
Example #22
    def getregexlist(self, section, option):
        """Read a list of full-line regexes.

        The value of `section` and `option` is treated as a newline-separated
        list of regexes.  Each value is stripped of whitespace.

        Returns the list of strings.

        """
        line_list = self.get(section, option)
        value_list = []
        for value in line_list.splitlines():
            value = value.strip()
            try:
                re.compile(value)
            except re.error as e:
                raise CoverageException(
                    f"Invalid [{section}].{option} value {value!r}: {e}"
                ) from e
            if value:
                value_list.append(value)
        return value_list
Example #23
    def add(self, pattern, result):
        """Add the `pattern`/`result` pair to the list of aliases.

        `pattern` is an `fnmatch`-style pattern.  `result` is a simple
        string.  When mapping paths, if a path starts with a match against
        `pattern`, then that match is replaced with `result`.  This models
        isomorphic source trees being rooted at different places on two
        different machines.

        `pattern` can't end with a wildcard component, since that would
        match an entire tree, and not just its root.

        """
        pattern_sep = sep(pattern)

        if len(pattern) > 1:
            pattern = pattern.rstrip(r"\/")

        # The pattern can't end with a wildcard component.
        if pattern.endswith("*"):
            raise CoverageException("Pattern must not end with wildcards.")

        # The pattern is meant to match a filepath.  Let's make it absolute
        # unless it already is, or is meant to match any prefix.
        if not pattern.startswith('*') and not isabs_anywhere(pattern +
                                                              pattern_sep):
            pattern = abs_file(pattern)
        if not pattern.endswith(pattern_sep):
            pattern += pattern_sep

        # Make a regex from the pattern.
        regex = fnmatches_to_regex([pattern],
                                   case_insensitive=True,
                                   partial=True)

        # Normalize the result: it must end with a path separator.
        result_sep = sep(result)
        result = result.rstrip(r"\/") + result_sep
        self.aliases.append((regex, result))
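
A hedged sketch of remapping paths between machines with the method above; directory names are illustrative and POSIX separators are assumed:

aliases = PathAliases()
aliases.add("/jenkins/build/*/src", "src")   # wildcards allowed, just not at the end
aliases.map("/jenkins/build/1234/src/mypkg/mod.py")
# -> "src/mypkg/mod.py"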
Example #24
    def _get_file_reporter(self, morf):
        """Get a FileReporter for a module or file name."""
        plugin = None
        file_reporter = "python"

        if isinstance(morf, str):
            mapped_morf = self._file_mapper(morf)
            plugin_name = self._data.file_tracer(mapped_morf)
            if plugin_name:
                plugin = self._plugins.get(plugin_name)

                if plugin:
                    file_reporter = plugin.file_reporter(mapped_morf)
                    if file_reporter is None:
                        raise CoverageException(
                            "Plugin {!r} did not provide a file reporter for {!r}."
                            .format(plugin._coverage_plugin_name, morf))

        if file_reporter == "python":
            file_reporter = PythonFileReporter(morf, self)

        return file_reporter
Example #25
    def switch_context(self, new_context):
        """Switch to a new dynamic context.

        `new_context` is a string to use as the :ref:`dynamic context
        <dynamic_contexts>` label for collected data.  If a :ref:`static
        context <static_contexts>` is in use, the static and dynamic context
        labels will be joined together with a pipe character.

        Coverage collection must be started already.

        .. versionadded:: 5.0

        """
        if not self._started:  # pragma: part started
            raise CoverageException(
                "Cannot switch context, coverage is not started")

        if self._collector.should_start_context:
            self._warn("Conflicting dynamic contexts",
                       slug="dynamic-conflict",
                       once=True)

        self._collector.switch_context(new_context)
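
Through the public API, collection must already be started before switching; the context labels are illustrative:

import coverage

cov = coverage.Coverage()
cov.start()

cov.switch_context("test_first_thing")
...   # run the first chunk of code being labeled

cov.switch_context("test_second_thing")
...   # run the second chunk

cov.stop()
cov.save()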
Example #26
    def report(self, morfs, outfile):
        """Fake."""
        self.morfs = morfs
        outfile.write(self.output)
        if self.error:
            raise CoverageException("You asked for it!")
Example #27
    def update(self, other_data, aliases=None):
        """Update this data with data from several other :class:`CoverageData` instances.

        If `aliases` is provided, it's a `PathAliases` object that is used to
        re-map paths to match the local machine's.
        """
        if self._debug.should('dataop'):
            self._debug.write("Updating with data from {!r}".format(
                getattr(other_data, '_filename', '???'), ))
        if self._has_lines and other_data._has_arcs:
            raise CoverageException("Can't combine arc data with line data")
        if self._has_arcs and other_data._has_lines:
            raise CoverageException("Can't combine line data with arc data")

        aliases = aliases or PathAliases()

        # Force the database we're writing to to exist before we start nesting
        # contexts.
        self._start_using()

        # Collector for all arcs, lines and tracers
        other_data.read()
        with other_data._connect() as conn:
            # Get files data.
            cur = conn.execute('select path from file')
            files = {path: aliases.map(path) for (path, ) in cur}
            cur.close()

            # Get contexts data.
            cur = conn.execute('select context from context')
            contexts = [context for (context, ) in cur]
            cur.close()

            # Get arc data.
            cur = conn.execute(
                'select file.path, context.context, arc.fromno, arc.tono '
                'from arc '
                'inner join file on file.id = arc.file_id '
                'inner join context on context.id = arc.context_id')
            arcs = [(files[path], context, fromno, tono)
                    for (path, context, fromno, tono) in cur]
            cur.close()

            # Get line data.
            cur = conn.execute(
                'select file.path, context.context, line_bits.numbits '
                'from line_bits '
                'inner join file on file.id = line_bits.file_id '
                'inner join context on context.id = line_bits.context_id')
            lines = {(files[path], context): numbits
                     for (path, context, numbits) in cur}
            cur.close()

            # Get tracer data.
            cur = conn.execute('select file.path, tracer '
                               'from tracer '
                               'inner join file on file.id = tracer.file_id')
            tracers = {files[path]: tracer for (path, tracer) in cur}
            cur.close()

        with self._connect() as conn:
            conn.con.isolation_level = 'IMMEDIATE'

            # Get all tracers in the DB. Files not in the tracers are assumed
            # to have an empty string tracer. Since Sqlite does not support
            # full outer joins, we have to make two queries to fill the
            # dictionary.
            this_tracers = {
                path: ''
                for path, in conn.execute('select path from file')
            }
            this_tracers.update({
                aliases.map(path): tracer
                for path, tracer in conn.execute(
                    'select file.path, tracer from tracer '
                    'inner join file on file.id = tracer.file_id')
            })

            # Create all file and context rows in the DB.
            conn.executemany('insert or ignore into file (path) values (?)',
                             ((file, ) for file in files.values()))
            file_ids = {
                path: id
                for id, path in conn.execute('select id, path from file')
            }
            conn.executemany(
                'insert or ignore into context (context) values (?)',
                ((context, ) for context in contexts))
            context_ids = {
                context: id
                for id, context in conn.execute(
                    'select id, context from context')
            }

            # Prepare tracers, failing if a conflict is found.
            # tracer_map tracks the tracers to be inserted.
            tracer_map = {}
            for path in files.values():
                this_tracer = this_tracers.get(path)
                other_tracer = tracers.get(path, '')
                # If there is no tracer, there is always the None tracer.
                if this_tracer is not None and this_tracer != other_tracer:
                    raise CoverageException(
                        "Conflicting file tracer name for '{}': {!r} vs {!r}".
                        format(path, this_tracer, other_tracer))
                tracer_map[path] = other_tracer

            # Prepare arc and line rows to be inserted by converting the file
            # and context strings with integer ids. Then use the efficient
            # `executemany()` to insert all rows at once.
            arc_rows = ((file_ids[file], context_ids[context], fromno, tono)
                        for file, context, fromno, tono in arcs)

            # Fold in any line data already present in this data file.
            cur = conn.execute(
                'select file.path, context.context, line_bits.numbits '
                'from line_bits '
                'inner join file on file.id = line_bits.file_id '
                'inner join context on context.id = line_bits.context_id')
            for path, context, numbits in cur:
                key = (aliases.map(path), context)
                if key in lines:
                    numbits = numbits_union(lines[key], numbits)
                lines[key] = numbits
            cur.close()

            if arcs:
                self._choose_lines_or_arcs(arcs=True)

                # Write the combined data.
                conn.executemany(
                    'insert or ignore into arc '
                    '(file_id, context_id, fromno, tono) values (?, ?, ?, ?)',
                    arc_rows)

            if lines:
                self._choose_lines_or_arcs(lines=True)
                conn.execute("delete from line_bits")
                conn.executemany(
                    "insert into line_bits "
                    "(file_id, context_id, numbits) values (?, ?, ?)",
                    [(file_ids[file], context_ids[context], numbits)
                     for (file, context), numbits in lines.items()])
            conn.executemany(
                'insert or ignore into tracer (file_id, tracer) values (?, ?)',
                ((file_ids[filename], tracer)
                 for filename, tracer in tracer_map.items()))

        # Update all internal cache data.
        self._reset()
        self.read()
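
A hedged sketch of combining two data files with the method above; the file names are illustrative and the PathAliases step is optional:

from coverage import CoverageData
from coverage.files import PathAliases   # import location assumed

combined = CoverageData(basename=".coverage.machine1")
other = CoverageData(basename=".coverage.machine2")
combined.read()

aliases = PathAliases()
aliases.add("/builder/*/src", "src")     # remap the other machine's paths if needed

combined.update(other, aliases=aliases)
combined.write()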
Example #28
    def run(self):
        """Run the Python code!"""

        self._prepare2()

        # Create a module to serve as __main__
        main_mod = types.ModuleType('__main__')

        from_pyc = self.arg0.endswith((".pyc", ".pyo"))
        main_mod.__file__ = self.arg0
        if from_pyc:
            main_mod.__file__ = main_mod.__file__[:-1]
        if self.package is not None:
            main_mod.__package__ = self.package
        main_mod.__loader__ = self.loader
        if self.spec is not None:
            main_mod.__spec__ = self.spec

        main_mod.__builtins__ = sys.modules['builtins']

        sys.modules['__main__'] = main_mod

        # Set sys.argv properly.
        sys.argv = self.args

        try:
            # Make a code object somehow.
            if from_pyc:
                code = make_code_from_pyc(self.arg0)
            else:
                code = make_code_from_py(self.arg0)
        except CoverageException:
            raise
        except Exception as exc:
            msg = f"Couldn't run '{self.arg0}' as Python code: {exc.__class__.__name__}: {exc}"
            raise CoverageException(msg) from exc

        # Execute the code object.
        # Return to the original directory in case the test code exits in
        # a non-existent directory.
        cwd = os.getcwd()
        try:
            exec(code, main_mod.__dict__)
        except SystemExit:  # pylint: disable=try-except-raise
            # The user called sys.exit().  Just pass it along to the upper
            # layers, where it will be handled.
            raise
        except Exception:
            # Something went wrong while executing the user code.
            # Get the exc_info, and pack them into an exception that we can
            # throw up to the outer loop.  We peel one layer off the traceback
            # so that the coverage.py code doesn't appear in the final printed
            # traceback.
            typ, err, tb = sys.exc_info()

            # PyPy3 weirdness.  If I don't access __context__, then somehow it
            # is non-None when the exception is reported at the upper layer,
            # and a nested exception is shown to the user.  This getattr fixes
            # it somehow? https://bitbucket.org/pypy/pypy/issue/1903
            getattr(err, '__context__', None)

            # Call the excepthook.
            try:
                err.__traceback__ = err.__traceback__.tb_next
                sys.excepthook(typ, err, tb.tb_next)
            except SystemExit:  # pylint: disable=try-except-raise
                raise
            except Exception as exc:
                # Getting the output right in the case of excepthook
                # shenanigans is kind of involved.
                sys.stderr.write("Error in sys.excepthook:\n")
                typ2, err2, tb2 = sys.exc_info()
                err2.__suppress_context__ = True
                err2.__traceback__ = err2.__traceback__.tb_next
                sys.__excepthook__(typ2, err2, tb2.tb_next)
                sys.stderr.write("\nOriginal exception was:\n")
                raise _ExceptionDuringRun(typ, err, tb.tb_next) from exc
            else:
                sys.exit(1)
        finally:
            os.chdir(cwd)
Example #29
    def report(self, morfs, outfile=None):
        """Writes a report summarizing coverage statistics per module.

        `outfile` is a file object to write the summary to. It must be opened
        for native strings (bytes on Python 2, Unicode on Python 3).

        """
        self.outfile = outfile or sys.stdout

        self.coverage.get_data().set_query_contexts(
            self.config.report_contexts)
        for fr, analysis in get_analysis_to_report(self.coverage, morfs):
            self.report_one_file(fr, analysis)

        # Prepare the formatting strings, header, and column sorting.
        max_name = max([
            len(fr.relative_filename()) for (fr, analysis) in self.fr_analysis
        ] + [5])
        fmt_name = "%%- %ds  " % max_name
        fmt_skip_covered = "\n%s file%s skipped due to complete coverage."
        fmt_skip_empty = "\n%s empty file%s skipped."

        header = (fmt_name % "Name") + " Stmts   Miss"
        fmt_coverage = fmt_name + "%6d %6d"
        if self.branches:
            header += " Branch BrPart"
            fmt_coverage += " %6d %6d"
        width100 = Numbers(precision=self.config.precision).pc_str_width()
        header += "%*s" % (width100 + 4, "Cover")
        fmt_coverage += "%%%ds%%%%" % (width100 + 3, )
        if self.config.show_missing:
            header += "   Missing"
            fmt_coverage += "   %s"
        rule = "-" * len(header)

        column_order = dict(name=0, stmts=1, miss=2, cover=-1)
        if self.branches:
            column_order.update(dict(branch=3, brpart=4))

        # Write the header
        self.writeout(header)
        self.writeout(rule)

        # `lines` is a list of pairs, (line text, line values).  The line text
        # is a string that will be printed, and line values is a tuple of
        # sortable values.
        lines = []

        for (fr, analysis) in self.fr_analysis:
            nums = analysis.numbers

            args = (fr.relative_filename(), nums.n_statements, nums.n_missing)
            if self.branches:
                args += (nums.n_branches, nums.n_partial_branches)
            args += (nums.pc_covered_str, )
            if self.config.show_missing:
                args += (analysis.missing_formatted(branches=True), )
            text = fmt_coverage % args
            # Add numeric percent coverage so that sorting makes sense.
            args += (nums.pc_covered, )
            lines.append((text, args))

        # Sort the lines and write them out.
        sort_option = (self.config.sort or "name").lower()
        reverse = False
        if sort_option[0] == '-':
            reverse = True
            sort_option = sort_option[1:]
        elif sort_option[0] == '+':
            sort_option = sort_option[1:]

        if sort_option == "name":
            lines = human_sorted_items(lines, reverse=reverse)
        else:
            position = column_order.get(sort_option)
            if position is None:
                raise CoverageException(
                    f"Invalid sorting option: {self.config.sort!r}")
            lines.sort(key=lambda l: (l[1][position], l[0]), reverse=reverse)

        for line in lines:
            self.writeout(line[0])

        # Write a TOTAL line if we had at least one file.
        if self.total.n_files > 0:
            self.writeout(rule)
            args = ("TOTAL", self.total.n_statements, self.total.n_missing)
            if self.branches:
                args += (self.total.n_branches, self.total.n_partial_branches)
            args += (self.total.pc_covered_str, )
            if self.config.show_missing:
                args += ("", )
            self.writeout(fmt_coverage % args)

        # Write other final lines.
        if not self.total.n_files and not self.skipped_count:
            raise CoverageException("No data to report.")

        if self.config.skip_covered and self.skipped_count:
            self.writeout(
                fmt_skip_covered %
                (self.skipped_count, 's' if self.skipped_count > 1 else ''))
        if self.config.skip_empty and self.empty_count:
            self.writeout(
                fmt_skip_empty %
                (self.empty_count, 's' if self.empty_count > 1 else ''))

        return self.total.n_statements and self.total.pc_covered
Example #30
    def __init__(
        self,
        should_trace,
        check_include,
        should_start_context,
        file_mapper,
        timid,
        branch,
        warn,
        concurrency,
    ):
        """Create a collector.

        `should_trace` is a function, taking a file name and a frame, and
        returning a `coverage.FileDisposition` object.

        `check_include` is a function taking a file name and a frame. It returns
        a boolean: True if the file should be traced, False if not.

        `should_start_context` is a function taking a frame, and returning a
        string. If the frame should be the start of a new context, the string
        is the new context. If the frame should not be the start of a new
        context, return None.

        `file_mapper` is a function taking a filename, and returning a Unicode
        filename.  The result is the name that will be recorded in the data
        file.

        If `timid` is true, then a slower, simpler trace function will be
        used.  This is important for some environments where manipulation of
        tracing functions makes the faster, more sophisticated trace function
        not operate properly.

        If `branch` is true, then branches will be measured.  This involves
        collecting data on which statements followed each other (arcs).  Use
        `get_arc_data` to get the arc data.

        `warn` is a warning function, taking a single string message argument
        and an optional slug argument which will be a string or None, to be
        used if a warning needs to be issued.

        `concurrency` is a list of strings indicating the concurrency libraries
        in use.  Valid values are "greenlet", "eventlet", "gevent", or "thread"
        (the default).  Of these four values, only one can be supplied.  Other
        values are ignored.

        """
        self.should_trace = should_trace
        self.check_include = check_include
        self.should_start_context = should_start_context
        self.file_mapper = file_mapper
        self.warn = warn
        self.branch = branch
        self.threading = None
        self.covdata = None

        self.static_context = None

        self.origin = short_stack()

        self.concur_id_func = None
        self.mapped_file_cache = {}

        # We can handle a few concurrency options here, but only one at a time.
        these_concurrencies = self.SUPPORTED_CONCURRENCIES.intersection(
            concurrency)
        if len(these_concurrencies) > 1:
            raise CoverageException(
                f"Conflicting concurrency settings: {concurrency}")
        self.concurrency = these_concurrencies.pop() if these_concurrencies else ''

        try:
            if self.concurrency == "greenlet":
                import greenlet
                self.concur_id_func = greenlet.getcurrent
            elif self.concurrency == "eventlet":
                import eventlet.greenthread  # pylint: disable=import-error,useless-suppression
                self.concur_id_func = eventlet.greenthread.getcurrent
            elif self.concurrency == "gevent":
                import gevent  # pylint: disable=import-error,useless-suppression
                self.concur_id_func = gevent.getcurrent
            elif self.concurrency == "thread" or not self.concurrency:
                # It's important to import threading only if we need it.  If
                # it's imported early, and the program being measured uses
                # gevent, then gevent's monkey-patching won't work properly.
                import threading
                self.threading = threading
            else:
                raise CoverageException(
                    f"Don't understand concurrency={concurrency}")
        except ImportError as ex:
            raise CoverageException(
                "Couldn't trace with concurrency={}, the module isn't installed."
                .format(self.concurrency, )) from ex

        self.reset()

        if timid:
            # Being timid: use the simple Python trace function.
            self._trace_class = PyTracer
        else:
            # Being fast: use the C Tracer if it is available, else the Python
            # trace function.
            self._trace_class = CTracer or PyTracer

        if self._trace_class is CTracer:
            self.file_disposition_class = CFileDisposition
            self.supports_plugins = True
        else:
            self.file_disposition_class = FileDisposition
            self.supports_plugins = False