def add_file_tracers(self, file_tracers):
    """Add per-file plugin information.

    `file_tracers` is { filename: plugin_name, ... }

    """
    if self._debug.should('dataop'):
        self._debug.write("Adding file tracers: %d files" % (len(file_tracers),))
    if not file_tracers:
        return
    self._start_using()
    with self._connect() as con:
        for filename, plugin_name in iitems(file_tracers):
            file_id = self._file_id(filename)
            if file_id is None:
                raise CoverageException(
                    "Can't add file tracer data for unmeasured file '%s'" % (filename,)
                )

            existing_plugin = self.file_tracer(filename)
            if existing_plugin:
                if existing_plugin != plugin_name:
                    raise CoverageException(
                        "Conflicting file tracer name for '%s': %r vs %r" % (
                            filename, existing_plugin, plugin_name,
                        )
                    )
            elif plugin_name:
                con.execute(
                    "insert into tracer (file_id, tracer) values (?, ?)",
                    (file_id, plugin_name)
                )
def _read_db(self):
    """Read the metadata from a database so that we are ready to use it."""
    with self._dbs[get_thread_id()] as db:
        try:
            schema_version, = db.execute_one("select version from coverage_schema")
        except Exception as exc:
            raise CoverageException(
                "Data file {!r} doesn't seem to be a coverage data file: {}".format(
                    self._filename, exc
                )
            )
        else:
            if schema_version != SCHEMA_VERSION:
                raise CoverageException(
                    "Couldn't use data file {!r}: wrong schema: {} instead of {}".format(
                        self._filename, schema_version, SCHEMA_VERSION
                    )
                )

        for row in db.execute("select value from meta where key = 'has_arcs'"):
            self._has_arcs = bool(int(row[0]))
            self._has_lines = not self._has_arcs

        for path, file_id in db.execute("select path, id from file"):
            self._file_map[path] = file_id
def get_option(self, option_name):
    """Get an option from the configuration.

    `option_name` is a colon-separated string indicating the section and
    option name.  For example, the ``branch`` option in the ``[run]``
    section of the config file would be indicated with `"run:branch"`.

    Returns the value of the option.

    """
    # Special-cased options.
    if option_name == "paths":
        return self.paths

    # Check all the hard-coded options.
    for option_spec in self.CONFIG_FILE_OPTIONS:
        attr, where = option_spec[:2]
        if where == option_name:
            return getattr(self, attr)

    # See if it's a plugin option.
    plugin_name, _, key = option_name.partition(":")
    if key and plugin_name in self.plugins:
        return self.plugin_options.get(plugin_name, {}).get(key)

    # If we get here, we didn't find the option.
    raise CoverageException("No such option: %r" % option_name)
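
# Usage sketch of the lookup above, via the public Coverage API.  The first
# two option names are real coverage.py settings; "myplugin:key" is a
# hypothetical plugin option shown only to illustrate the third branch.
import coverage

cov = coverage.Coverage()
cov.get_option("run:branch")     # hard-coded option -> True/False
cov.get_option("paths")          # special-cased -> mapping of path lists
cov.get_option("myplugin:key")   # plugin option, if 'myplugin' is loaded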
def read(self, filenames):
    # RawConfigParser takes a filename or list of filenames, but we only
    # ever call this with a single filename.
    assert isinstance(filenames, path_types)
    filename = filenames
    if env.PYVERSION >= (3, 6):
        filename = os.fspath(filename)

    try:
        with io.open(filename, encoding='utf-8') as fp:
            toml_text = fp.read()
    except IOError:
        return []
    if toml:
        toml_text = substitute_variables(toml_text, os.environ)
        try:
            self.data = toml.loads(toml_text)
        except toml.TomlDecodeError as err:
            raise TomlDecodeError(*err.args)
        return [filename]
    else:
        has_toml = re.search(r"^\[tool\.coverage\.", toml_text, flags=re.MULTILINE)
        if self.our_file or has_toml:
            # Looks like they meant to read TOML, but we can't read it.
            msg = "Can't read {!r} without TOML support. Install with [toml] extra"
            raise CoverageException(msg.format(filename))
        return []
def report(self, morfs):
    """Generate an HTML report for `morfs`.

    `morfs` is a list of modules or file names.

    """
    # Read the status data and check that this run used the same
    # global data as the last run.
    self.incr.read()
    self.incr.check_global_data(self.config, self.pyfile_html_source)

    # Process all the files.
    for fr, analysis in get_analysis_to_report(self.coverage, morfs):
        self.html_file(fr, analysis)

    if not self.all_files_nums:
        raise CoverageException("No data to report.")

    self.totals = sum(self.all_files_nums)

    # Write the index file.
    self.index_file()

    self.make_local_static_report_files()
    return self.totals.n_statements and self.totals.pc_covered
def data_filename(fname, pkgdir=""):
    """Return the path to a data file of ours.

    The file is searched for on `STATIC_PATH`, and the first place it's
    found is returned.

    Each directory in `STATIC_PATH` is searched as-is, and also, if `pkgdir`
    is provided, at that sub-directory.

    """
    tried = []
    for static_dir in STATIC_PATH:
        static_filename = os.path.join(static_dir, fname)
        if os.path.exists(static_filename):
            return static_filename
        else:
            tried.append(static_filename)
        if pkgdir:
            static_filename = os.path.join(static_dir, pkgdir, fname)
            if os.path.exists(static_filename):
                return static_filename
            else:
                tried.append(static_filename)
    raise CoverageException(
        "Couldn't find static file %r from %r, tried: %r" % (fname, os.getcwd(), tried)
    )
def _choose_lines_or_arcs(self, lines=False, arcs=False):
    """Force the data file to choose between lines and arcs."""
    assert lines or arcs
    assert not (lines and arcs)
    if lines and self._has_arcs:
        raise CoverageException("Can't add line measurements to existing branch data")
    if arcs and self._has_lines:
        raise CoverageException("Can't add branch measurements to existing line data")
    if not self._has_arcs and not self._has_lines:
        self._has_lines = lines
        self._has_arcs = arcs
        with self._connect() as con:
            con.execute(
                "insert into meta (key, value) values (?, ?)",
                ('has_arcs', str(int(arcs)))
            )
def _start_tracer(self):
    """Start a new Tracer object, and store it in self.tracers."""
    tracer = self._trace_class()
    tracer.data = self.data
    tracer.trace_arcs = self.branch
    tracer.should_trace = self.should_trace
    tracer.should_trace_cache = self.should_trace_cache
    tracer.warn = self.warn

    if hasattr(tracer, 'concur_id_func'):
        tracer.concur_id_func = self.concur_id_func
    elif self.concur_id_func:
        raise CoverageException(
            "Can't support concurrency=%s with %s, only threads are supported" % (
                self.concurrency, self.tracer_name(),
            )
        )

    if hasattr(tracer, 'file_tracers'):
        tracer.file_tracers = self.file_tracers
    if hasattr(tracer, 'threading'):
        tracer.threading = self.threading
    if hasattr(tracer, 'check_include'):
        tracer.check_include = self.check_include
    if hasattr(tracer, 'should_start_context'):
        tracer.should_start_context = self.should_start_context
        tracer.switch_context = self.switch_context
    if hasattr(tracer, 'disable_plugin'):
        tracer.disable_plugin = self.disable_plugin

    fn = tracer.start()
    self.tracers.append(tracer)

    return fn
def execute(self, sql, parameters=()):
    """Same as :meth:`python:sqlite3.Connection.execute`."""
    if self.debug:
        tail = " with {!r}".format(parameters) if parameters else ""
        self.debug.write("Executing {!r}{}".format(sql, tail))
    try:
        try:
            return self.con.execute(sql, parameters)
        except Exception:
            # In some cases, an error might happen that isn't really an
            # error.  Try again immediately.
            # https://github.com/nedbat/coveragepy/issues/1010
            return self.con.execute(sql, parameters)
    except sqlite3.Error as exc:
        msg = str(exc)
        try:
            # `execute` is the first thing we do with the database, so try
            # hard to provide useful hints if something goes wrong now.
            with open(self.filename, "rb") as bad_file:
                cov4_sig = b"!coverage.py: This is a private format"
                if bad_file.read(len(cov4_sig)) == cov4_sig:
                    msg = (
                        "Looks like a coverage 4.x data file. "
                        "Are you mixing versions of coverage?"
                    )
        except Exception:
            pass
        if self.debug:
            self.debug.write("EXCEPTION from execute: {}".format(msg))
        raise CoverageException("Couldn't use data file {!r}: {}".format(self.filename, msg))
def load_plugins(cls, modules, config, debug=None):
    """Load plugins from `modules`.

    Returns a Plugins object with the loaded and configured plugins.

    """
    plugins = cls()
    plugins.debug = debug

    for module in modules:
        plugins.current_module = module
        __import__(module)
        mod = sys.modules[module]

        coverage_init = getattr(mod, "coverage_init", None)
        if not coverage_init:
            raise CoverageException(
                "Plugin module %r didn't define a coverage_init function" % module
            )

        options = config.get_plugin_options(module)
        coverage_init(plugins, options)

    plugins.current_module = None
    return plugins
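
# A minimal plugin module that satisfies the contract enforced above.  This is
# a hedged sketch: the module and class names are hypothetical, but
# `coverage_init` is the documented entry point, and `coverage.CoveragePlugin`
# and `reg.add_file_tracer` are part of the public plugin API.

import coverage


class SketchPlugin(coverage.CoveragePlugin):
    def file_tracer(self, filename):
        return None     # decline every file in this sketch


def coverage_init(reg, options):
    # `options` is the dict produced by get_plugin_options() above.
    reg.add_file_tracer(SketchPlugin())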
def getregexlist(self, section, option):
    """Read a list of full-line regexes from a TOML value, validating each."""
    name, values = self._get(section, option)
    self._check_type(name, option, values, list, "a list")
    for value in values:
        value = value.strip()
        try:
            re.compile(value)
        except re.error as e:
            raise CoverageException("Invalid [%s].%s value %r: %s" % (name, option, value, e))
    return values
def source_for_morf(morf):
    """Get the source filename for the module-or-file `morf`."""
    if hasattr(morf, '__file__') and morf.__file__:
        filename = morf.__file__
    elif isinstance(morf, types.ModuleType):
        # A module should have had .__file__, otherwise we can't use it.
        # This could be a PEP-420 namespace package.
        raise CoverageException("Module {} has no file".format(morf))
    else:
        filename = morf

    filename = source_for_file(files.unicode_filename(filename))
    return filename
def read_coverage_config(config_file, **kwargs):
    """Read the coverage.py configuration.

    Arguments:
        config_file: a boolean or string, see the `Coverage` class for the
            tricky details.
        all others: keyword arguments from the `Coverage` class, used for
            setting values in the configuration.

    Returns:
        config:
            config is a CoverageConfig object read from the appropriate
            configuration file.

    """
    # Build the configuration from a number of sources:
    # 1) defaults:
    config = CoverageConfig()

    # 2) from a file:
    if config_file:
        files_to_try = config_files_to_try(config_file)

        for fname, our_file, specified_file in files_to_try:
            config_read = config.from_file(fname, our_file=our_file)
            if config_read:
                break
            if specified_file:
                raise CoverageException("Couldn't read '%s' as a config file" % fname)

    # $set_env.py: COVERAGE_DEBUG - Options for --debug.
    # 3) from environment variables:
    env_data_file = os.environ.get('COVERAGE_FILE')
    if env_data_file:
        config.data_file = env_data_file
    debugs = os.environ.get('COVERAGE_DEBUG')
    if debugs:
        config.debug.extend(d.strip() for d in debugs.split(","))

    # 4) from constructor arguments:
    config.from_args(**kwargs)

    # Once all the config has been collected, there's a little post-processing
    # to do.
    config.post_process()

    return config
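
# A sketch of the precedence the four steps above produce, using the public
# Coverage constructor (file names here are illustrative):
import os
import coverage

os.environ["COVERAGE_FILE"] = "/tmp/.coverage.env"   # step 3 beats the config file
cov = coverage.Coverage(config_file=".coveragerc")   # step 2 supplies base values
# A constructor argument would beat both (step 4):
# cov = coverage.Coverage(data_file="/tmp/.coverage.arg")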
def execute_one(self, sql, parameters=()):
    """Execute a statement and return the one row that results.

    This is like execute(sql, parameters).fetchone(), except it is
    correct in reading the entire result set.  This will raise an
    exception if more than one row results.

    Returns a row, or None if there were no rows.
    """
    rows = list(self.execute(sql, parameters))
    if len(rows) == 0:
        return None
    elif len(rows) == 1:
        return rows[0]
    else:
        raise CoverageException("Sql {!r} shouldn't return {} rows".format(sql, len(rows)))
def get_analysis_to_report(coverage, morfs):
    """Get the files to report on.

    For each morf in `morfs`, if it should be reported on (based on the omit
    and include configuration options), yield a pair, the `FileReporter` and
    `Analysis` for the morf.

    """
    file_reporters = coverage._get_file_reporters(morfs)
    config = coverage.config

    if config.report_include:
        matcher = FnmatchMatcher(prep_patterns(config.report_include))
        file_reporters = [fr for fr in file_reporters if matcher.match(fr.filename)]

    if config.report_omit:
        matcher = FnmatchMatcher(prep_patterns(config.report_omit))
        file_reporters = [fr for fr in file_reporters if not matcher.match(fr.filename)]

    if not file_reporters:
        raise CoverageException("No data to report.")

    for fr in sorted(file_reporters):
        try:
            analysis = coverage._analyze(fr)
        except NoSource:
            if not config.ignore_errors:
                raise
        except NotPython:
            # Only report errors for .py files, and only if we didn't
            # explicitly suppress those errors.
            # NotPython is only raised by PythonFileReporter, which has a
            # should_be_python() method.
            if fr.should_be_python():
                if config.ignore_errors:
                    msg = "Couldn't parse Python file '{}'".format(fr.filename)
                    coverage._warn(msg, slug="couldnt-parse")
                else:
                    raise
        else:
            yield (fr, analysis)
def touch_files(self, filenames, plugin_name=""):
    """Ensure that `filenames` appear in the data, empty if needed.

    `plugin_name` is the name of the plugin responsible for these files.
    It is used to associate the right filereporter, etc.
    """
    if self._debug.should('dataop'):
        self._debug.write("Touching %r" % (filenames,))
    self._start_using()
    with self._connect():   # Use this to get one transaction.
        if not self._has_arcs and not self._has_lines:
            raise CoverageException("Can't touch files in an empty CoverageData")

        for filename in filenames:
            self._file_id(filename, add=True)
            if plugin_name:
                # Set the tracer for this file.
                self.add_file_tracers({filename: plugin_name})
def should_fail_under(total, fail_under, precision):
    """Determine if a total should fail due to fail-under.

    `total` is a float, the coverage measurement total.  `fail_under` is the
    fail_under setting to compare with.  `precision` is the number of digits
    to consider after the decimal point.

    Returns True if the total should fail.

    """
    # We can never achieve higher than 100% coverage, or less than zero.
    if not (0 <= fail_under <= 100.0):
        msg = "fail_under={} is invalid. Must be between 0 and 100.".format(fail_under)
        raise CoverageException(msg)

    # Special case for fail_under=100, it must really be 100.
    if fail_under == 100.0 and total != 100.0:
        return True

    return round(total, precision) < fail_under
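
# Worked examples of the rounding rule above; a sketch, not part of the module:
assert should_fail_under(79.99, 80, 0) is False   # rounds up to 80.0, passes
assert should_fail_under(79.99, 80, 2) is True    # 79.99 < 80, fails
assert should_fail_under(99.999, 100, 2) is True  # fail_under=100 demands exactly 100.0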
def getregexlist(self, section, option):
    """Read a list of full-line regexes.

    The value of `section` and `option` is treated as a newline-separated
    list of regexes.  Each value is stripped of whitespace.

    Returns the list of strings.

    """
    line_list = self.get(section, option)
    value_list = []
    for value in line_list.splitlines():
        value = value.strip()
        try:
            re.compile(value)
        except re.error as e:
            raise CoverageException(
                "Invalid [%s].%s value %r: %s" % (section, option, value, e)
            )
        if value:
            value_list.append(value)
    return value_list
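
# For example, given a config file like this (a typical use is the real
# `exclude_lines` setting; the parser instance `cp` is assumed):
#
#   [report]
#   exclude_lines =
#       pragma: no cover
#       def __repr__
#
# cp.getregexlist("report", "exclude_lines") returns
# ["pragma: no cover", "def __repr__"], raising on any invalid regex.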
def _get_file_reporter(self, morf):
    """Get a FileReporter for a module or file name."""
    plugin = None
    file_reporter = "python"

    if isinstance(morf, string_class):
        mapped_morf = self._file_mapper(morf)
        plugin_name = self._data.file_tracer(mapped_morf)
        if plugin_name:
            plugin = self._plugins.get(plugin_name)

            if plugin:
                file_reporter = plugin.file_reporter(mapped_morf)
                if file_reporter is None:
                    raise CoverageException(
                        "Plugin %r did not provide a file reporter for %r." % (
                            plugin._coverage_plugin_name, morf
                        )
                    )

    if file_reporter == "python":
        file_reporter = PythonFileReporter(morf, self)

    return file_reporter
def add(self, pattern, result):
    """Add the `pattern`/`result` pair to the list of aliases.

    `pattern` is an `fnmatch`-style pattern.  `result` is a simple
    string.  When mapping paths, if a path starts with a match against
    `pattern`, then that match is replaced with `result`.  This models
    isomorphic source trees being rooted at different places on two
    different machines.

    `pattern` can't end with a wildcard component, since that would
    match an entire tree, and not just its root.

    """
    pattern_sep = sep(pattern)

    if len(pattern) > 1:
        pattern = pattern.rstrip(r"\/")

    # The pattern can't end with a wildcard component.
    if pattern.endswith("*"):
        raise CoverageException("Pattern must not end with wildcards.")

    # The pattern is meant to match a filepath.  Let's make it absolute
    # unless it already is, or is meant to match any prefix.
    if not pattern.startswith('*') and not isabs_anywhere(pattern + pattern_sep):
        pattern = abs_file(pattern)
    if not pattern.endswith(pattern_sep):
        pattern += pattern_sep

    # Make a regex from the pattern.
    regex = fnmatches_to_regex([pattern], case_insensitive=True, partial=True)

    # Normalize the result: it must end with a path separator.
    result_sep = sep(result)
    result = result.rstrip(r"\/") + result_sep
    self.aliases.append((regex, result))
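
# Sketch of mapping a remote tree onto a local one with the method above
# (paths are illustrative; `map` is this class's companion method):
aliases = PathAliases()
aliases.add("/jenkins/build/*/src", "src")
# aliases.map("/jenkins/build/1234/src/module.py") -> "src/module.py"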
def switch_context(self, new_context):
    """Switch to a new dynamic context.

    `new_context` is a string to use as the :ref:`dynamic context
    <dynamic_contexts>` label for collected data.  If a :ref:`static
    context <static_contexts>` is in use, the static and dynamic context
    labels will be joined together with a pipe character.

    Coverage collection must be started already.

    .. versionadded:: 5.0

    """
    if not self._started:                           # pragma: part started
        raise CoverageException("Cannot switch context, coverage is not started")

    if self._collector.should_start_context:
        self._warn("Conflicting dynamic contexts", slug="dynamic-conflict", once=True)

    self._collector.switch_context(new_context)
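
# Usage sketch of the public API documented above (label strings are
# illustrative):
import coverage

cov = coverage.Coverage()
cov.start()
cov.switch_context("test_one")    # data collected now is labeled "test_one"
# ... exercise the first test ...
cov.switch_context("test_two")
# ... exercise the second test ...
cov.stop()
cov.save()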
def loads(self, data):
    """Deserialize data from :meth:`dumps`.

    Use with a newly-created empty :class:`CoverageData` object.  It's
    undefined what happens if the object already has data in it.

    Arguments:
        data: A byte string of serialized data produced by :meth:`dumps`.

    .. versionadded:: 5.0

    """
    if self._debug.should('dataio'):
        self._debug.write("Loading data into data file {!r}".format(self._filename))
    if data[:1] != b'z':
        raise CoverageException(
            "Unrecognized serialization: {!r} (head of {} bytes)".format(data[:40], len(data))
        )
    script = to_string(zlib.decompress(data[1:]))
    self._dbs[get_thread_id()] = db = SqliteDb(self._filename, self._debug)
    with db:
        db.executescript(script)
    self._read_db()
    self._have_used = True
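
# Round-trip sketch pairing loads() with dumps() (suffix names are
# illustrative; both methods are public as of 5.0):
from coverage import CoverageData

data1 = CoverageData(suffix="worker1")
data1.read()                  # assumes a data file already exists on disk
blob = data1.dumps()          # b'z' + a zlib-compressed SQL script

data2 = CoverageData(suffix="combined")
data2.loads(blob)             # must be a fresh, empty CoverageData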
def _init_for_start(self):
    """Initialization for start()"""
    # Construct the collector.
    concurrency = self.config.concurrency or ()
    if "multiprocessing" in concurrency:
        if not patch_multiprocessing:
            raise CoverageException(                    # pragma: only jython
                "multiprocessing is not supported on this Python"
            )
        patch_multiprocessing(rcfile=self.config.config_file)

    dycon = self.config.dynamic_context
    if not dycon or dycon == "none":
        context_switchers = []
    elif dycon == "test_function":
        context_switchers = [should_start_context_test_function]
    else:
        raise CoverageException("Don't understand dynamic_context setting: {!r}".format(dycon))

    context_switchers.extend(
        plugin.dynamic_context for plugin in self._plugins.context_switchers
    )

    should_start_context = combine_context_switchers(context_switchers)

    self._collector = Collector(
        should_trace=self._should_trace,
        check_include=self._check_include_omit_etc,
        should_start_context=should_start_context,
        file_mapper=self._file_mapper,
        timid=self.config.timid,
        branch=self.config.branch,
        warn=self._warn,
        concurrency=concurrency,
    )

    suffix = self._data_suffix_specified
    if suffix or self.config.parallel:
        if not isinstance(suffix, string_class):
            # if data_suffix=True, use .machinename.pid.random
            suffix = True
    else:
        suffix = None

    self._init_data(suffix)

    self._collector.use_data(self._data, self.config.context)

    # Early warning if we aren't going to be able to support plugins.
    if self._plugins.file_tracers and not self._collector.supports_plugins:
        self._warn(
            "Plugin file tracers (%s) aren't supported with %s" % (
                ", ".join(
                    plugin._coverage_plugin_name
                    for plugin in self._plugins.file_tracers
                ),
                self._collector.tracer_name(),
            )
        )
        for plugin in self._plugins.file_tracers:
            plugin._coverage_enabled = False

    # Create the file classifying substructure.
    self._inorout = InOrOut(
        warn=self._warn,
        debug=(self._debug if self._debug.should('trace') else None),
    )
    self._inorout.configure(self.config)
    self._inorout.plugins = self._plugins
    self._inorout.disp_class = self._collector.file_disposition_class

    # It's useful to write debug info after initing for start.
    self._should_write_debug = True

    atexit.register(self._atexit)
def run(self):
    """Run the Python code!"""
    self._prepare2()

    # Create a module to serve as __main__
    main_mod = types.ModuleType('__main__')

    from_pyc = self.arg0.endswith((".pyc", ".pyo"))
    main_mod.__file__ = self.arg0
    if from_pyc:
        main_mod.__file__ = main_mod.__file__[:-1]
    if self.package is not None:
        main_mod.__package__ = self.package
    main_mod.__loader__ = self.loader
    if self.spec is not None:
        main_mod.__spec__ = self.spec

    main_mod.__builtins__ = BUILTINS

    sys.modules['__main__'] = main_mod

    # Set sys.argv properly.
    sys.argv = self.args

    try:
        # Make a code object somehow.
        if from_pyc:
            code = make_code_from_pyc(self.arg0)
        else:
            code = make_code_from_py(self.arg0)
    except CoverageException:
        raise
    except Exception as exc:
        msg = "Couldn't run '{filename}' as Python code: {exc.__class__.__name__}: {exc}"
        raise CoverageException(msg.format(filename=self.arg0, exc=exc))

    # Execute the code object.
    # Return to the original directory in case the test code exits in
    # a non-existent directory.
    cwd = os.getcwd()
    try:
        exec(code, main_mod.__dict__)
    except SystemExit:                          # pylint: disable=try-except-raise
        # The user called sys.exit().  Just pass it along to the upper
        # layers, where it will be handled.
        raise
    except Exception:
        # Something went wrong while executing the user code.
        # Get the exc_info, and pack them into an exception that we can
        # throw up to the outer loop.  We peel one layer off the traceback
        # so that the coverage.py code doesn't appear in the final printed
        # traceback.
        typ, err, tb = sys.exc_info()

        # PyPy3 weirdness.  If I don't access __context__, then somehow it
        # is non-None when the exception is reported at the upper layer,
        # and a nested exception is shown to the user.  This getattr fixes
        # it somehow? https://bitbucket.org/pypy/pypy/issue/1903
        getattr(err, '__context__', None)

        # Call the excepthook.
        try:
            if hasattr(err, "__traceback__"):
                err.__traceback__ = err.__traceback__.tb_next
            sys.excepthook(typ, err, tb.tb_next)
        except SystemExit:                      # pylint: disable=try-except-raise
            raise
        except Exception:
            # Getting the output right in the case of excepthook
            # shenanigans is kind of involved.
            sys.stderr.write("Error in sys.excepthook:\n")
            typ2, err2, tb2 = sys.exc_info()
            err2.__suppress_context__ = True
            if hasattr(err2, "__traceback__"):
                err2.__traceback__ = err2.__traceback__.tb_next
            sys.__excepthook__(typ2, err2, tb2.tb_next)
            sys.stderr.write("\nOriginal exception was:\n")
            raise ExceptionDuringRun(typ, err, tb.tb_next)
        else:
            sys.exit(1)
    finally:
        os.chdir(cwd)
def __init__(
    self, should_trace, check_include, should_start_context, file_mapper,
    timid, branch, warn, concurrency,
):
    """Create a collector.

    `should_trace` is a function, taking a file name and a frame, and
    returning a `coverage.FileDisposition object`.

    `check_include` is a function taking a file name and a frame.  It returns
    a boolean: True if the file should be traced, False if not.

    `should_start_context` is a function taking a frame, and returning a
    string.  If the frame should be the start of a new context, the string
    is the new context.  If the frame should not be the start of a new
    context, return None.

    `file_mapper` is a function taking a filename, and returning a Unicode
    filename.  The result is the name that will be recorded in the data
    file.

    If `timid` is true, then a slower simpler trace function will be
    used.  This is important for some environments where manipulation of
    tracing functions make the faster more sophisticated trace function not
    operate properly.

    If `branch` is true, then branches will be measured.  This involves
    collecting data on which statements followed each other (arcs).  Use
    `get_arc_data` to get the arc data.

    `warn` is a warning function, taking a single string message argument
    and an optional slug argument which will be a string or None, to be
    used if a warning needs to be issued.

    `concurrency` is a list of strings indicating the concurrency libraries
    in use.  Valid values are "greenlet", "eventlet", "gevent", or "thread"
    (the default).  Of these four values, only one can be supplied.  Other
    values are ignored.

    """
    self.should_trace = should_trace
    self.check_include = check_include
    self.should_start_context = should_start_context
    self.file_mapper = file_mapper
    self.warn = warn
    self.branch = branch
    self.threading = None
    self.covdata = None

    self.static_context = None

    self.origin = short_stack()

    self.concur_id_func = None
    self.mapped_file_cache = {}

    # We can handle a few concurrency options here, but only one at a time.
    these_concurrencies = self.SUPPORTED_CONCURRENCIES.intersection(concurrency)
    if len(these_concurrencies) > 1:
        raise CoverageException("Conflicting concurrency settings: %s" % concurrency)
    self.concurrency = these_concurrencies.pop() if these_concurrencies else ''

    try:
        if self.concurrency == "greenlet":
            import greenlet
            self.concur_id_func = greenlet.getcurrent
        elif self.concurrency == "eventlet":
            import eventlet.greenthread     # pylint: disable=import-error,useless-suppression
            self.concur_id_func = eventlet.greenthread.getcurrent
        elif self.concurrency == "gevent":
            import gevent                   # pylint: disable=import-error,useless-suppression
            self.concur_id_func = gevent.getcurrent
        elif self.concurrency == "thread" or not self.concurrency:
            # It's important to import threading only if we need it.  If
            # it's imported early, and the program being measured uses
            # gevent, then gevent's monkey-patching won't work properly.
            import threading
            self.threading = threading
        else:
            raise CoverageException("Don't understand concurrency=%s" % concurrency)
    except ImportError:
        raise CoverageException(
            "Couldn't trace with concurrency=%s, the module isn't installed." % (
                self.concurrency,
            )
        )

    self.reset()

    if timid:
        # Being timid: use the simple Python trace function.
        self._trace_class = PyTracer
    else:
        # Being fast: use the C Tracer if it is available, else the Python
        # trace function.
        self._trace_class = CTracer or PyTracer

    if self._trace_class is CTracer:
        self.file_disposition_class = CFileDisposition
        self.supports_plugins = True
    else:
        self.file_disposition_class = FileDisposition
        self.supports_plugins = False
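
# Usage sketch: the concurrency choice above is normally driven by the public
# `concurrency` setting, either the constructor argument shown here or
# `[run] concurrency` in the config file.  Only one of greenlet/eventlet/gevent
# may be named, matching the "only one at a time" check above.
import coverage

cov = coverage.Coverage(concurrency="gevent")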
def should_trace(self, filename, frame=None):
    """Decide whether to trace execution in `filename`, with a reason.

    This function is called from the trace function.  As each new file name
    is encountered, this function determines whether it is traced or not.

    Returns a FileDisposition object.

    """
    original_filename = filename
    disp = disposition_init(self.disp_class, filename)

    def nope(disp, reason):
        """Simple helper to make it easy to return NO."""
        disp.trace = False
        disp.reason = reason
        return disp

    if frame is not None:
        # Compiled Python files have two file names: frame.f_code.co_filename is
        # the file name at the time the .pyc was compiled.  The second name is
        # __file__, which is where the .pyc was actually loaded from.  Since
        # .pyc files can be moved after compilation (for example, by being
        # installed), we look for __file__ in the frame and prefer it to the
        # co_filename value.
        dunder_file = frame.f_globals and frame.f_globals.get('__file__')
        if dunder_file:
            filename = source_for_file(dunder_file)
            if original_filename and not original_filename.startswith('<'):
                orig = os.path.basename(original_filename)
                if orig != os.path.basename(filename):
                    # Files shouldn't be renamed when moved.  This happens when
                    # exec'ing code.  If it seems like something is wrong with
                    # the frame's file name, then just use the original.
                    filename = original_filename

    if not filename:
        # Empty string is pretty useless.
        return nope(disp, "empty string isn't a file name")

    if filename.startswith('memory:'):
        return nope(disp, "memory isn't traceable")

    if filename.startswith('<'):
        # Lots of non-file execution is represented with artificial
        # file names like "<string>", "<doctest readme.txt[0]>", or
        # "<exec_function>".  Don't ever trace these executions, since we
        # can't do anything with the data later anyway.
        return nope(disp, "not a real file name")

    # pyexpat does a dumb thing, calling the trace function explicitly from
    # C code with a C file name.
    if re.search(r"[/\\]Modules[/\\]pyexpat.c", filename):
        return nope(disp, "pyexpat lies about itself")

    # Jython reports the .class file to the tracer, use the source file.
    if filename.endswith("$py.class"):
        filename = filename[:-9] + ".py"

    canonical = canonical_filename(filename)
    disp.canonical_filename = canonical

    # Try the plugins, see if they have an opinion about the file.
    plugin = None
    for plugin in self.plugins.file_tracers:
        if not plugin._coverage_enabled:
            continue

        try:
            file_tracer = plugin.file_tracer(canonical)
            if file_tracer is not None:
                file_tracer._coverage_plugin = plugin
                disp.trace = True
                disp.file_tracer = file_tracer
                if file_tracer.has_dynamic_source_filename():
                    disp.has_dynamic_filename = True
                else:
                    disp.source_filename = canonical_filename(
                        file_tracer.source_filename()
                    )
                break
        except Exception:
            self.warn(
                "Disabling plug-in %r due to an exception:" % (plugin._coverage_plugin_name)
            )
            traceback.print_exc()
            plugin._coverage_enabled = False
            continue
    else:
        # No plugin wanted it: it's Python.
        disp.trace = True
        disp.source_filename = canonical

    if not disp.has_dynamic_filename:
        if not disp.source_filename:
            raise CoverageException(
                "Plugin %r didn't set source_filename for %r" % (plugin, disp.original_filename)
            )
        reason = self.check_include_omit_etc(disp.source_filename, frame)
        if reason:
            nope(disp, reason)

    return disp
def update(self, other_data, aliases=None):
    """Update this data with data from another :class:`CoverageData` instance.

    If `aliases` is provided, it's a `PathAliases` object that is used to
    re-map paths to match the local machine's.
    """
    if self._debug.should('dataop'):
        self._debug.write("Updating with data from %r" % (
            getattr(other_data, '_filename', '???'),
        ))
    if self._has_lines and other_data._has_arcs:
        raise CoverageException("Can't combine arc data with line data")
    if self._has_arcs and other_data._has_lines:
        raise CoverageException("Can't combine line data with arc data")

    aliases = aliases or PathAliases()

    # Force the database we're writing to to exist before we start nesting
    # contexts.
    self._start_using()

    # Collector for all arcs, lines and tracers
    other_data.read()
    with other_data._connect() as conn:
        # Get files data.
        cur = conn.execute('select path from file')
        files = {path: aliases.map(path) for (path,) in cur}
        cur.close()

        # Get contexts data.
        cur = conn.execute('select context from context')
        contexts = [context for (context,) in cur]
        cur.close()

        # Get arc data.
        cur = conn.execute(
            'select file.path, context.context, arc.fromno, arc.tono '
            'from arc '
            'inner join file on file.id = arc.file_id '
            'inner join context on context.id = arc.context_id'
        )
        arcs = [(files[path], context, fromno, tono) for (path, context, fromno, tono) in cur]
        cur.close()

        # Get line data.
        cur = conn.execute(
            'select file.path, context.context, line_bits.numbits '
            'from line_bits '
            'inner join file on file.id = line_bits.file_id '
            'inner join context on context.id = line_bits.context_id'
        )
        lines = {(files[path], context): numbits for (path, context, numbits) in cur}
        cur.close()

        # Get tracer data.
        cur = conn.execute(
            'select file.path, tracer '
            'from tracer '
            'inner join file on file.id = tracer.file_id'
        )
        tracers = {files[path]: tracer for (path, tracer) in cur}
        cur.close()

    with self._connect() as conn:
        conn.con.isolation_level = 'IMMEDIATE'

        # Get all tracers in the DB.  Files not in the tracers are assumed
        # to have an empty string tracer.  Since Sqlite does not support
        # full outer joins, we have to make two queries to fill the
        # dictionary.
        this_tracers = {path: '' for path, in conn.execute('select path from file')}
        this_tracers.update({
            aliases.map(path): tracer
            for path, tracer in conn.execute(
                'select file.path, tracer from tracer '
                'inner join file on file.id = tracer.file_id'
            )
        })

        # Create all file and context rows in the DB.
        conn.executemany(
            'insert or ignore into file (path) values (?)',
            ((file,) for file in files.values())
        )
        file_ids = {
            path: id
            for id, path in conn.execute('select id, path from file')
        }
        conn.executemany(
            'insert or ignore into context (context) values (?)',
            ((context,) for context in contexts)
        )
        context_ids = {
            context: id
            for id, context in conn.execute('select id, context from context')
        }

        # Prepare tracers, and fail if a conflict is found.
        # tracer_map tracks the tracers to be inserted.
        tracer_map = {}
        for path in files.values():
            this_tracer = this_tracers.get(path)
            other_tracer = tracers.get(path, '')
            # If there is no tracer, there is always the None tracer.
            if this_tracer is not None and this_tracer != other_tracer:
                raise CoverageException(
                    "Conflicting file tracer name for '%s': %r vs %r" % (
                        path, this_tracer, other_tracer
                    )
                )
            tracer_map[path] = other_tracer

        # Prepare arc and line rows to be inserted by converting the file
        # and context strings with integer ids.  Then use the efficient
        # `executemany()` to insert all rows at once.
        arc_rows = (
            (file_ids[file], context_ids[context], fromno, tono)
            for file, context, fromno, tono in arcs
        )

        # Get line data.
        cur = conn.execute(
            'select file.path, context.context, line_bits.numbits '
            'from line_bits '
            'inner join file on file.id = line_bits.file_id '
            'inner join context on context.id = line_bits.context_id'
        )
        for path, context, numbits in cur:
            key = (aliases.map(path), context)
            if key in lines:
                numbits = numbits_union(lines[key], numbits)
            lines[key] = numbits
        cur.close()

        if arcs:
            self._choose_lines_or_arcs(arcs=True)

            # Write the combined data.
            conn.executemany(
                'insert or ignore into arc '
                '(file_id, context_id, fromno, tono) values (?, ?, ?, ?)',
                arc_rows
            )

        if lines:
            self._choose_lines_or_arcs(lines=True)
            conn.execute("delete from line_bits")
            conn.executemany(
                "insert into line_bits "
                "(file_id, context_id, numbits) values (?, ?, ?)",
                [
                    (file_ids[file], context_ids[context], numbits)
                    for (file, context), numbits in lines.items()
                ]
            )

        conn.executemany(
            'insert or ignore into tracer (file_id, tracer) values (?, ?)',
            ((file_ids[filename], tracer) for filename, tracer in tracer_map.items())
        )

    # Update all internal cache data.
    self._reset()
    self.read()
def report(self, morfs, outfile=None):
    """Writes a report summarizing coverage statistics per module.

    `outfile` is a file object to write the summary to.  It must be opened
    for native strings (bytes on Python 2, Unicode on Python 3).

    """
    self.outfile = outfile or sys.stdout

    self.coverage.get_data().set_query_contexts(self.config.report_contexts)
    for fr, analysis in get_analysis_to_report(self.coverage, morfs):
        self.report_one_file(fr, analysis)

    # Prepare the formatting strings, header, and column sorting.
    max_name = max([len(fr.relative_filename()) for (fr, analysis) in self.fr_analysis] + [5])
    fmt_name = u"%%- %ds  " % max_name
    fmt_skip_covered = u"\n%s file%s skipped due to complete coverage."
    fmt_skip_empty = u"\n%s empty file%s skipped."

    header = (fmt_name % "Name") + u" Stmts   Miss"
    fmt_coverage = fmt_name + u"%6d %6d"
    if self.branches:
        header += u" Branch BrPart"
        fmt_coverage += u" %6d %6d"
    width100 = Numbers.pc_str_width()
    header += u"%*s" % (width100+4, "Cover")
    fmt_coverage += u"%%%ds%%%%" % (width100+3,)
    if self.config.show_missing:
        header += u"   Missing"
        fmt_coverage += u"   %s"
    rule = u"-" * len(header)

    column_order = dict(name=0, stmts=1, miss=2, cover=-1)
    if self.branches:
        column_order.update(dict(branch=3, brpart=4))

    # Write the header
    self.writeout(header)
    self.writeout(rule)

    # `lines` is a list of pairs, (line text, line values).  The line text
    # is a string that will be printed, and line values is a tuple of
    # sortable values.
    lines = []

    for (fr, analysis) in self.fr_analysis:
        nums = analysis.numbers

        args = (fr.relative_filename(), nums.n_statements, nums.n_missing)
        if self.branches:
            args += (nums.n_branches, nums.n_partial_branches)
        args += (nums.pc_covered_str,)
        if self.config.show_missing:
            args += (analysis.missing_formatted(branches=True),)
        text = fmt_coverage % args
        # Add numeric percent coverage so that sorting makes sense.
        args += (nums.pc_covered,)
        lines.append((text, args))

    # Sort the lines and write them out.
    if getattr(self.config, 'sort', None):
        sort_option = self.config.sort.lower()
        reverse = False
        if sort_option[0] == '-':
            reverse = True
            sort_option = sort_option[1:]
        elif sort_option[0] == '+':
            sort_option = sort_option[1:]

        position = column_order.get(sort_option)
        if position is None:
            raise CoverageException("Invalid sorting option: {!r}".format(self.config.sort))
        lines.sort(key=lambda l: (l[1][position], l[0]), reverse=reverse)

    for line in lines:
        self.writeout(line[0])

    # Write a TOTAL line if we had at least one file.
    if self.total.n_files > 0:
        self.writeout(rule)
        args = ("TOTAL", self.total.n_statements, self.total.n_missing)
        if self.branches:
            args += (self.total.n_branches, self.total.n_partial_branches)
        args += (self.total.pc_covered_str,)
        if self.config.show_missing:
            args += ("",)
        self.writeout(fmt_coverage % args)

    # Write other final lines.
    if not self.total.n_files and not self.skipped_count:
        raise CoverageException("No data to report.")

    if self.config.skip_covered and self.skipped_count:
        self.writeout(
            fmt_skip_covered % (self.skipped_count, 's' if self.skipped_count > 1 else '')
        )
    if self.config.skip_empty and self.empty_count:
        self.writeout(
            fmt_skip_empty % (self.empty_count, 's' if self.empty_count > 1 else '')
        )

    return self.total.n_statements and self.total.pc_covered
def from_file(self, filename, our_file):
    """Read configuration from a .rc file.

    `filename` is a file name to read.

    `our_file` is True if this config file is specifically for coverage,
    False if we are examining another config file (tox.ini, setup.cfg)
    for possible settings.

    Returns True or False, whether the file could be read, and it had some
    coverage.py settings in it.

    """
    _, ext = os.path.splitext(filename)
    if ext == '.toml':
        cp = TomlConfigParser(our_file)
    else:
        cp = HandyConfigParser(our_file)

    self.attempted_config_files.append(filename)

    try:
        files_read = cp.read(filename)
    except (configparser.Error, TomlDecodeError) as err:
        raise CoverageException("Couldn't read config file %s: %s" % (filename, err))
    if not files_read:
        return False

    self.config_files_read.extend(map(os.path.abspath, files_read))

    any_set = False
    try:
        for option_spec in self.CONFIG_FILE_OPTIONS:
            was_set = self._set_attr_from_config_option(cp, *option_spec)
            if was_set:
                any_set = True
    except ValueError as err:
        raise CoverageException("Couldn't read config file %s: %s" % (filename, err))

    # Check that there are no unrecognized options.
    all_options = collections.defaultdict(set)
    for option_spec in self.CONFIG_FILE_OPTIONS:
        section, option = option_spec[1].split(":")
        all_options[section].add(option)

    for section, options in iitems(all_options):
        real_section = cp.has_section(section)
        if real_section:
            for unknown in set(cp.options(section)) - options:
                raise CoverageException(
                    "Unrecognized option '[%s] %s=' in config file %s" % (
                        real_section, unknown, filename
                    )
                )

    # [paths] is special
    if cp.has_section('paths'):
        for option in cp.options('paths'):
            self.paths[option] = cp.getlist('paths', option)
            any_set = True

    # plugins can have options
    for plugin in self.plugins:
        if cp.has_section(plugin):
            self.plugin_options[plugin] = cp.get_section(plugin)
            any_set = True

    # Was this file used as a config file?  If it's specifically our file,
    # then it was used.  If we're piggybacking on someone else's file,
    # then it was only used if we found some settings in it.
    if our_file:
        used = True
    else:
        used = any_set

    if used:
        self.config_file = os.path.abspath(filename)
        with open(filename, "rb") as f:
            self._config_contents = f.read()

    return used
def combine_parallel_data(data, aliases=None, data_paths=None, strict=False, keep=False):
    """Combine a number of data files together.

    Treat `data.filename` as a file prefix, and combine the data from all
    of the data files starting with that prefix plus a dot.

    If `aliases` is provided, it's a `PathAliases` object that is used to
    re-map paths to match the local machine's.

    If `data_paths` is provided, it is a list of directories or files to
    combine.  Directories are searched for files that start with
    `data.filename` plus dot as a prefix, and those files are combined.

    If `data_paths` is not provided, then the directory portion of
    `data.filename` is used as the directory to search for data files.

    Unless `keep` is True every data file found and combined is then deleted
    from disk.

    If a file cannot be read, a warning will be issued, and the file will
    not be deleted.

    If `strict` is true, and no files are found to combine, an error is
    raised.

    """
    # Because of the os.path.abspath in the constructor, data_dir will
    # never be an empty string.
    data_dir, local = os.path.split(data.base_filename())
    localdot = local + '.*'

    data_paths = data_paths or [data_dir]
    files_to_combine = []
    for p in data_paths:
        if os.path.isfile(p):
            files_to_combine.append(os.path.abspath(p))
        elif os.path.isdir(p):
            pattern = os.path.join(os.path.abspath(p), localdot)
            files_to_combine.extend(glob.glob(pattern))
        else:
            raise CoverageException("Couldn't combine from non-existent path '%s'" % (p,))

    if strict and not files_to_combine:
        raise CoverageException("No data to combine")

    files_combined = 0
    for f in files_to_combine:
        if f == data.data_filename():
            # Sometimes we are combining into a file which is one of the
            # parallel files.  Skip that file.
            if data._debug.should('dataio'):
                data._debug.write("Skipping combining ourself: %r" % (f,))
            continue
        if data._debug.should('dataio'):
            data._debug.write("Combining data file %r" % (f,))
        try:
            new_data = CoverageData(f, debug=data._debug)
            new_data.read()
        except CoverageException as exc:
            if data._warn:
                # The CoverageException has the file name in it, so just
                # use the message as the warning.
                data._warn(str(exc))
        else:
            data.update(new_data, aliases=aliases)
            files_combined += 1
            if not keep:
                if data._debug.should('dataio'):
                    data._debug.write("Deleting combined data file %r" % (f,))
                file_be_gone(f)

    if strict and not files_combined:
        raise CoverageException("No usable data files")
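
# Sketch of the public entry point that reaches this function: combine all
# `.coverage.*` files in the current directory into one data file (paths are
# illustrative):
import coverage

cov = coverage.Coverage(data_file=".coverage")
cov.combine(data_paths=["."], strict=True)   # gathers .coverage.* siblings
cov.save()                                   # writes the merged .coverage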