def _ptchd_fnd_dep_file_pth(
    main_file,
    file_path,
    relative_path_search=False,
):
    """Resolve the absolute path of a dependency file of `main_file`.

    `.pxi` includes (or any file, when `relative_path_search` is true)
    are first looked up next to the main source file; anything still
    unresolved is searched for along ``sys.path``. The result is passed
    through ``canonical_filename``.
    """
    candidate = os.path.abspath(file_path)
    search_relative = file_path.endswith('.pxi') or relative_path_search
    if search_relative and not os.path.exists(candidate):
        # Dependency files are looked up relative to the main source file.
        sibling = os.path.join(os.path.dirname(main_file), file_path)
        if os.path.exists(sibling):
            candidate = os.path.abspath(sibling)
        # When main_file's stem plus file_path's extension ends with
        # file_path itself, that is the match.
        stem_match = os.path.splitext(main_file)[0] + os.path.splitext(file_path)[1]
        if stem_match.endswith(file_path):
            return stem_match
    if not os.path.exists(candidate):
        # Fall back to scanning sys.path for externally installed sources.
        for entry in sys.path:
            probe = os.path.realpath(os.path.join(entry, file_path))
            if os.path.exists(probe):
                return canonical_filename(probe)
    return canonical_filename(candidate)
def test_combining_with_aliases(self):
    """Combining parallel data applies path aliases.

    Files measured under two different roots (a POSIX path and a
    Windows path) must merge into one set of canonical './' paths,
    and the file tracer attached to the HTML template must survive.
    """
    covdata1 = CoverageData()
    covdata1.set_lines({
        '/home/ned/proj/src/a.py': {1: None, 2: None},
        '/home/ned/proj/src/sub/b.py': {3: None},
        '/home/ned/proj/src/template.html': {10: None},
    })
    covdata1.set_file_tracers({
        '/home/ned/proj/src/template.html': 'html.plugin',
    })
    self.data_files.write(covdata1, suffix='1')

    # Second data set: same logical files, Windows-style paths.
    covdata2 = CoverageData()
    covdata2.set_lines({
        r'c:\ned\test\a.py': {4: None, 5: None},
        r'c:\ned\test\sub\b.py': {3: None, 6: None},
    })
    self.data_files.write(covdata2, suffix='2')

    # Both roots are aliased onto the current directory.
    covdata3 = CoverageData()
    aliases = PathAliases()
    aliases.add("/home/ned/proj/src/", "./")
    aliases.add(r"c:\ned\test", "./")
    self.data_files.combine_parallel_data(covdata3, aliases=aliases)

    apy = canonical_filename('./a.py')
    sub_bpy = canonical_filename('./sub/b.py')
    template_html = canonical_filename('./template.html')

    # a.py: 2 + 2 lines; b.py: {3} ∪ {3, 6} = 2 lines; template: 1 line.
    self.assert_line_counts(covdata3, {apy: 4, sub_bpy: 2, template_html: 1}, fullpath=True)
    self.assert_measured_files(covdata3, [apy, sub_bpy, template_html])
    self.assertEqual(covdata3.file_tracer(template_html), 'html.plugin')
def test_combining_with_aliases(self):
    """Combining parallel data files applies path aliases.

    Older-API variant: data objects write themselves and combine via
    ``covdata3.combine_parallel_data`` rather than a data-files helper.
    """
    covdata1 = CoverageData()
    covdata1.add_line_data({
        '/home/ned/proj/src/a.py': {1: None, 2: None},
        '/home/ned/proj/src/sub/b.py': {3: None},
    })
    covdata1.write(suffix='1')

    # Same project measured under a Windows-style root.
    covdata2 = CoverageData()
    covdata2.add_line_data({
        r'c:\ned\test\a.py': {4: None, 5: None},
        r'c:\ned\test\sub\b.py': {6: None},
    })
    covdata2.write(suffix='2')

    # Map both roots onto the current directory before combining.
    covdata3 = CoverageData()
    aliases = PathAliases()
    aliases.add("/home/ned/proj/src/", "./")
    aliases.add(r"c:\ned\test", "./")
    covdata3.combine_parallel_data(aliases=aliases)

    apy = canonical_filename('./a.py')
    sub_bpy = canonical_filename('./sub/b.py')

    # a.py accumulates 2+2 lines; b.py accumulates 1+1 lines.
    self.assert_summary(
        covdata3, {
            apy: 4,
            sub_bpy: 2,
        },
        fullpath=True
    )
    self.assert_measured_files(covdata3, [apy, sub_bpy])
def test_combining_with_aliases(self):
    """Combining parallel data applies path aliases (add_lines API).

    Two measurement runs under different roots must merge into one
    canonical './' file set, keeping the template's file tracer.
    """
    covdata1 = CoverageData()
    covdata1.add_lines({
        '/home/ned/proj/src/a.py': {1: None, 2: None},
        '/home/ned/proj/src/sub/b.py': {3: None},
        '/home/ned/proj/src/template.html': {10: None},
    })
    covdata1.add_file_tracers({
        '/home/ned/proj/src/template.html': 'html.plugin',
    })
    self.data_files.write(covdata1, suffix='1')

    # Second run: same files measured under a Windows-style root.
    covdata2 = CoverageData()
    covdata2.add_lines({
        r'c:\ned\test\a.py': {4: None, 5: None},
        r'c:\ned\test\sub\b.py': {3: None, 6: None},
    })
    self.data_files.write(covdata2, suffix='2')

    # Alias both roots to the current directory, then combine.
    covdata3 = CoverageData()
    aliases = PathAliases()
    aliases.add("/home/ned/proj/src/", "./")
    aliases.add(r"c:\ned\test", "./")
    self.data_files.combine_parallel_data(covdata3, aliases=aliases)

    apy = canonical_filename('./a.py')
    sub_bpy = canonical_filename('./sub/b.py')
    template_html = canonical_filename('./template.html')

    # a.py: 2+2 lines; b.py: {3} ∪ {3, 6} = 2 lines; template: 1 line.
    self.assert_line_counts(covdata3, {apy: 4, sub_bpy: 2, template_html: 1}, fullpath=True)
    self.assert_measured_files(covdata3, [apy, sub_bpy, template_html])
    self.assertEqual(covdata3.file_tracer(template_html), 'html.plugin')
def test_combining_with_aliases(self):
    """Combining parallel data applies aliases and consumes the inputs.

    Newer-API variant: the suffix is passed to the CoverageData
    constructor, and combining removes the .coverage.* input files.
    """
    covdata1 = CoverageData(suffix='1')
    covdata1.add_lines({
        '/home/ned/proj/src/a.py': {1: None, 2: None},
        '/home/ned/proj/src/sub/b.py': {3: None},
        '/home/ned/proj/src/template.html': {10: None},
    })
    covdata1.add_file_tracers({
        '/home/ned/proj/src/template.html': 'html.plugin',
    })
    covdata1.write()

    # Second run under a Windows-style root.
    covdata2 = CoverageData(suffix='2')
    covdata2.add_lines({
        r'c:\ned\test\a.py': {4: None, 5: None},
        r'c:\ned\test\sub\b.py': {3: None, 6: None},
    })
    covdata2.write()

    self.assert_file_count(".coverage.*", 2)

    covdata3 = CoverageData()
    aliases = PathAliases()
    aliases.add("/home/ned/proj/src/", "./")
    aliases.add(r"c:\ned\test", "./")
    combine_parallel_data(covdata3, aliases=aliases)
    # Combining consumes (deletes) the parallel input files.
    self.assert_file_count(".coverage.*", 0)
    # covdata3 hasn't been written yet. Should this file exist or not?
    #self.assert_exists(".coverage")

    apy = canonical_filename('./a.py')
    sub_bpy = canonical_filename('./sub/b.py')
    template_html = canonical_filename('./template.html')

    # a.py: 2+2 lines; b.py: {3} ∪ {3, 6} = 2 lines; template: 1 line.
    self.assert_line_counts(covdata3, {apy: 4, sub_bpy: 2, template_html: 1}, fullpath=True)
    self.assert_measured_files(covdata3, [apy, sub_bpy, template_html])
    assert covdata3.file_tracer(template_html) == 'html.plugin'
def test_canonical_filename_ensure_cache_hit(self):
    """Canonicalizing a path populates the cache, and the cached lookup agrees."""
    self.make_file("sub/proj1/file1.py")
    proj_dir = actual_path(self.abs_path("sub/proj1"))
    os.chdir(proj_dir)
    files.set_relative_directory()
    expected = self.abs_path('file1.py')
    assert files.canonical_filename('sub/proj1/file1.py') == expected
    # The converted filename must now sit in the module-level cache.
    assert 'sub/proj1/file1.py' in files.CANONICAL_FILENAME_CACHE
    # A second conversion — now served from the cache — gives the same answer.
    assert files.canonical_filename('sub/proj1/file1.py') == expected
def test_canonical_filename_ensure_cache_hit(self):
    """Canonicalizing a path caches it; the cached lookup agrees.

    unittest-assertion variant of the cache-hit test.
    """
    self.make_file("sub/proj1/file1.py")
    d = actual_path(self.abs_path("sub/proj1"))
    self.chdir(d)
    files.set_relative_directory()
    canonical_path = files.canonical_filename('sub/proj1/file1.py')
    self.assertEqual(canonical_path, self.abs_path('file1.py'))
    # After the filename has been converted, it should be in the cache.
    self.assertIn('sub/proj1/file1.py', files.CANONICAL_FILENAME_CACHE)
    # A second conversion, served from the cache, must give the same result.
    self.assertEqual(
        files.canonical_filename('sub/proj1/file1.py'),
        self.abs_path('file1.py'))
def _find_dep_file_path(main_file, file_path):
    """Return the canonical path of a dependency of `main_file`.

    `.pxi` include files are first looked up next to the main source
    file; anything still unresolved is searched for along ``sys.path``.
    """
    resolved = os.path.abspath(file_path)
    if not os.path.exists(resolved) and file_path.endswith('.pxi'):
        # Include files live relative to the including source file.
        beside_main = os.path.join(os.path.dirname(main_file), file_path)
        if os.path.exists(beside_main):
            resolved = os.path.abspath(beside_main)
    if not os.path.exists(resolved):
        # Still nothing on disk: probe every sys.path entry in order.
        for entry in sys.path:
            candidate = os.path.realpath(os.path.join(entry, file_path))
            if os.path.exists(candidate):
                return canonical_filename(candidate)
    return canonical_filename(resolved)
def __init__(self, morf, coverage=None):
    """Build a file reporter for `morf` (a module object or a file name).

    Raises CoverageException for module objects without __file__
    (e.g. PEP-420 namespace packages).
    """
    self.coverage = coverage

    if hasattr(morf, '__file__'):
        filename = morf.__file__
    elif isinstance(morf, types.ModuleType):
        # A module should have had .__file__, otherwise we can't use it.
        # This could be a PEP-420 namespace package.
        raise CoverageException("Module {0} has no file".format(morf))
    else:
        filename = morf

    filename = source_for_file(files.unicode_filename(filename))

    super(PythonFileReporter, self).__init__(files.canonical_filename(filename))

    if hasattr(morf, '__name__'):
        # Modules report under their dotted name rendered as a path;
        # packages get an explicit "__init__" component.
        name = morf.__name__.replace(".", os.sep)
        if os.path.basename(filename).startswith('__init__.'):
            name += os.sep + "__init__"
        name += ".py"
        name = files.unicode_filename(name)
    else:
        name = files.relative_filename(filename)
    self.relname = name

    # Lazily-computed state, filled in by later accessor calls.
    self._source = None
    self._parser = None
    self._statements = None
    self._excluded = None
def file_tracer(self, filename):
    """
    Try to find a C source file for a file path found by the tracer.

    Returns a CythonModuleTracer for Cython-generated modules, or None
    for files this plugin does not handle.
    """
    # Artificial file names ("<string>", "memory:...") are never traceable.
    if filename.startswith('<') or filename.startswith('memory:'):
        return None
    c_file = py_file = None
    filename = canonical_filename(os.path.abspath(filename))
    if self._c_files_map and filename in self._c_files_map:
        c_file = self._c_files_map[filename][0]

    if c_file is None:
        c_file, py_file = self._find_source_files(filename)
        if not c_file:
            return None  # unknown file

        # parse all source file paths and lines from C file
        # to learn about all relevant source files right away (pyx/pxi/pxd)
        # FIXME: this might already be too late if the first executed line
        #        is not from the main .pyx file but a file with a different
        #        name than the .c file (which prevents us from finding the
        #        .c file)
        _, code = self._parse_lines(c_file, filename)
        if code is None:
            return None  # no source found

    if self._file_path_map is None:
        self._file_path_map = {}
    return CythonModuleTracer(filename, py_file, c_file, self._c_files_map, self._file_path_map)
def test_tree_matcher(self):
    """A TreeMatcher matches files under its directories or exact file paths."""
    cases = [
        (self.make_file("sub/file1.py"), True),
        (self.make_file("sub/file2.c"), True),
        (self.make_file("sub2/file3.h"), False),
        (self.make_file("sub3/file4.py"), True),
        (self.make_file("sub3/file5.c"), False),
    ]
    # One whole directory plus one individual file.
    trees = [
        files.canonical_filename("sub"),
        files.canonical_filename("sub3/file4.py"),
    ]
    matcher = TreeMatcher(trees)
    self.assertEqual(matcher.info(), trees)
    for path, expected in cases:
        self.assertMatches(matcher, path, expected)
def display_arcs(data_file):
    """Load coverage data from `data_file` and print its arc information.

    Prints whether the data contains branch arcs, then the arcs recorded
    for this very file.
    """
    cov = coverage.Coverage(data_file=data_file, branch=True)
    cov.load()
    covdata = cov.get_data()
    # Fixed typo in the output label: "Has ars" -> "Has arcs".
    print("Has arcs: %d" % covdata.has_arcs())
    cf = files.canonical_filename(__file__)
    print("Executed Arcs: " + str(covdata.arcs(cf)))
def get_data(self):
    """Get the collected data and reset the collector.

    Also warn about various problems collecting data.

    Returns a :class:`coverage.CoverageData`, the collected coverage data.

    .. versionadded:: 4.0

    """
    self._init()
    if not self._measured:
        # Nothing collected since the last call; skip the work below.
        return

    self.collector.save_data(self.data)

    # If there are still entries in the source_pkgs list, then we never
    # encountered those packages.
    if self._warn_unimported_source:
        for pkg in self.source_pkgs:
            if pkg not in sys.modules:
                self._warn("Module %s was never imported." % pkg)
            elif not (
                hasattr(sys.modules[pkg], '__file__') and
                os.path.exists(sys.modules[pkg].__file__)
            ):
                self._warn("Module %s has no Python source." % pkg)
            else:
                self._warn("Module %s was previously imported, but not measured." % pkg)

    # Find out if we got any data.
    if not self.data and self._warn_no_data:
        self._warn("No data was collected.")

    # Find files that were never executed at all.
    for src in self.source:
        for py_file in find_python_files(src):
            py_file = files.canonical_filename(py_file)

            if self.omit_match and self.omit_match.match(py_file):
                # Turns out this file was omitted, so don't pull it back
                # in as unexecuted.
                continue

            self.data.touch_file(py_file)

    # Add run information.
    self.data.add_run_info(
        brief_sys=" ".join([
            platform.python_implementation(),
            platform.python_version(),
            platform.system(),
        ])
    )

    if self.config.note:
        self.data.add_run_info(note=self.config.note)

    # Mark everything consumed so a repeat call is a no-op until
    # more data is measured.
    self._measured = False
    return self.data
def __init__(self, morf, coverage=None):
    """Build a file reporter for `morf` (a module object or a file name)."""
    self.coverage = coverage

    if hasattr(morf, '__file__'):
        filename = morf.__file__
    else:
        filename = morf

    filename = files.unicode_filename(filename)

    # .pyc files should always refer to a .py instead.
    if filename.endswith(('.pyc', '.pyo')):
        filename = filename[:-1]
    elif filename.endswith('$py.class'):
        # Jython compiled-class naming: swap the suffix back to .py.
        filename = filename[:-9] + ".py"

    super(PythonFileReporter, self).__init__(files.canonical_filename(filename))

    if hasattr(morf, '__name__'):
        # Modules report under their dotted name rendered as a path.
        name = morf.__name__
        name = name.replace(".", os.sep) + ".py"
        name = files.unicode_filename(name)
    else:
        name = files.relative_filename(filename)
    self.relname = name

    # Lazily-computed state, filled in by later accessor calls.
    self._source = None
    self._parser = None
    self._statements = None
    self._excluded = None
def __init__(self, morf, coverage=None):
    """Build a file reporter for `morf` (a module object or a file name).

    Raises CoverageException for module objects without __file__
    (e.g. PEP-420 namespace packages).
    """
    self.coverage = coverage

    if hasattr(morf, '__file__'):
        filename = morf.__file__
    elif isinstance(morf, types.ModuleType):
        # A module should have had .__file__, otherwise we can't use it.
        # This could be a PEP-420 namespace package.
        raise CoverageException("Module {0} has no file".format(morf))
    else:
        filename = morf

    filename = files.unicode_filename(filename)

    # .pyc files should always refer to a .py instead.
    if filename.endswith(('.pyc', '.pyo')):
        filename = filename[:-1]
    elif filename.endswith('$py.class'):
        # Jython compiled-class naming: swap the suffix back to .py.
        filename = filename[:-9] + ".py"

    super(PythonFileReporter, self).__init__(files.canonical_filename(filename))

    if hasattr(morf, '__name__'):
        # Modules report under their dotted name rendered as a path.
        name = morf.__name__
        name = name.replace(".", os.sep) + ".py"
        name = files.unicode_filename(name)
    else:
        name = files.relative_filename(filename)
    self.relname = name

    # Lazily-computed state, filled in by later accessor calls.
    self._source = None
    self._parser = None
    self._statements = None
    self._excluded = None
def file_tracer(self, filename): """ Try to find a C source file for a file path found by the tracer. """ # TODO We need to pxd-files to the include map. For more info see pybuild.py # Currently skip such files, because they are not supported in Arcadia pybuild with coverage. if os.path.splitext(filename)[-1] not in ('.pyx', '.pxi'): return None if filename.startswith('<') or filename.startswith('memory:'): return None c_file = py_file = None filename = canonical_filename(filename) if self._c_files_map and filename in self._c_files_map: c_file = self._c_files_map[filename][0] if c_file is None: c_file, py_file = self._find_source_files(filename) if not c_file: return None # unknown file # parse all source file paths and lines from C file # to learn about all relevant source files right away (pyx/pxi/pxd) # FIXME: this might already be too late if the first executed line # is not from the main .pyx file but a file with a different # name than the .c file (which prevents us from finding the # .c file) _, code = self._read_source_lines(c_file, filename) if code is None: return None # no source found if self._file_path_map is None: self._file_path_map = {} return CythonModuleTracer(filename, py_file, c_file, self._c_files_map, self._file_path_map)
def file_tracer(self, filename):
    """
    Try to find a C source file for a file path found by the tracer.

    Returns a CythonModuleTracer, or None for files without a known
    generated C file.
    """
    # Artificial file names ("<string>", "memory:...") are never traceable.
    if filename.startswith('<') or filename.startswith('memory:'):
        return None
    c_file = py_file = None
    filename = canonical_filename(os.path.abspath(filename))
    if self._c_files_map and filename in self._c_files_map:
        c_file = self._c_files_map[filename][0]

    if c_file is None:
        c_file, py_file = self._find_source_files(filename)
        if not c_file:
            return None

        # parse all source file paths and lines from C file
        # to learn about all relevant source files right away (pyx/pxi/pxd)
        # FIXME: this might already be too late if the first executed line
        #        is not from the main .pyx file but a file with a different
        #        name than the .c file (which prevents us from finding the
        #        .c file)
        self._parse_lines(c_file, filename)

    if self._file_path_map is None:
        self._file_path_map = {}
    return CythonModuleTracer(filename, py_file, c_file, self._c_files_map, self._file_path_map)
def assertMatches(self, matcher, filepath, matches):
    """Check that `matcher` agrees with `matches` about `filepath`."""
    # Matchers operate on canonicalized paths.
    canonical = files.canonical_filename(filepath)
    actual = matcher.match(canonical)
    msg = "File %s should have matched as %s" % (filepath, matches)
    self.assertEqual(actual, matches, msg)
def get_data(self):
    """Get the collected data and reset the collector.

    Also warn about various problems collecting data.

    Returns:
        :class:`CoverageData`: the collected coverage data.

    """
    self._init()
    if not self._measured:
        # Nothing collected since the last call; skip the work below.
        return

    def abs_file_dict(d):
        """Return a dict like d, but with keys modified by `abs_file`."""
        return dict((abs_file(k), v) for k,v in iitems(d))

    # TODO: seems like this parallel structure is getting kinda old...
    self.data.add_lines(abs_file_dict(self.collector.get_line_data()))
    self.data.add_arcs(abs_file_dict(self.collector.get_arc_data()))
    self.data.add_plugins(abs_file_dict(self.collector.get_plugin_data()))
    self.collector.reset()

    # If there are still entries in the source_pkgs list, then we never
    # encountered those packages.
    if self._warn_unimported_source:
        for pkg in self.source_pkgs:
            if pkg not in sys.modules:
                self._warn("Module %s was never imported." % pkg)
            elif not (
                hasattr(sys.modules[pkg], '__file__') and
                os.path.exists(sys.modules[pkg].__file__)
            ):
                self._warn("Module %s has no Python source." % pkg)
            else:
                self._warn(
                    "Module %s was previously imported, "
                    "but not measured." % pkg
                )

    # Find out if we got any data.
    summary = self.data.summary()
    if not summary and self._warn_no_data:
        self._warn("No data was collected.")

    # Find files that were never executed at all.
    for src in self.source:
        for py_file in find_python_files(src):
            py_file = files.canonical_filename(py_file)

            if self.omit_match and self.omit_match.match(py_file):
                # Turns out this file was omitted, so don't pull it back
                # in as unexecuted.
                continue

            self.data.touch_file(py_file)

    # Mark everything consumed so a repeat call is a no-op until
    # more data is measured.
    self._measured = False
    return self.data
def assert_mapped(self, aliases, inp, out):
    """Assert that mapping `inp` through `aliases` yields `out`.

    Aliases always emit canonicalized paths, so `out` is canonicalized
    before comparing.
    """
    expected = files.canonical_filename(out)
    self.assertEqual(aliases.map(inp), expected)
def assert_mapped(self, aliases, inp, out, relative=False):
    """Assert that `inp` mapped through `aliases` produces `out`.

    Unless `relative` is true, `out` is canonicalized first, since
    aliases produce canonicalized paths by default.
    """
    if relative:
        expected = out
    else:
        expected = files.canonical_filename(out)
    assert aliases.map(inp) == expected
def _harvest_data(self):
    """Get the collected data and reset the collector.

    Also warn about various problems collecting data.

    Merges the collector's line/arc/plugin data into ``self.data``
    (keyed by absolute file paths) and emits warnings; unlike the
    public get_data, it returns nothing.
    """
    self._init()
    if not self._measured:
        # Nothing collected since the last call; skip the work below.
        return

    def abs_file_dict(d):
        """Return a dict like d, but with keys modified by `abs_file`."""
        return dict((abs_file(k), v) for k,v in iitems(d))

    # TODO: seems like this parallel structure is getting kinda old...
    self.data.add_line_data(abs_file_dict(self.collector.get_line_data()))
    self.data.add_arc_data(abs_file_dict(self.collector.get_arc_data()))
    self.data.add_plugin_data(abs_file_dict(self.collector.get_plugin_data()))
    self.collector.reset()

    # If there are still entries in the source_pkgs list, then we never
    # encountered those packages.
    if self._warn_unimported_source:
        for pkg in self.source_pkgs:
            if pkg not in sys.modules:
                self._warn("Module %s was never imported." % pkg)
            elif not (
                hasattr(sys.modules[pkg], '__file__') and
                os.path.exists(sys.modules[pkg].__file__)
            ):
                self._warn("Module %s has no Python source." % pkg)
            else:
                self._warn(
                    "Module %s was previously imported, "
                    "but not measured." % pkg
                )

    # Find out if we got any data.
    summary = self.data.summary()
    if not summary and self._warn_no_data:
        self._warn("No data was collected.")

    # Find files that were never executed at all.
    for src in self.source:
        for py_file in find_python_files(src):
            py_file = files.canonical_filename(py_file)

            if self.omit_match and self.omit_match.match(py_file):
                # Turns out this file was omitted, so don't pull it back
                # in as unexecuted.
                continue

            self.data.touch_file(py_file)

    # Mark everything consumed so a repeat call is a no-op until
    # more data is measured.
    self._measured = False
def assert_mapped(self, aliases, inp, out):
    """Assert that `inp` mapped through `aliases` produces `out`.

    `out` is canonicalized first, since aliases always produce
    canonicalized paths.
    """
    # Removed leftover debugging output (aliases.pprint() and bare prints
    # of inp/out) that polluted the test log on every assertion.
    assert aliases.map(inp) == files.canonical_filename(out)
def __init__(self, coverage, config):
    """Build the XML reporter.

    Collects the canonicalized existing source paths from the config,
    and records whether the coverage data contains branch arcs.
    """
    super(XmlReporter, self).__init__(coverage, config)

    self.source_paths = set()
    if config.source:
        for src in config.source:
            # Only paths that exist on disk are kept; package names
            # (non-paths) in config.source are skipped here.
            if os.path.exists(src):
                self.source_paths.add(files.canonical_filename(src))
    self.packages = {}
    self.xml_out = None
    self.has_arcs = coverage.data.has_arcs()
def canonical_path(morf, directory=False):
    """Return the canonical path of the module or file `morf`.

    If the module is a package, then return its directory. If it is a
    module, then return its file, unless `directory` is True, in which
    case return its enclosing directory.
    """
    path = canonical_filename(source_for_morf(morf))
    if directory or path.endswith("__init__.py"):
        path = os.path.split(path)[0]
    return path
def test_tree_matcher(self):
    """TreeMatcher matches dirs/files, with case folding on Windows."""
    # On Windows, path matching folds case, so differently-cased
    # entries still match; elsewhere they must not.
    case_folding = env.WINDOWS
    matches_to_try = [
        (self.make_file("sub/file1.py"), True),
        (self.make_file("sub/file2.c"), True),
        (self.make_file("sub2/file3.h"), False),
        (self.make_file("sub3/file4.py"), True),
        (self.make_file("sub3/file5.c"), False),
        (self.make_file("sub4/File5.py"), case_folding),
        (self.make_file("sub5/file6.py"), case_folding),
    ]
    trees = [
        files.canonical_filename("sub"),
        files.canonical_filename("sub3/file4.py"),
        files.canonical_filename("sub4/file5.py"),
        files.canonical_filename("SUB5/file6.py"),
    ]
    tm = TreeMatcher(trees, "test")
    assert tm.info() == trees
    for filepath, matches in matches_to_try:
        self.assertMatches(tm, filepath, matches)
def prepare(self):
    """Set sys.path properly.

    This needs to happen before any importing, and without importing anything.
    """
    # Decide what sys.path[0] should be for the program being run.
    if self.as_module:
        if env.PYBEHAVIOR.actual_syspath0_dash_m:
            # `python -m` puts the CWD at sys.path[0].
            path0 = os.getcwd()
        else:
            path0 = ""
    elif os.path.isdir(self.arg0):
        # Running a directory means running the __main__.py file in that
        # directory.
        path0 = self.arg0
    else:
        path0 = os.path.abspath(os.path.dirname(self.arg0))

    if os.path.isdir(sys.path[0]):
        # sys.path fakery. If we are being run as a command, then sys.path[0]
        # is the directory of the "coverage" script. If this is so, replace
        # sys.path[0] with the directory of the file we're running, or the
        # current directory when running modules. If it isn't so, then we
        # don't know what's going on, and just leave it alone.
        top_file = inspect.stack()[-1][0].f_code.co_filename
        sys_path_0_abs = os.path.abspath(sys.path[0])
        top_file_dir_abs = os.path.abspath(os.path.dirname(top_file))
        # Canonicalize both before comparing, so case/symlink differences
        # don't cause a false mismatch.
        sys_path_0_abs = canonical_filename(sys_path_0_abs)
        top_file_dir_abs = canonical_filename(top_file_dir_abs)
        if sys_path_0_abs != top_file_dir_abs:
            path0 = None
    else:
        # sys.path[0] is a file. Is the next entry the directory containing
        # that file?
        if sys.path[1] == os.path.dirname(sys.path[0]):
            # Can it be right to always remove that?
            del sys.path[1]

    if path0 is not None:
        sys.path[0] = python_reported_file(path0)
def __init__(self, coverage):
    """Build the reporter, collecting source paths from the config.

    Paths are canonicalized unless the config asks for relative files.
    """
    self.coverage = coverage
    self.config = self.coverage.config

    self.source_paths = set()
    if self.config.source:
        for src in self.config.source:
            # Only paths that exist on disk are kept; package names
            # (non-paths) in config.source are skipped here.
            if os.path.exists(src):
                if not self.config.relative_files:
                    src = files.canonical_filename(src)
                self.source_paths.add(src)
    self.packages = {}
    self.xml_out = None
def __init__(self, coverage):
    """Build the reporter and grab the coverage data up front."""
    self.coverage = coverage
    self.config = self.coverage.config

    self.source_paths = set()
    if self.config.source:
        for src in self.config.source:
            # Only paths that exist on disk are kept; package names
            # (non-paths) in config.source are skipped here.
            if os.path.exists(src):
                self.source_paths.add(files.canonical_filename(src))
    self.packages = {}
    self.xml_out = None

    self.data = coverage.get_data()
    self.has_arcs = self.data.has_arcs()
def extract_names(self, fr):
    """Return (dirname, rel_name) for the file reporter `fr`.

    The file name is made relative to one of the known source paths
    (paths compared with forward slashes); if none matches, the
    reporter's own relative name is used and its root is remembered
    as a new source path.
    """
    filename = fr.filename.replace("\\", "/")
    for source_path in self.source_paths:
        source_path = files.canonical_filename(source_path)
        if filename.startswith(source_path.replace("\\", "/") + "/"):
            # Strip the source path plus the following slash.
            rel_name = filename[len(source_path) + 1:]
            break
    else:
        # No source path matched: fall back to the reporter's relative
        # name and learn its root for future lookups.
        rel_name = fr.relative_filename()
        self.source_paths.add(fr.filename[:-len(rel_name)].rstrip(r"\/"))

    # Package directory, truncated to the configured reporting depth.
    dirname = os.path.dirname(rel_name) or u"."
    dirname = "/".join(dirname.split("/")[:self.config.xml_package_depth])

    return dirname, rel_name
def _find_unexecuted_files(self, src_dir):
    """Find unexecuted files in `src_dir`.

    Search for files in `src_dir` that are probably importable, and add
    them as unexecuted files in `self.data`.
    """
    omit = self.omit_match
    for found in find_python_files(src_dir):
        canonical = files.canonical_filename(found)
        # Omitted files must not be pulled back in as unexecuted.
        if omit and omit.match(canonical):
            continue
        self.data.touch_file(canonical)
def file_reporter(self, filename):
    """Return a CythonModuleReporter for `filename`, or None if unknown."""
    # TODO: let coverage.py handle .py files itself
    #ext = os.path.splitext(filename)[1].lower()
    #if ext == '.py':
    #    from coverage.python import PythonFileReporter
    #    return PythonFileReporter(filename)

    filename = canonical_filename(os.path.abspath(filename))
    if self._c_files_map and filename in self._c_files_map:
        # Already analyzed: reuse the cached C file, relative path and code.
        c_file, rel_file_path, code = self._c_files_map[filename]
    else:
        c_file, _ = self._find_source_files(filename)
        if not c_file:
            return None  # unknown file

        rel_file_path, code = self._parse_lines(c_file, filename)
    return CythonModuleReporter(c_file, filename, rel_file_path, code)
def __init__(self, coverage, report_name=None):
    """Build the reporter.

    Args:
        coverage: the Coverage object whose config drives the report.
        report_name: optional name for this report.
    """
    self.coverage = coverage
    self.config = self.coverage.config
    # Bug fix: this assignment was commented out, so the `report_name`
    # parameter was accepted but silently ignored.
    self.report_name = report_name

    self.source_paths = set()
    if self.config.source:
        for src in self.config.source:
            # Only paths that exist on disk are kept; package names
            # (non-paths) in config.source are skipped here.
            if os.path.exists(src):
                if not self.config.relative_files:
                    src = files.canonical_filename(src)
                self.source_paths.add(src)
    self.packages = {}
    self.xml_out = None

    self.is_class_level = False
def _find_unexecuted_files(self, src_dir):
    """Find unexecuted files in `src_dir`.

    Search for files in `src_dir` that are probably importable, and add
    them as unexecuted files in `self.data`.

    Plain Python files carry no plugin name; plugin-measured files keep
    the plugin that found them.
    """
    py_files = ((py_file, None) for py_file in find_python_files(src_dir))
    plugin_files = self._find_plugin_files(src_dir)

    for file_path, plugin_name in itertools.chain(py_files, plugin_files):
        file_path = canonical_filename(file_path)
        if self.omit_match and self.omit_match.match(file_path):
            # Turns out this file was omitted, so don't pull it back
            # in as unexecuted.
            continue
        self.data.touch_file(file_path, plugin_name)
def __init__(self, morf, coverage=None):
    """Create a reporter for `morf` (a module object or a file name)."""
    self.coverage = coverage

    filename = source_for_morf(morf)
    super().__init__(canonical_filename(filename))

    if hasattr(morf, '__name__'):
        # Modules report under their dotted name rendered as a path;
        # packages get an explicit "__init__" component.
        relname = morf.__name__.replace(".", os.sep)
        if os.path.basename(filename).startswith('__init__.'):
            relname = relname + os.sep + "__init__"
        relname = relname + ".py"
    else:
        relname = relative_filename(filename)
    self.relname = relname

    # Lazily-computed state, filled in by later accessor calls.
    self._source = None
    self._parser = None
    self._excluded = None
def should_trace(self, original_filename, frame):  # pragma: no cover
    """Decide whether `original_filename` should be traced.

    Builds a FileDisposition answering with hypothesis's own include
    check; when an existing collector is wrapped, files it would have
    traced are remembered for later propagation.
    """
    disp = FileDisposition()
    assert original_filename is not None
    disp.original_filename = original_filename
    disp.canonical_filename = encoded_filepath(
        canonical_filename(original_filename))
    disp.source_filename = disp.canonical_filename
    disp.reason = ''
    disp.file_tracer = None
    disp.has_dynamic_filename = False
    disp.trace = hypothesis_check_include(disp.canonical_filename)
    if not disp.trace:
        disp.reason = 'hypothesis internal reasons'
    elif self.__existing_collector is not None:
        # Ask the wrapped collector too, and remember files it wanted
        # so their data can be propagated to it later.
        check = self.__existing_collector.should_trace(
            original_filename, frame)
        if check.trace:
            self.files_to_propagate.add(check.canonical_filename)
    return disp
def __init__(self, morf, coverage=None):
    """Build a file reporter for `morf` (a module object or a file name)."""
    self.coverage = coverage

    filename = source_for_morf(morf)

    super(PythonFileReporter, self).__init__(files.canonical_filename(filename))

    if hasattr(morf, '__name__'):
        # Modules report under their dotted name rendered as a path;
        # packages get an explicit "__init__" component.
        name = morf.__name__.replace(".", os.sep)
        if os.path.basename(filename).startswith('__init__.'):
            name += os.sep + "__init__"
        name += ".py"
        name = files.unicode_filename(name)
    else:
        name = files.relative_filename(filename)
    self.relname = name

    # Lazily-computed state, filled in by later accessor calls.
    self._source = None
    self._parser = None
    self._statements = None
    self._excluded = None
def _should_trace_internal(self, filename, frame):
    """Decide whether to trace execution in `filename`, with a reason.

    This function is called from the trace function. As each new file name
    is encountered, this function determines whether it is traced or not.

    Returns a FileDisposition object.

    """
    original_filename = filename
    disp = _disposition_init(self.collector.file_disposition_class, filename)

    def nope(disp, reason):
        """Simple helper to make it easy to return NO."""
        disp.trace = False
        disp.reason = reason
        return disp

    # Compiled Python files have two file names: frame.f_code.co_filename is
    # the file name at the time the .pyc was compiled. The second name is
    # __file__, which is where the .pyc was actually loaded from. Since
    # .pyc files can be moved after compilation (for example, by being
    # installed), we look for __file__ in the frame and prefer it to the
    # co_filename value.
    dunder_file = frame.f_globals.get('__file__')
    if dunder_file:
        filename = self._source_for_file(dunder_file)
        if original_filename and not original_filename.startswith('<'):
            orig = os.path.basename(original_filename)
            if orig != os.path.basename(filename):
                # Files shouldn't be renamed when moved. This happens when
                # exec'ing code. If it seems like something is wrong with
                # the frame's file name, then just use the original.
                filename = original_filename

    if not filename:
        # Empty string is pretty useless.
        return nope(disp, "empty string isn't a file name")

    if filename.startswith('memory:'):
        return nope(disp, "memory isn't traceable")

    if filename.startswith('<'):
        # Lots of non-file execution is represented with artificial
        # file names like "<string>", "<doctest readme.txt[0]>", or
        # "<exec_function>". Don't ever trace these executions, since we
        # can't do anything with the data later anyway.
        return nope(disp, "not a real file name")

    # pyexpat does a dumb thing, calling the trace function explicitly from
    # C code with a C file name.
    if re.search(r"[/\\]Modules[/\\]pyexpat.c", filename):
        return nope(disp, "pyexpat lies about itself")

    # Jython reports the .class file to the tracer, use the source file.
    if filename.endswith("$py.class"):
        filename = filename[:-9] + ".py"

    canonical = files.canonical_filename(filename)
    disp.canonical_filename = canonical

    # Try the plugins, see if they have an opinion about the file.
    plugin = None
    for plugin in self.plugins.file_tracers:
        if not plugin._coverage_enabled:
            continue

        try:
            file_tracer = plugin.file_tracer(canonical)
            if file_tracer is not None:
                file_tracer._coverage_plugin = plugin
                disp.trace = True
                disp.file_tracer = file_tracer
                if file_tracer.has_dynamic_source_filename():
                    disp.has_dynamic_filename = True
                else:
                    disp.source_filename = files.canonical_filename(
                        file_tracer.source_filename()
                    )
                break
        except Exception:
            # A misbehaving plugin is disabled rather than allowed to
            # break tracing for everything else.
            self._warn(
                "Disabling plugin %r due to an exception:" % (
                    plugin._coverage_plugin_name
                )
            )
            traceback.print_exc()
            plugin._coverage_enabled = False
            continue
    else:
        # No plugin wanted it: it's Python.
        disp.trace = True
        disp.source_filename = canonical

    if not disp.has_dynamic_filename:
        if not disp.source_filename:
            raise CoverageException(
                "Plugin %r didn't set source_filename for %r" %
                (plugin, disp.original_filename)
            )
        # Apply include/omit filtering to the chosen source file.
        reason = self._check_include_omit_etc_internal(
            disp.source_filename, frame,
        )
        if reason:
            nope(disp, reason)

    return disp
def _init(self):
    """Set all the initial state.

    This is called by the public methods to initialize state. This lets us
    construct a :class:`Coverage` object, then tweak its state before this
    function is called.

    """
    # Idempotent: everything below runs at most once per instance.
    if self._inited:
        return

    # Create and configure the debugging controller. COVERAGE_DEBUG_FILE
    # is an environment variable, the name of a file to append debug logs
    # to.
    if self._debug_file is None:
        debug_file_name = os.environ.get("COVERAGE_DEBUG_FILE")
        if debug_file_name:
            self._debug_file = open(debug_file_name, "a")
        else:
            self._debug_file = sys.stderr
    self.debug = DebugControl(self.config.debug, self._debug_file)

    # Load plugins
    self.plugins = Plugins.load_plugins(self.config.plugins, self.config, self.debug)

    # _exclude_re is a dict that maps exclusion list names to compiled
    # regexes.
    self._exclude_re = {}
    self._exclude_regex_stale()

    files.set_relative_directory()

    # The source argument can be directories or package names: existing
    # paths are canonicalized into `self.source`, everything else is
    # treated as a package name in `self.source_pkgs`.
    self.source = []
    self.source_pkgs = []
    for src in self.config.source or []:
        if os.path.exists(src):
            self.source.append(files.canonical_filename(src))
        else:
            self.source_pkgs.append(src)

    self.omit = prep_patterns(self.config.omit)
    self.include = prep_patterns(self.config.include)

    concurrency = self.config.concurrency
    if concurrency == "multiprocessing":
        # Multiprocessing is handled by patching the module, not by the
        # Collector, so the concurrency setting is cleared afterwards.
        patch_multiprocessing()
        concurrency = None

    self.collector = Collector(
        should_trace=self._should_trace,
        check_include=self._check_include_omit_etc,
        timid=self.config.timid,
        branch=self.config.branch,
        warn=self._warn,
        concurrency=concurrency,
    )

    # Early warning if we aren't going to be able to support plugins.
    if self.plugins.file_tracers and not self.collector.supports_plugins:
        self._warn(
            "Plugin file tracers (%s) aren't supported with %s" % (
                ", ".join(
                    plugin._coverage_plugin_name
                    for plugin in self.plugins.file_tracers
                ),
                self.collector.tracer_name(),
            )
        )
        for plugin in self.plugins.file_tracers:
            plugin._coverage_enabled = False

    # Suffixes are a bit tricky.  We want to use the data suffix only when
    # collecting data, not when combining data.  So we save it as
    # `self.run_suffix` now, and promote it to `self.data_suffix` if we
    # find that we are collecting data later.
    if self._data_suffix or self.config.parallel:
        if not isinstance(self._data_suffix, string_class):
            # if data_suffix=True, use .machinename.pid.random
            self._data_suffix = True
    else:
        self._data_suffix = None
    self.data_suffix = None
    self.run_suffix = self._data_suffix

    # Create the data file.  We do this at construction time so that the
    # data file will be written into the directory where the process
    # started rather than wherever the process eventually chdir'd to.
    self.data = CoverageData(debug=self.debug)
    self.data_files = CoverageDataFiles(basename=self.config.data_file, warn=self._warn)

    # The directories for files considered "installed with the interpreter".
    self.pylib_dirs = set()
    if not self.config.cover_pylib:
        # Look at where some standard modules are located. That's the
        # indication for "installed with the interpreter". In some
        # environments (virtualenv, for example), these modules may be
        # spread across a few locations. Look at all the candidate modules
        # we've imported, and take all the different ones.
        for m in (atexit, inspect, os, platform, re, _structseq, traceback):
            if m is not None and hasattr(m, "__file__"):
                self.pylib_dirs.add(self._canonical_dir(m))
        if _structseq and not hasattr(_structseq, '__file__'):
            # PyPy 2.4 has no __file__ in the builtin modules, but the code
            # objects still have the file names.  So dig into one to find
            # the path to exclude.
            structseq_new = _structseq.structseq_new
            try:
                # Python 2 spells the code object "func_code"...
                structseq_file = structseq_new.func_code.co_filename
            except AttributeError:
                # ...Python 3 spells it "__code__".
                structseq_file = structseq_new.__code__.co_filename
            self.pylib_dirs.add(self._canonical_dir(structseq_file))

    # To avoid tracing the coverage.py code itself, we skip anything
    # located where we are.
    self.cover_dirs = [self._canonical_dir(__file__)]
    if env.TESTING:
        # When testing, we use PyContracts, which should be considered
        # part of coverage.py, and it uses six. Exclude those directories
        # just as we exclude ourselves.
        import contracts, six
        for mod in [contracts, six]:
            self.cover_dirs.append(self._canonical_dir(mod))

    # Set the reporting precision.
    Numbers.set_precision(self.config.precision)

    atexit.register(self._atexit)

    self._inited = True

    # Create the matchers we need for _should_trace.  When an explicit
    # source is configured, it wins; otherwise fall back on the "where we
    # are" and "installed with the interpreter" directory matchers.
    if self.source or self.source_pkgs:
        self.source_match = TreeMatcher(self.source)
        self.source_pkgs_match = ModuleMatcher(self.source_pkgs)
    else:
        if self.cover_dirs:
            self.cover_match = TreeMatcher(self.cover_dirs)
        if self.pylib_dirs:
            self.pylib_match = TreeMatcher(self.pylib_dirs)
    if self.include:
        self.include_match = FnmatchMatcher(self.include)
    if self.omit:
        self.omit_match = FnmatchMatcher(self.omit)

    # The user may want to debug things, show info if desired.
    wrote_any = False
    if self.debug.should('config'):
        config_info = sorted(self.config.__dict__.items())
        self.debug.write_formatted_info("config", config_info)
        wrote_any = True

    if self.debug.should('sys'):
        self.debug.write_formatted_info("sys", self.sys_info())
        for plugin in self.plugins:
            header = "sys: " + plugin._coverage_plugin_name
            info = plugin.sys_info()
            self.debug.write_formatted_info(header, info)
        wrote_any = True

    if wrote_any:
        self.debug.write_formatted_info("end", ())
def configure(self, config):
    """Apply the configuration to get ready for decision-time."""
    # Configured sources that exist as directories are file paths;
    # everything else is taken to be a package name.
    for entry in config.source or []:
        if os.path.isdir(entry):
            self.source.append(canonical_filename(entry))
        else:
            self.source_pkgs.append(entry)
    self.source_pkgs_unmatched = list(self.source_pkgs)

    self.omit = prep_patterns(config.run_omit)
    self.include = prep_patterns(config.run_include)

    # The directories for files considered "installed with the interpreter".
    self.pylib_paths = set()
    if not config.cover_pylib:
        # Standard modules show where the interpreter's files live.  In
        # some environments (virtualenv, for example) they can be spread
        # over several locations, so look at every candidate module we've
        # already imported and collect all the distinct paths.
        candidates = (
            atexit, inspect, os, platform, _pypy_irc_topic, re,
            _structseq, traceback,
        )
        self.pylib_paths.update(
            canonical_path(mod, directory=True)
            for mod in candidates
            if mod is not None and hasattr(mod, "__file__")
        )

        if _structseq and not hasattr(_structseq, '__file__'):
            # PyPy 2.4 has no __file__ in the builtin modules, but the
            # code objects still carry file names, so dig into one to find
            # the path to exclude.  The "filename" might be synthetic,
            # don't be fooled by those.
            maker = _structseq.structseq_new
            try:
                code_obj = maker.func_code
            except AttributeError:
                code_obj = maker.__code__
            if not code_obj.co_filename.startswith("<"):
                self.pylib_paths.add(canonical_path(code_obj.co_filename))

    # To avoid tracing the coverage.py code itself, we skip anything
    # located where we are.
    self.cover_paths = [canonical_path(__file__, directory=True)]
    if env.TESTING:
        # Don't include our own test code.
        self.cover_paths.append(os.path.join(self.cover_paths[0], "tests"))

        # When testing, we use PyContracts, which should be considered
        # part of coverage.py, and it uses six.  Exclude those directories
        # just as we exclude ourselves.
        import contracts
        import six
        self.cover_paths.append(canonical_path(contracts))
        self.cover_paths.append(canonical_path(six))

    # Create the matchers we need for should_trace.  Explicit source takes
    # precedence; otherwise use the cover/pylib directory matchers.
    if self.source or self.source_pkgs:
        self.source_match = TreeMatcher(self.source)
        self.source_pkgs_match = ModuleMatcher(self.source_pkgs)
    else:
        matcher_specs = (
            ('cover_match', self.cover_paths),
            ('pylib_match', self.pylib_paths),
        )
        for attr_name, paths in matcher_specs:
            if paths:
                setattr(self, attr_name, TreeMatcher(paths))
    if self.include:
        self.include_match = FnmatchMatcher(self.include)
    if self.omit:
        self.omit_match = FnmatchMatcher(self.omit)