def test_simple(self):
    self.make_file("hello.py")
    files.set_relative_directory()
    self.assertEqual(files.relative_filename(u"hello.py"), u"hello.py")
    a = self.abs_path("hello.py")
    self.assertNotEqual(a, "hello.py")
    self.assertEqual(files.relative_filename(a), "hello.py")
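# A minimal standalone sketch (not from the test suite) of the behavior the
# test above exercises, assuming coverage.py's internal coverage.files module:
# once set_relative_directory() records the current directory, absolute paths
# under it are relativized, and already-relative paths pass through unchanged.
import os.path
from coverage import files

files.set_relative_directory()
print(files.relative_filename("hello.py"))                    # -> "hello.py"
print(files.relative_filename(os.path.abspath("hello.py")))   # -> "hello.py"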
def _init(self):
    """Set all the initial state.

    This is called by the public methods to initialize state. This lets us
    construct a :class:`Coverage` object, then tweak its state before this
    function is called.

    """
    if self._inited:
        return

    self._inited = True

    # Create and configure the debugging controller. COVERAGE_DEBUG_FILE
    # is an environment variable, the name of a file to append debug logs
    # to.
    self._debug = DebugControl(self.config.debug, self._debug_file)

    # _exclude_re is a dict that maps exclusion list names to compiled regexes.
    self._exclude_re = {}

    set_relative_directory()

    # Load plugins
    self._plugins = Plugins.load_plugins(self.config.plugins, self.config, self._debug)

    # Run configuring plugins.
    for plugin in self._plugins.configurers:
        # We need an object with set_option and get_option. Either self or
        # self.config will do. Choosing randomly stops people from doing
        # other things with those objects, against the public API. Yes,
        # this is a bit childish. :)
        plugin.configure([self, self.config][int(time.time()) % 2])
def test_simple(self):
    self.make_file("hello.py")
    files.set_relative_directory()
    assert files.relative_filename(u"hello.py") == u"hello.py"
    a = self.abs_path("hello.py")
    assert a != "hello.py"
    assert files.relative_filename(a) == "hello.py"
def test_simple(self):
    self.make_file("hello.py")
    files.set_relative_directory()
    self.assertEqual(files.relative_filename("hello.py"), "hello.py")
    a = self.abs_path("hello.py")
    self.assertNotEqual(a, "hello.py")
    self.assertEqual(files.relative_filename(a), "hello.py")
def test_peer_directories(self):
    self.make_file("sub/proj1/file1.py")
    self.make_file("sub/proj2/file2.py")
    a1 = self.abs_path("sub/proj1/file1.py")
    a2 = self.abs_path("sub/proj2/file2.py")
    d = os.path.normpath("sub/proj1")
    self.chdir(d)
    files.set_relative_directory()
    self.assertEqual(files.relative_filename(a1), "file1.py")
    self.assertEqual(files.relative_filename(a2), a2)
def test_canonical_filename_ensure_cache_hit(self):
    self.make_file("sub/proj1/file1.py")
    d = actual_path(self.abs_path("sub/proj1"))
    os.chdir(d)
    files.set_relative_directory()
    canonical_path = files.canonical_filename('sub/proj1/file1.py')
    assert canonical_path == self.abs_path('file1.py')
    # After the filename has been converted, it should be in the cache.
    assert 'sub/proj1/file1.py' in files.CANONICAL_FILENAME_CACHE
    assert files.canonical_filename('sub/proj1/file1.py') == self.abs_path('file1.py')
def test_peer_directories(self):
    self.make_file("sub/proj1/file1.py")
    self.make_file("sub/proj2/file2.py")
    a1 = self.abs_path("sub/proj1/file1.py")
    a2 = self.abs_path("sub/proj2/file2.py")
    d = os.path.normpath("sub/proj1")
    os.chdir(d)
    files.set_relative_directory()
    self.assertEqual(files.relative_filename(a1), "file1.py")
    self.assertEqual(files.relative_filename(a2), a2)
def test_filepath_contains_absolute_prefix_twice(self):
    # https://bitbucket.org/ned/coveragepy/issue/194
    # Build a path that has two pieces matching the absolute path prefix.
    # Technically, this test doesn't do that on Windows, but drive
    # letters make that impractical to achieve.
    files.set_relative_directory()
    d = abs_file(os.curdir)
    trick = os.path.splitdrive(d)[1].lstrip(os.path.sep)
    rel = os.path.join('sub', trick, 'file1.py')
    self.assertEqual(files.relative_filename(abs_file(rel)), rel)
def convert_to_relative_paths(cls, smother_obj):
    data = defaultdict(lambda: dict())
    set_relative_directory()
    for ctx, cover in smother_obj.data.items():
        for src, lines in cover.items():
            src = relative_filename(src)
            data[ctx][src] = lines
    result = cls()
    result.data = dict(data)
    return result
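# A hedged usage sketch for convert_to_relative_paths above.  The shape of
# smother_obj.data ({context: {source_file: lines}}) is inferred from the
# snippet; FakeSmother is a hypothetical stand-in for the real class, included
# only so the example is self-contained and runnable with coverage installed.
import os
from collections import defaultdict

from coverage.files import relative_filename, set_relative_directory


class FakeSmother(object):
    def __init__(self):
        self.data = {}

    @classmethod
    def convert_to_relative_paths(cls, smother_obj):
        data = defaultdict(dict)
        set_relative_directory()
        for ctx, cover in smother_obj.data.items():
            for src, lines in cover.items():
                data[ctx][relative_filename(src)] = lines
        result = cls()
        result.data = dict(data)
        return result


original = FakeSmother()
original.data = {"tests/test_mod::test_one": {os.path.abspath("pkg/mod.py"): [1, 2, 3]}}
relative = FakeSmother.convert_to_relative_paths(original)
# relative.data now keys the coverage by a path relative to the current
# directory rather than by the absolute path.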
def test_canonical_filename_ensure_cache_hit(self):
    self.make_file("sub/proj1/file1.py")
    d = actual_path(self.abs_path("sub/proj1"))
    self.chdir(d)
    files.set_relative_directory()
    canonical_path = files.canonical_filename('sub/proj1/file1.py')
    self.assertEqual(canonical_path, self.abs_path('file1.py'))
    # After the filename has been converted, it should be in the cache.
    self.assertIn('sub/proj1/file1.py', files.CANONICAL_FILENAME_CACHE)
    self.assertEqual(
        files.canonical_filename('sub/proj1/file1.py'),
        self.abs_path('file1.py'))
def setUp(self):
    super(MatcherTest, self).setUp()
    files.set_relative_directory()
def _init(self):
    """Set all the initial state.

    This is called by the public methods to initialize state. This lets us
    construct a :class:`Coverage` object, then tweak its state before this
    function is called.

    """
    if self._inited:
        return

    # Create and configure the debugging controller. COVERAGE_DEBUG_FILE
    # is an environment variable, the name of a file to append debug logs
    # to.
    if self._debug_file is None:
        debug_file_name = os.environ.get("COVERAGE_DEBUG_FILE")
        if debug_file_name:
            self._debug_file = open(debug_file_name, "a")
        else:
            self._debug_file = sys.stderr
    self.debug = DebugControl(self.config.debug, self._debug_file)

    # Load plugins
    self.plugins = Plugins.load_plugins(self.config.plugins, self.config, self.debug)

    # _exclude_re is a dict that maps exclusion list names to compiled
    # regexes.
    self._exclude_re = {}
    self._exclude_regex_stale()

    files.set_relative_directory()

    # The source argument can be directories or package names.
    self.source = []
    self.source_pkgs = []
    for src in self.config.source or []:
        if os.path.exists(src):
            self.source.append(files.canonical_filename(src))
        else:
            self.source_pkgs.append(src)

    self.omit = prep_patterns(self.config.omit)
    self.include = prep_patterns(self.config.include)

    concurrency = self.config.concurrency
    if concurrency == "multiprocessing":
        patch_multiprocessing()
        concurrency = None

    self.collector = Collector(
        should_trace=self._should_trace,
        check_include=self._check_include_omit_etc,
        timid=self.config.timid,
        branch=self.config.branch,
        warn=self._warn,
        concurrency=concurrency,
    )

    # Early warning if we aren't going to be able to support plugins.
    if self.plugins.file_tracers and not self.collector.supports_plugins:
        self._warn(
            "Plugin file tracers (%s) aren't supported with %s" % (
                ", ".join(
                    plugin._coverage_plugin_name
                    for plugin in self.plugins.file_tracers
                ),
                self.collector.tracer_name(),
            )
        )
        for plugin in self.plugins.file_tracers:
            plugin._coverage_enabled = False

    # Suffixes are a bit tricky. We want to use the data suffix only when
    # collecting data, not when combining data. So we save it as
    # `self.run_suffix` now, and promote it to `self.data_suffix` if we
    # find that we are collecting data later.
    if self._data_suffix or self.config.parallel:
        if not isinstance(self._data_suffix, string_class):
            # if data_suffix=True, use .machinename.pid.random
            self._data_suffix = True
    else:
        self._data_suffix = None
    self.data_suffix = None
    self.run_suffix = self._data_suffix

    # Create the data file. We do this at construction time so that the
    # data file will be written into the directory where the process
    # started rather than wherever the process eventually chdir'd to.
    self.data = CoverageData(debug=self.debug)
    self.data_files = CoverageDataFiles(basename=self.config.data_file, warn=self._warn)

    # The directories for files considered "installed with the interpreter".
    self.pylib_dirs = set()
    if not self.config.cover_pylib:
        # Look at where some standard modules are located. That's the
        # indication for "installed with the interpreter". In some
        # environments (virtualenv, for example), these modules may be
        # spread across a few locations. Look at all the candidate modules
        # we've imported, and take all the different ones.
        for m in (atexit, inspect, os, platform, re, _structseq, traceback):
            if m is not None and hasattr(m, "__file__"):
                self.pylib_dirs.add(self._canonical_dir(m))
        if _structseq and not hasattr(_structseq, '__file__'):
            # PyPy 2.4 has no __file__ in the builtin modules, but the code
            # objects still have the file names. So dig into one to find
            # the path to exclude.
            structseq_new = _structseq.structseq_new
            try:
                structseq_file = structseq_new.func_code.co_filename
            except AttributeError:
                structseq_file = structseq_new.__code__.co_filename
            self.pylib_dirs.add(self._canonical_dir(structseq_file))

    # To avoid tracing the coverage.py code itself, we skip anything
    # located where we are.
    self.cover_dirs = [self._canonical_dir(__file__)]
    if env.TESTING:
        # When testing, we use PyContracts, which should be considered
        # part of coverage.py, and it uses six. Exclude those directories
        # just as we exclude ourselves.
        import contracts, six
        for mod in [contracts, six]:
            self.cover_dirs.append(self._canonical_dir(mod))

    # Set the reporting precision.
    Numbers.set_precision(self.config.precision)

    atexit.register(self._atexit)

    self._inited = True

    # Create the matchers we need for _should_trace
    if self.source or self.source_pkgs:
        self.source_match = TreeMatcher(self.source)
        self.source_pkgs_match = ModuleMatcher(self.source_pkgs)
    else:
        if self.cover_dirs:
            self.cover_match = TreeMatcher(self.cover_dirs)
        if self.pylib_dirs:
            self.pylib_match = TreeMatcher(self.pylib_dirs)

    if self.include:
        self.include_match = FnmatchMatcher(self.include)
    if self.omit:
        self.omit_match = FnmatchMatcher(self.omit)

    # The user may want to debug things, show info if desired.
    wrote_any = False
    if self.debug.should('config'):
        config_info = sorted(self.config.__dict__.items())
        self.debug.write_formatted_info("config", config_info)
        wrote_any = True

    if self.debug.should('sys'):
        self.debug.write_formatted_info("sys", self.sys_info())
        for plugin in self.plugins:
            header = "sys: " + plugin._coverage_plugin_name
            info = plugin.sys_info()
            self.debug.write_formatted_info(header, info)
        wrote_any = True

    if wrote_any:
        self.debug.write_formatted_info("end", ())
def _init(self):
    """Set all the initial state.

    This is called by the public methods to initialize state. This lets us
    construct a :class:`Coverage` object, then tweak its state before this
    function is called.

    """
    if self._inited:
        return

    # Create and configure the debugging controller. COVERAGE_DEBUG_FILE
    # is an environment variable, the name of a file to append debug logs
    # to.
    if self._debug_file is None:
        debug_file_name = os.environ.get("COVERAGE_DEBUG_FILE")
        if debug_file_name:
            self._debug_file = open(debug_file_name, "a")
        else:
            self._debug_file = sys.stderr
    self.debug = DebugControl(self.config.debug, self._debug_file)

    # Load plugins
    self.plugins = Plugins.load_plugins(self.config.plugins, self.config, self.debug)

    # _exclude_re is a dict that maps exclusion list names to compiled
    # regexes.
    self._exclude_re = {}
    self._exclude_regex_stale()

    files.set_relative_directory()

    # The source argument can be directories or package names.
    self.source = []
    self.source_pkgs = []
    for src in self.config.source or []:
        if os.path.exists(src):
            self.source.append(files.canonical_filename(src))
        else:
            self.source_pkgs.append(src)

    self.omit = prep_patterns(self.config.omit)
    self.include = prep_patterns(self.config.include)

    concurrency = self.config.concurrency
    if concurrency == "multiprocessing":
        patch_multiprocessing()
        concurrency = None

    self.collector = Collector(
        should_trace=self._should_trace,
        check_include=self._check_include_omit_etc,
        timid=self.config.timid,
        branch=self.config.branch,
        warn=self._warn,
        concurrency=concurrency,
    )

    # Early warning if we aren't going to be able to support plugins.
    if self.plugins.file_tracers and not self.collector.supports_plugins:
        self._warn("Plugin file tracers (%s) aren't supported with %s" % (
            ", ".join(plugin._coverage_plugin_name for plugin in self.plugins.file_tracers),
            self.collector.tracer_name(),
        ))
        for plugin in self.plugins.file_tracers:
            plugin._coverage_enabled = False

    # Suffixes are a bit tricky. We want to use the data suffix only when
    # collecting data, not when combining data. So we save it as
    # `self.run_suffix` now, and promote it to `self.data_suffix` if we
    # find that we are collecting data later.
    if self._data_suffix or self.config.parallel:
        if not isinstance(self._data_suffix, string_class):
            # if data_suffix=True, use .machinename.pid.random
            self._data_suffix = True
    else:
        self._data_suffix = None
    self.data_suffix = None
    self.run_suffix = self._data_suffix

    # Create the data file. We do this at construction time so that the
    # data file will be written into the directory where the process
    # started rather than wherever the process eventually chdir'd to.
    self.data = CoverageData(debug=self.debug)
    self.data_files = CoverageDataFiles(basename=self.config.data_file)

    # The directories for files considered "installed with the interpreter".
    self.pylib_dirs = set()
    if not self.config.cover_pylib:
        # Look at where some standard modules are located. That's the
        # indication for "installed with the interpreter". In some
        # environments (virtualenv, for example), these modules may be
        # spread across a few locations. Look at all the candidate modules
        # we've imported, and take all the different ones.
        for m in (atexit, inspect, os, platform, _structseq, traceback):
            if m is not None and hasattr(m, "__file__"):
                self.pylib_dirs.add(self._canonical_dir(m))
        if _structseq and not hasattr(_structseq, '__file__'):
            # PyPy 2.4 has no __file__ in the builtin modules, but the code
            # objects still have the file names. So dig into one to find
            # the path to exclude.
            structseq_new = _structseq.structseq_new
            try:
                structseq_file = structseq_new.func_code.co_filename
            except AttributeError:
                structseq_file = structseq_new.__code__.co_filename
            self.pylib_dirs.add(self._canonical_dir(structseq_file))

    # To avoid tracing the coverage.py code itself, we skip anything
    # located where we are.
    self.cover_dirs = [self._canonical_dir(__file__)]
    if env.TESTING:
        # When testing, we use PyContracts, which should be considered
        # part of coverage.py, and it uses six. Exclude those directories
        # just as we exclude ourselves.
        import contracts, six
        for mod in [contracts, six]:
            self.cover_dirs.append(self._canonical_dir(mod))

    # Set the reporting precision.
    Numbers.set_precision(self.config.precision)

    atexit.register(self._atexit)

    self._inited = True

    # Create the matchers we need for _should_trace
    if self.source or self.source_pkgs:
        self.source_match = TreeMatcher(self.source)
        self.source_pkgs_match = ModuleMatcher(self.source_pkgs)
    else:
        if self.cover_dirs:
            self.cover_match = TreeMatcher(self.cover_dirs)
        if self.pylib_dirs:
            self.pylib_match = TreeMatcher(self.pylib_dirs)

    if self.include:
        self.include_match = FnmatchMatcher(self.include)
    if self.omit:
        self.omit_match = FnmatchMatcher(self.omit)

    # The user may want to debug things, show info if desired.
    wrote_any = False
    if self.debug.should('config'):
        config_info = sorted(self.config.__dict__.items())
        self.debug.write_formatted_info("config", config_info)
        wrote_any = True

    if self.debug.should('sys'):
        self.debug.write_formatted_info("sys", self.sys_info())
        for plugin in self.plugins:
            header = "sys: " + plugin._coverage_plugin_name
            info = plugin.sys_info()
            self.debug.write_formatted_info(header, info)
        wrote_any = True

    if wrote_any:
        self.debug.write_formatted_info("end", ())
def setup_test(self):
    super().setup_test()
    files.set_relative_directory()
def reset_filesdotpy_globals():
    """coverage/files.py has some unfortunate globals. Reset them every test."""
    set_relative_directory()
    yield
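# The reset above is typically wired up as an autouse pytest fixture in a
# conftest.py, so every test starts with fresh files.py module state.  The
# decorator shown here is a plausible registration, assuming pytest; it is a
# sketch, not a quote of the real conftest.
import pytest

from coverage.files import set_relative_directory


@pytest.fixture(autouse=True)
def reset_filesdotpy_globals():
    """coverage/files.py has some unfortunate globals. Reset them every test."""
    set_relative_directory()
    yield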
def setUp(self):
    super().setUp()
    files.set_relative_directory()
def _init(self):
    """Set all the initial state.

    This is called by the public methods to initialize state. This lets us
    construct a :class:`Coverage` object, then tweak its state before this
    function is called.

    """
    if self._inited:
        return

    self._inited = True

    # Create and configure the debugging controller. COVERAGE_DEBUG_FILE
    # is an environment variable, the name of a file to append debug logs
    # to.
    if self._debug_file is None:
        debug_file_name = os.environ.get("COVERAGE_DEBUG_FILE")
        if debug_file_name:
            self._debug_file = open(debug_file_name, "a")
        else:
            self._debug_file = sys.stderr
    self._debug = DebugControl(self.config.debug, self._debug_file)

    # _exclude_re is a dict that maps exclusion list names to compiled regexes.
    self._exclude_re = {}

    set_relative_directory()

    # Load plugins
    self._plugins = Plugins.load_plugins(self.config.plugins, self.config, self._debug)

    # Run configuring plugins.
    for plugin in self._plugins.configurers:
        # We need an object with set_option and get_option. Either self or
        # self.config will do. Choosing randomly stops people from doing
        # other things with those objects, against the public API. Yes,
        # this is a bit childish. :)
        plugin.configure([self, self.config][int(time.time()) % 2])

    concurrency = self.config.concurrency or []
    if "multiprocessing" in concurrency:
        if not patch_multiprocessing:
            raise CoverageException(                    # pragma: only jython
                "multiprocessing is not supported on this Python")
        patch_multiprocessing(rcfile=self.config.config_file)
        # Multi-processing uses parallel for the subprocesses, so also use
        # it for the main process.
        self.config.parallel = True

    self._collector = Collector(
        should_trace=self._should_trace,
        check_include=self._check_include_omit_etc,
        timid=self.config.timid,
        branch=self.config.branch,
        warn=self._warn,
        concurrency=concurrency,
    )

    # Early warning if we aren't going to be able to support plugins.
    if self._plugins.file_tracers and not self._collector.supports_plugins:
        self._warn("Plugin file tracers (%s) aren't supported with %s" % (
            ", ".join(plugin._coverage_plugin_name for plugin in self._plugins.file_tracers),
            self._collector.tracer_name(),
        ))
        for plugin in self._plugins.file_tracers:
            plugin._coverage_enabled = False

    # Create the file classifying substructure.
    self._inorout = self._inorout_class(warn=self._warn)
    self._inorout.configure(self.config)
    self._inorout.plugins = self._plugins
    self._inorout.disp_class = self._collector.file_disposition_class

    # Suffixes are a bit tricky. We want to use the data suffix only when
    # collecting data, not when combining data. So we save it as
    # `self._run_suffix` now, and promote it to `self._data_suffix` if we
    # find that we are collecting data later.
    if self._data_suffix_specified or self.config.parallel:
        if not isinstance(self._data_suffix_specified, string_class):
            # if data_suffix=True, use .machinename.pid.random
            self._data_suffix_specified = True
    else:
        self._data_suffix_specified = None
    self._data_suffix = None
    self._run_suffix = self._data_suffix_specified

    # Create the data file. We do this at construction time so that the
    # data file will be written into the directory where the process
    # started rather than wherever the process eventually chdir'd to.
    self._data = CoverageData(debug=self._debug)
    self._data_files = CoverageDataFiles(
        basename=self.config.data_file, warn=self._warn, debug=self._debug,
    )

    # Set the reporting precision.
    Numbers.set_precision(self.config.precision)

    atexit.register(self._atexit)

    # The user may want to debug things, show info if desired.
    self._write_startup_debug()