Example #1
    def __init__(
        self, test_runner, search_strategy, test, settings, random, had_seed
    ):
        self.test_runner = test_runner
        self.search_strategy = search_strategy
        self.settings = settings
        self.at_least_one_success = False
        self.last_exception = None
        self.falsifying_examples = ()
        self.__was_flaky = False
        self.random = random
        self.__warned_deadline = False
        self.__existing_collector = None
        self.__test_runtime = None
        self.__had_seed = had_seed

        self.test = test

        self.coverage_data = CoverageData()
        self.files_to_propagate = set()
        self.failed_normally = False

        self.used_examples_from_database = False

        if settings.use_coverage and not IN_COVERAGE_TESTS:  # pragma: no cover
            if Collector._collectors:
                parent = Collector._collectors[-1]

                # We include any files the collector has already decided to
                # trace whether or not on re-investigation we still think it
                # wants to trace them. The reason for this is that in some
                # cases coverage gets the wrong answer when we run it
                # ourselves due to reasons that are our fault but are hard to
                # fix (we lie about where certain functions come from).
                # This causes us to not record the actual test bodies as
                # covered. But if we intended to trace test bodies then the
                # file must already have been traced when getting to this point
                # and so will already be in the collector's data. Hence we can
                # use that information to get the correct answer here.
                # See issue 997 for more context.
                self.files_to_propagate = set(parent.data)
                self.hijack_collector(parent)

            self.collector = Collector(
                branch=True,
                timid=FORCE_PURE_TRACER,
                should_trace=self.should_trace,
                check_include=hypothesis_check_include,
                concurrency='thread',
                warn=escalate_warning,
            )
            self.collector.reset()

            # Hide the other collectors from this one so it doesn't attempt to
            # pause them (we're doing trace function management ourselves so
            # this will just cause problems).
            self.collector._collectors = []
        else:
            self.collector = None
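
Example #1 (from Hypothesis) builds a private Collector around each test body so it can observe which lines a single test execution covers. As a rough, hedged sketch of the same idea using coverage.py's public API instead of the internal Collector (the run_with_coverage helper is illustrative, not Hypothesis code):

import coverage

def run_with_coverage(test, *args, **kwargs):
    # branch=True mirrors the Collector(branch=True, ...) call in the example.
    cov = coverage.Coverage(branch=True)
    cov.start()
    try:
        return test(*args, **kwargs)
    finally:
        cov.stop()
        data = cov.get_data()  # CoverageData holding what was just executed
        print(sorted(data.measured_files()))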
Example #2
    def _init_for_start(self):
        """Initialization for start()"""
        concurrency = self.config.concurrency or []
        if "multiprocessing" in concurrency:
            if not patch_multiprocessing:
                raise CoverageException(                    # pragma: only jython
                    "multiprocessing is not supported on this Python"
                )
            patch_multiprocessing(rcfile=self.config.config_file)
            # Multi-processing uses parallel for the subprocesses, so also use
            # it for the main process.
            self.config.parallel = True

        self._collector = Collector(
            should_trace=self._should_trace,
            check_include=self._check_include_omit_etc,
            timid=self.config.timid,
            branch=self.config.branch,
            warn=self._warn,
            concurrency=concurrency,
            )

        suffix = self._data_suffix_specified
        if suffix or self.config.parallel:
            if not isinstance(suffix, string_class):
                # if data_suffix=True, use .machinename.pid.random
                suffix = True
        else:
            suffix = None

        self._init_data(suffix)

        # Early warning if we aren't going to be able to support plugins.
        if self._plugins.file_tracers and not self._collector.supports_plugins:
            self._warn(
                "Plugin file tracers (%s) aren't supported with %s" % (
                    ", ".join(
                        plugin._coverage_plugin_name
                            for plugin in self._plugins.file_tracers
                        ),
                    self._collector.tracer_name(),
                    )
                )
            for plugin in self._plugins.file_tracers:
                plugin._coverage_enabled = False

        # Create the file classifying substructure.
        self._inorout = self._inorout_class(warn=self._warn)
        self._inorout.configure(self.config)
        self._inorout.plugins = self._plugins
        self._inorout.disp_class = self._collector.file_disposition_class

        atexit.register(self._atexit)
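
The warning in the middle of Example #2 fires when a plugin registers a file tracer that the active tracer cannot support. As a hedged sketch of what such a plugin looks like (the class name is hypothetical; coverage_init and add_file_tracer are coverage.py's documented plugin hooks):

import coverage

class HypotheticalFileTracerPlugin(coverage.CoveragePlugin):
    def file_tracer(self, filename):
        # Return a FileTracer for files this plugin measures, or None to decline.
        return None

def coverage_init(reg, options):
    # coverage.py calls this hook in each plugin module named in the config.
    reg.add_file_tracer(HypotheticalFileTracerPlugin())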
Example #3
    def __init__(
        self, test_runner, search_strategy, test, settings, random, had_seed
    ):
        self.test_runner = test_runner
        self.search_strategy = search_strategy
        self.settings = settings
        self.at_least_one_success = False
        self.last_exception = None
        self.falsifying_examples = ()
        self.__was_flaky = False
        self.random = random
        self.__warned_deadline = False
        self.__existing_collector = None
        self.__test_runtime = None
        self.__had_seed = had_seed

        self.test = test

        self.coverage_data = CoverageData()
        self.files_to_propagate = set()

        self.used_examples_from_database = False

        if settings.use_coverage and not IN_COVERAGE_TESTS:  # pragma: no cover
            if Collector._collectors:
                self.hijack_collector(Collector._collectors[-1])

            self.collector = Collector(
                branch=True,
                timid=FORCE_PURE_TRACER,
                should_trace=self.should_trace,
                check_include=hypothesis_check_include,
                concurrency='thread',
                warn=escalate_warning,
            )
            self.collector.reset()

            # Hide the other collectors from this one so it doesn't attempt to
            # pause them (we're doing trace function management ourselves so
            # this will just cause problems).
            self.collector._collectors = []
        else:
            self.collector = None
Example #4
    def _init_for_start(self):
        """Initialization for start()"""
        # Construct the collector.
        concurrency = self.config.concurrency or ()
        if "multiprocessing" in concurrency:
            if not patch_multiprocessing:
                raise CoverageException(                    # pragma: only jython
                    "multiprocessing is not supported on this Python"
                )
            patch_multiprocessing(rcfile=self.config.config_file)

        dycon = self.config.dynamic_context
        if not dycon or dycon == "none":
            context_switchers = []
        elif dycon == "test_function":
            context_switchers = [should_start_context_test_function]
        else:
            raise CoverageException(
                "Don't understand dynamic_context setting: {!r}".format(dycon)
            )

        context_switchers.extend(
            plugin.dynamic_context for plugin in self._plugins.context_switchers
        )

        should_start_context = combine_context_switchers(context_switchers)

        self._collector = Collector(
            should_trace=self._should_trace,
            check_include=self._check_include_omit_etc,
            should_start_context=should_start_context,
            file_mapper=self._file_mapper,
            timid=self.config.timid,
            branch=self.config.branch,
            warn=self._warn,
            concurrency=concurrency,
            )

        suffix = self._data_suffix_specified
        if suffix or self.config.parallel:
            if not isinstance(suffix, string_class):
                # if data_suffix=True, use .machinename.pid.random
                suffix = True
        else:
            suffix = None

        self._init_data(suffix)

        self._collector.use_data(self._data, self.config.context)

        # Early warning if we aren't going to be able to support plugins.
        if self._plugins.file_tracers and not self._collector.supports_plugins:
            self._warn(
                "Plugin file tracers (%s) aren't supported with %s" % (
                    ", ".join(
                        plugin._coverage_plugin_name
                            for plugin in self._plugins.file_tracers
                        ),
                    self._collector.tracer_name(),
                    )
                )
            for plugin in self._plugins.file_tracers:
                plugin._coverage_enabled = False

        # Create the file classifying substructure.
        self._inorout = self._inorout_class(warn=self._warn)
        self._inorout.configure(self.config)
        self._inorout.plugins = self._plugins
        self._inorout.disp_class = self._collector.file_disposition_class

        # It's useful to write debug info after initing for start.
        self._should_write_debug = True

        atexit.register(self._atexit)
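
Example #4 turns the dynamic_context option ("none" or "test_function") into a list of context-switcher callables. A minimal sketch of enabling that feature from user code, assuming the documented "run:dynamic_context" option name:

import coverage

cov = coverage.Coverage()
cov.set_option("run:dynamic_context", "test_function")  # the same value handled above
cov.start()
# ... every test function measured from here on is recorded under its own context ...
cov.stop()
cov.save()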
Example #5
    def _init(self):
        """Set all the initial state.

        This is called by the public methods to initialize state. This lets us
        construct a :class:`Coverage` object, then tweak its state before this
        function is called.

        """
        if self._inited:
            return

        # Create and configure the debugging controller. COVERAGE_DEBUG_FILE
        # is an environment variable, the name of a file to append debug logs
        # to.
        if self._debug_file is None:
            debug_file_name = os.environ.get("COVERAGE_DEBUG_FILE")
            if debug_file_name:
                self._debug_file = open(debug_file_name, "a")
            else:
                self._debug_file = sys.stderr
        self.debug = DebugControl(self.config.debug, self._debug_file)

        # Load plugins
        self.plugins = Plugins.load_plugins(self.config.plugins, self.config,
                                            self.debug)

        # _exclude_re is a dict that maps exclusion list names to compiled
        # regexes.
        self._exclude_re = {}
        self._exclude_regex_stale()

        files.set_relative_directory()

        # The source argument can be directories or package names.
        self.source = []
        self.source_pkgs = []
        for src in self.config.source or []:
            if os.path.exists(src):
                self.source.append(files.canonical_filename(src))
            else:
                self.source_pkgs.append(src)

        self.omit = prep_patterns(self.config.omit)
        self.include = prep_patterns(self.config.include)

        concurrency = self.config.concurrency
        if concurrency == "multiprocessing":
            patch_multiprocessing()
            concurrency = None

        self.collector = Collector(
            should_trace=self._should_trace,
            check_include=self._check_include_omit_etc,
            timid=self.config.timid,
            branch=self.config.branch,
            warn=self._warn,
            concurrency=concurrency,
        )

        # Early warning if we aren't going to be able to support plugins.
        if self.plugins.file_tracers and not self.collector.supports_plugins:
            self._warn("Plugin file tracers (%s) aren't supported with %s" % (
                ", ".join(plugin._coverage_plugin_name
                          for plugin in self.plugins.file_tracers),
                self.collector.tracer_name(),
            ))
            for plugin in self.plugins.file_tracers:
                plugin._coverage_enabled = False

        # Suffixes are a bit tricky.  We want to use the data suffix only when
        # collecting data, not when combining data.  So we save it as
        # `self.run_suffix` now, and promote it to `self.data_suffix` if we
        # find that we are collecting data later.
        if self._data_suffix or self.config.parallel:
            if not isinstance(self._data_suffix, string_class):
                # if data_suffix=True, use .machinename.pid.random
                self._data_suffix = True
        else:
            self._data_suffix = None
        self.data_suffix = None
        self.run_suffix = self._data_suffix

        # Create the data file.  We do this at construction time so that the
        # data file will be written into the directory where the process
        # started rather than wherever the process eventually chdir'd to.
        self.data = CoverageData(debug=self.debug)
        self.data_files = CoverageDataFiles(basename=self.config.data_file)

        # The directories for files considered "installed with the interpreter".
        self.pylib_dirs = set()
        if not self.config.cover_pylib:
            # Look at where some standard modules are located. That's the
            # indication for "installed with the interpreter". In some
            # environments (virtualenv, for example), these modules may be
            # spread across a few locations. Look at all the candidate modules
            # we've imported, and take all the different ones.
            for m in (atexit, inspect, os, platform, _structseq, traceback):
                if m is not None and hasattr(m, "__file__"):
                    self.pylib_dirs.add(self._canonical_dir(m))
            if _structseq and not hasattr(_structseq, '__file__'):
                # PyPy 2.4 has no __file__ in the builtin modules, but the code
                # objects still have the file names.  So dig into one to find
                # the path to exclude.
                structseq_new = _structseq.structseq_new
                try:
                    structseq_file = structseq_new.func_code.co_filename
                except AttributeError:
                    structseq_file = structseq_new.__code__.co_filename
                self.pylib_dirs.add(self._canonical_dir(structseq_file))

        # To avoid tracing the coverage.py code itself, we skip anything
        # located where we are.
        self.cover_dirs = [self._canonical_dir(__file__)]
        if env.TESTING:
            # When testing, we use PyContracts, which should be considered
            # part of coverage.py, and it uses six. Exclude those directories
            # just as we exclude ourselves.
            import contracts, six
            for mod in [contracts, six]:
                self.cover_dirs.append(self._canonical_dir(mod))

        # Set the reporting precision.
        Numbers.set_precision(self.config.precision)

        atexit.register(self._atexit)

        self._inited = True

        # Create the matchers we need for _should_trace
        if self.source or self.source_pkgs:
            self.source_match = TreeMatcher(self.source)
            self.source_pkgs_match = ModuleMatcher(self.source_pkgs)
        else:
            if self.cover_dirs:
                self.cover_match = TreeMatcher(self.cover_dirs)
            if self.pylib_dirs:
                self.pylib_match = TreeMatcher(self.pylib_dirs)
        if self.include:
            self.include_match = FnmatchMatcher(self.include)
        if self.omit:
            self.omit_match = FnmatchMatcher(self.omit)

        # The user may want to debug things, show info if desired.
        wrote_any = False
        if self.debug.should('config'):
            config_info = sorted(self.config.__dict__.items())
            self.debug.write_formatted_info("config", config_info)
            wrote_any = True

        if self.debug.should('sys'):
            self.debug.write_formatted_info("sys", self.sys_info())
            for plugin in self.plugins:
                header = "sys: " + plugin._coverage_plugin_name
                info = plugin.sys_info()
                self.debug.write_formatted_info(header, info)
            wrote_any = True

        if wrote_any:
            self.debug.write_formatted_info("end", ())
Example #6
    def __init__(self,
                 data_file=None,
                 data_suffix=None,
                 cover_pylib=None,
                 auto_data=False,
                 timid=None,
                 branch=None,
                 config_file=True,
                 source=None,
                 omit=None,
                 include=None):
        """
        `data_file` is the base name of the data file to use, defaulting to
        ".coverage".  `data_suffix` is appended (with a dot) to `data_file` to
        create the final file name.  If `data_suffix` is simply True, then a
        suffix is created with the machine and process identity included.

        `cover_pylib` is a boolean determining whether Python code installed
        with the Python interpreter is measured.  This includes the Python
        standard library and any packages installed with the interpreter.

        If `auto_data` is true, then any existing data file will be read when
        coverage measurement starts, and data will be saved automatically when
        measurement stops.

        If `timid` is true, then a slower and simpler trace function will be
        used.  This is important for some environments where manipulation of
        tracing functions breaks the faster trace function.

        If `branch` is true, then branch coverage will be measured in addition
        to the usual statement coverage.

        `config_file` determines what config file to read.  If it is a string,
        it is the name of the config file to read.  If it is True, then a
        standard file is read (".coveragerc").  If it is False, then no file is
        read.

        `source` is a list of file paths or package names.  Only code located
        in the trees indicated by the file paths or package names will be
        measured.

        `include` and `omit` are lists of filename patterns. Files that match
        `include` will be measured, files that match `omit` will not.  Each
        will also accept a single string argument.

        """
        from coverage import __version__

        # A record of all the warnings that have been issued.
        self._warnings = []

        # Build our configuration from a number of sources:
        # 1: defaults:
        self.config = CoverageConfig()

        # 2: from the coveragerc file:
        if config_file:
            if config_file is True:
                config_file = ".coveragerc"
            try:
                self.config.from_file(config_file)
            except ValueError:
                _, err, _ = sys.exc_info()
                raise CoverageException("Couldn't read config file %s: %s" %
                                        (config_file, err))

        # 3: from environment variables:
        self.config.from_environment('COVERAGE_OPTIONS')
        env_data_file = os.environ.get('COVERAGE_FILE')
        if env_data_file:
            self.config.data_file = env_data_file

        # 4: from constructor arguments:
        self.config.from_args(data_file=data_file,
                              cover_pylib=cover_pylib,
                              timid=timid,
                              branch=branch,
                              parallel=bool_or_none(data_suffix),
                              source=source,
                              omit=omit,
                              include=include)

        self.auto_data = auto_data

        # _exclude_re is a dict mapping exclusion list names to compiled
        # regexes.
        self._exclude_re = {}
        self._exclude_regex_stale()

        self.file_locator = FileLocator()

        # The source argument can be directories or package names.
        self.source = []
        self.source_pkgs = []
        for src in self.config.source or []:
            if os.path.exists(src):
                self.source.append(self.file_locator.canonical_filename(src))
            else:
                self.source_pkgs.append(src)

        self.omit = prep_patterns(self.config.omit)
        self.include = prep_patterns(self.config.include)

        self.collector = Collector(self._should_trace,
                                   timid=self.config.timid,
                                   branch=self.config.branch,
                                   warn=self._warn)

        # Suffixes are a bit tricky.  We want to use the data suffix only when
        # collecting data, not when combining data.  So we save it as
        # `self.run_suffix` now, and promote it to `self.data_suffix` if we
        # find that we are collecting data later.
        if data_suffix or self.config.parallel:
            if not isinstance(data_suffix, string_class):
                # if data_suffix=True, use .machinename.pid.random
                data_suffix = True
        else:
            data_suffix = None
        self.data_suffix = None
        self.run_suffix = data_suffix

        # Create the data file.  We do this at construction time so that the
        # data file will be written into the directory where the process
        # started rather than wherever the process eventually chdir'd to.
        self.data = CoverageData(basename=self.config.data_file,
                                 collector="coverage v%s" % __version__)

        # The dirs for files considered "installed with the interpreter".
        self.pylib_dirs = []
        if not self.config.cover_pylib:
            # Look at where some standard modules are located. That's the
            # indication for "installed with the interpreter". In some
            # environments (virtualenv, for example), these modules may be
            # spread across a few locations. Look at all the candidate modules
            # we've imported, and take all the different ones.
            for m in (atexit, os, random, socket):
                if hasattr(m, "__file__"):
                    m_dir = self._canonical_dir(m)
                    if m_dir not in self.pylib_dirs:
                        self.pylib_dirs.append(m_dir)

        # To avoid tracing the coverage code itself, we skip anything located
        # where we are.
        self.cover_dir = self._canonical_dir(__file__)

        # The matchers for _should_trace, created when tracing starts.
        self.source_match = None
        self.pylib_match = self.cover_match = None
        self.include_match = self.omit_match = None

        # Only _harvest_data once per measurement cycle.
        self._harvested = False

        # Set the reporting precision.
        Numbers.set_precision(self.config.precision)

        # Is it ok for no data to be collected?
        self._warn_no_data = True
        self._started = False

        atexit.register(self._atexit)
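
Example #6's docstring explains that data_suffix=True expands to a machine- and process-specific suffix so parallel runs don't overwrite each other's data. A minimal sketch of that workflow with the public API (the worker comment is illustrative):

import coverage

cov = coverage.Coverage(data_suffix=True)  # writes e.g. .coverage.<machine>.<pid>.<random>
cov.start()
# ... run this worker's share of the code under measurement ...
cov.stop()
cov.save()

combined = coverage.Coverage()
combined.combine()  # merge all .coverage.* pieces into a single .coverage file
combined.save()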
Example #7
    def _init(self):
        """Set all the initial state.

        This is called by the public methods to initialize state. This lets us
        construct a Coverage object, then tweak its state before this function
        is called.

        """
        from coverage import __version__

        if self._inited:
            return

        # Create and configure the debugging controller.
        if self._debug_file is None:
            self._debug_file = sys.stderr
        self.debug = DebugControl(self.config.debug, self._debug_file)

        # Load plugins
        self.plugins = Plugins.load_plugins(self.config.plugins, self.config)

        # TEMPORARY, because the plugin support is implemented in PyTracer.
        # This will be removed when that support is moved into CTracer.
        if self.plugins:
            self._warn("Setting timid=True to support plugins.")
            self.config.timid = True

        self.file_tracers = []
        for plugin in self.plugins:
            if overrides(plugin, "file_tracer", CoveragePlugin):
                self.file_tracers.append(plugin)
        self.file_tracers.append(None)      # The Python case.

        # _exclude_re is a dict mapping exclusion list names to compiled
        # regexes.
        self._exclude_re = {}
        self._exclude_regex_stale()

        self.file_locator = FileLocator()

        # The source argument can be directories or package names.
        self.source = []
        self.source_pkgs = []
        for src in self.config.source or []:
            if os.path.exists(src):
                self.source.append(self.file_locator.canonical_filename(src))
            else:
                self.source_pkgs.append(src)

        self.omit = prep_patterns(self.config.omit)
        self.include = prep_patterns(self.config.include)

        self.collector = Collector(
            should_trace=self._should_trace,
            check_include=self._check_include_omit_etc,
            timid=self.config.timid,
            branch=self.config.branch,
            warn=self._warn,
            concurrency=self.config.concurrency,
            )

        # Suffixes are a bit tricky.  We want to use the data suffix only when
        # collecting data, not when combining data.  So we save it as
        # `self.run_suffix` now, and promote it to `self.data_suffix` if we
        # find that we are collecting data later.
        if self._data_suffix or self.config.parallel:
            if not isinstance(self._data_suffix, string_class):
                # if data_suffix=True, use .machinename.pid.random
                self._data_suffix = True
        else:
            self._data_suffix = None
        self.data_suffix = None
        self.run_suffix = self._data_suffix

        # Create the data file.  We do this at construction time so that the
        # data file will be written into the directory where the process
        # started rather than wherever the process eventually chdir'd to.
        self.data = CoverageData(
            basename=self.config.data_file,
            collector="coverage v%s" % __version__,
            debug=self.debug,
            )

        # The dirs for files considered "installed with the interpreter".
        self.pylib_dirs = set()
        if not self.config.cover_pylib:
            # Look at where some standard modules are located. That's the
            # indication for "installed with the interpreter". In some
            # environments (virtualenv, for example), these modules may be
            # spread across a few locations. Look at all the candidate modules
            # we've imported, and take all the different ones.
            for m in (atexit, os, platform, random, socket, _structseq):
                if m is not None and hasattr(m, "__file__"):
                    self.pylib_dirs.add(self._canonical_dir(m))
            if _structseq and not hasattr(_structseq, '__file__'):
                # PyPy 2.4 has no __file__ in the builtin modules, but the code
                # objects still have the filenames.  So dig into one to find
                # the path to exclude.
                structseq_new = _structseq.structseq_new
                try:
                    structseq_file = structseq_new.func_code.co_filename
                except AttributeError:
                    structseq_file = structseq_new.__code__.co_filename
                self.pylib_dirs.add(self._canonical_dir(structseq_file))

        # To avoid tracing the coverage code itself, we skip anything located
        # where we are.
        self.cover_dir = self._canonical_dir(__file__)

        # Set the reporting precision.
        Numbers.set_precision(self.config.precision)

        atexit.register(self._atexit)

        self._inited = True

        # Create the matchers we need for _should_trace
        if self.source or self.source_pkgs:
            self.source_match = TreeMatcher(self.source)
            self.source_pkgs_match = ModuleMatcher(self.source_pkgs)
        else:
            if self.cover_dir:
                self.cover_match = TreeMatcher([self.cover_dir])
            if self.pylib_dirs:
                self.pylib_match = TreeMatcher(self.pylib_dirs)
        if self.include:
            self.include_match = FnmatchMatcher(self.include)
        if self.omit:
            self.omit_match = FnmatchMatcher(self.omit)

        # The user may want to debug things, show info if desired.
        wrote_any = False
        if self.debug.should('config'):
            config_info = sorted(self.config.__dict__.items())
            self.debug.write_formatted_info("config", config_info)
            wrote_any = True

        if self.debug.should('sys'):
            self.debug.write_formatted_info("sys", self.sys_info())
            for plugin in self.plugins:
                header = "sys: " + plugin.plugin_name
                info = plugin.sys_info()
                self.debug.write_formatted_info(header, info)
            wrote_any = True

        if wrote_any:
            self.debug.write_formatted_info("end", ())
Example #8
    def _init(self):
        """Set all the initial state.

        This is called by the public methods to initialize state. This lets us
        construct a :class:`Coverage` object, then tweak its state before this
        function is called.

        """
        if self._inited:
            return

        self._inited = True

        # Create and configure the debugging controller. COVERAGE_DEBUG_FILE
        # is an environment variable, the name of a file to append debug logs
        # to.
        if self._debug_file is None:
            debug_file_name = os.environ.get("COVERAGE_DEBUG_FILE")
            if debug_file_name:
                self._debug_file = open(debug_file_name, "a")
            else:
                self._debug_file = sys.stderr
        self._debug = DebugControl(self.config.debug, self._debug_file)

        # _exclude_re is a dict that maps exclusion list names to compiled regexes.
        self._exclude_re = {}

        set_relative_directory()

        # Load plugins
        self._plugins = Plugins.load_plugins(self.config.plugins, self.config,
                                             self._debug)

        # Run configuring plugins.
        for plugin in self._plugins.configurers:
            # We need an object with set_option and get_option. Either self or
            # self.config will do. Choosing randomly stops people from doing
            # other things with those objects, against the public API.  Yes,
            # this is a bit childish. :)
            plugin.configure([self, self.config][int(time.time()) % 2])

        concurrency = self.config.concurrency or []
        if "multiprocessing" in concurrency:
            if not patch_multiprocessing:
                raise CoverageException(  # pragma: only jython
                    "multiprocessing is not supported on this Python")
            patch_multiprocessing(rcfile=self.config.config_file)
            # Multi-processing uses parallel for the subprocesses, so also use
            # it for the main process.
            self.config.parallel = True

        self._collector = Collector(
            should_trace=self._should_trace,
            check_include=self._check_include_omit_etc,
            timid=self.config.timid,
            branch=self.config.branch,
            warn=self._warn,
            concurrency=concurrency,
        )

        # Early warning if we aren't going to be able to support plugins.
        if self._plugins.file_tracers and not self._collector.supports_plugins:
            self._warn("Plugin file tracers (%s) aren't supported with %s" % (
                ", ".join(plugin._coverage_plugin_name
                          for plugin in self._plugins.file_tracers),
                self._collector.tracer_name(),
            ))
            for plugin in self._plugins.file_tracers:
                plugin._coverage_enabled = False

        # Create the file classifying substructure.
        self._inorout = self._inorout_class(warn=self._warn)
        self._inorout.configure(self.config)
        self._inorout.plugins = self._plugins
        self._inorout.disp_class = self._collector.file_disposition_class

        # Suffixes are a bit tricky.  We want to use the data suffix only when
        # collecting data, not when combining data.  So we save it as
        # `self._run_suffix` now, and promote it to `self._data_suffix` if we
        # find that we are collecting data later.
        if self._data_suffix_specified or self.config.parallel:
            if not isinstance(self._data_suffix_specified, string_class):
                # if data_suffix=True, use .machinename.pid.random
                self._data_suffix_specified = True
        else:
            self._data_suffix_specified = None
        self._data_suffix = None
        self._run_suffix = self._data_suffix_specified

        # Create the data file.  We do this at construction time so that the
        # data file will be written into the directory where the process
        # started rather than wherever the process eventually chdir'd to.
        self._data = CoverageData(debug=self._debug)
        self._data_files = CoverageDataFiles(
            basename=self.config.data_file,
            warn=self._warn,
            debug=self._debug,
        )

        # Set the reporting precision.
        Numbers.set_precision(self.config.precision)

        atexit.register(self._atexit)

        # The user may want to debug things, show info if desired.
        self._write_startup_debug()
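
Example #8's multiprocessing branch patches the multiprocessing module and forces parallel mode so every worker process writes its own data file. A hedged caller-side sketch (the config file path is illustrative; this mode reads settings from the rcfile it is given):

import coverage

cov = coverage.Coverage(concurrency=["multiprocessing"], config_file=".coveragerc")
cov.start()
# ... spawn multiprocessing workers here; each writes a separate data file ...
cov.stop()
cov.save()
cov.combine()  # fold the per-process data files back into the main one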
Example #9
    def __init__(self, test_runner, search_strategy, test, settings, random):
        self.test_runner = test_runner
        self.search_strategy = search_strategy
        self.settings = settings
        self.at_least_one_success = False
        self.last_exception = None
        self.repr_for_last_exception = None
        self.falsifying_examples = ()
        self.__was_flaky = False
        self.random = random
        self.__warned_deadline = False
        self.__existing_collector = None
        self.__test_runtime = None
        self.__in_final_replay = False

        if self.settings.deadline is None:
            self.test = test
        else:

            @proxies(test)
            def timed_test(*args, **kwargs):
                self.__test_runtime = None
                start = time.time()
                result = test(*args, **kwargs)
                runtime = (time.time() - start) * 1000
                self.__test_runtime = runtime
                if self.settings.deadline is not_set:
                    if (not self.__warned_deadline and runtime >= 200):
                        self.__warned_deadline = True
                        note_deprecation(
                            ('Test took %.2fms to run. In future the default '
                             'deadline setting will be 200ms, which will '
                             'make this an error. You can set deadline to '
                             'an explicit value of e.g. %d to turn tests '
                             'slower than this into an error, or you can set '
                             'it to None to disable this check entirely.') % (
                                 runtime,
                                 ceil(runtime / 100) * 100,
                             ))
                elif runtime >= self.current_deadline:
                    raise DeadlineExceeded(runtime, self.settings.deadline)
                return result

            self.test = timed_test

        self.coverage_data = CoverageData()
        self.files_to_propagate = set()

        if settings.use_coverage and not IN_COVERAGE_TESTS:  # pragma: no cover
            if Collector._collectors:
                self.hijack_collector(Collector._collectors[-1])

            self.collector = Collector(
                branch=True,
                timid=FORCE_PURE_TRACER,
                should_trace=self.should_trace,
                check_include=hypothesis_check_include,
                concurrency='thread',
                warn=escalate_warning,
            )
            self.collector.reset()

            # Hide the other collectors from this one so it doesn't attempt to
            # pause them (we're doing trace function management ourselves so
            # this will just cause problems).
            self.collector._collectors = []
        else:
            self.collector = None
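
The timed_test wrapper in Example #9 is what enforces the deadline setting and raises DeadlineExceeded. From the test author's side the same behaviour is reached through Hypothesis's public settings API; a minimal sketch (the test body is illustrative):

from hypothesis import given, settings, strategies as st

@settings(deadline=200)           # milliseconds; deadline=None disables the check
@given(st.integers())
def test_fast_enough(x):
    assert x + 0 == x             # anything slower than 200ms fails with DeadlineExceeded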
Example #10
    def __init__(self,
                 data_file=None,
                 data_suffix=None,
                 cover_pylib=None,
                 auto_data=False,
                 timid=None,
                 branch=None,
                 config_file=True,
                 source=None,
                 omit=None,
                 include=None):
        """
        `data_file` is the base name of the data file to use, defaulting to
        ".coverage".  `data_suffix` is appended (with a dot) to `data_file` to
        create the final file name.  If `data_suffix` is simply True, then a
        suffix is created with the machine and process identity included.

        `cover_pylib` is a boolean determining whether Python code installed
        with the Python interpreter is measured.  This includes the Python
        standard library and any packages installed with the interpreter.

        If `auto_data` is true, then any existing data file will be read when
        coverage measurement starts, and data will be saved automatically when
        measurement stops.

        If `timid` is true, then a slower and simpler trace function will be
        used.  This is important for some environments where manipulation of
        tracing functions breaks the faster trace function.

        If `branch` is true, then branch coverage will be measured in addition
        to the usual statement coverage.

        `config_file` determines what config file to read.  If it is a string,
        it is the name of the config file to read.  If it is True, then a
        standard file is read (".coveragerc").  If it is False, then no file is
        read.

        `source` is a list of file paths or package names.  Only code located
        in the trees indicated by the file paths or package names will be
        measured.

        `include` and `omit` are lists of filename patterns. Files that match
        `include` will be measured, files that match `omit` will not.

        """
        from coverage import __version__

        # Build our configuration from a number of sources:
        # 1: defaults:
        self.config = CoverageConfig()

        # 2: from the coveragerc file:
        if config_file:
            if config_file is True:
                config_file = ".coveragerc"
            self.config.from_file(config_file)

        # 3: from environment variables:
        self.config.from_environment('COVERAGE_OPTIONS')
        env_data_file = os.environ.get('COVERAGE_FILE')
        if env_data_file:
            self.config.data_file = env_data_file

        # 4: from constructor arguments:
        self.config.from_args(data_file=data_file,
                              cover_pylib=cover_pylib,
                              timid=timid,
                              branch=branch,
                              parallel=bool_or_none(data_suffix),
                              source=source,
                              omit=omit,
                              include=include)

        self.auto_data = auto_data
        self.atexit_registered = False

        self.exclude_re = ""
        self._compile_exclude()

        self.file_locator = FileLocator()

        # The source argument can be directories or package names.
        self.source = []
        self.source_pkgs = []
        for src in self.config.source or []:
            if os.path.exists(src):
                self.source.append(self.file_locator.canonical_filename(src))
            else:
                self.source_pkgs.append(src)

        self.omit = self._abs_files(self.config.omit)
        self.include = self._abs_files(self.config.include)

        self.collector = Collector(self._should_trace,
                                   timid=self.config.timid,
                                   branch=self.config.branch)

        # Suffixes are a bit tricky.  We want to use the data suffix only when
        # collecting data, not when combining data.  So we save it as
        # `self.run_suffix` now, and promote it to `self.data_suffix` if we
        # find that we are collecting data later.
        if data_suffix or self.config.parallel:
            if not isinstance(data_suffix, string_class):
                # if data_suffix=True, use .machinename.pid.random
                data_suffix = True
        else:
            data_suffix = None
        self.data_suffix = None
        self.run_suffix = data_suffix

        # Create the data file.  We do this at construction time so that the
        # data file will be written into the directory where the process
        # started rather than wherever the process eventually chdir'd to.
        self.data = CoverageData(basename=self.config.data_file,
                                 collector="coverage v%s" % __version__)

        # The dirs for files considered "installed with the interpreter".
        self.pylib_dirs = []
        if not self.config.cover_pylib:
            # Look at where the "os" module is located.  That's the indication
            # for "installed with the interpreter".
            os_dir = self.canonical_dir(os.__file__)
            self.pylib_dirs.append(os_dir)

            # In a virtualenv, there're actually two lib directories. Find the
            # other one.  This is kind of ad-hoc, but it works.
            random_dir = self.canonical_dir(random.__file__)
            if random_dir != os_dir:
                self.pylib_dirs.append(random_dir)

        # To avoid tracing the coverage code itself, we skip anything located
        # where we are.
        self.cover_dir = self.canonical_dir(__file__)

        # The matchers for _should_trace, created when tracing starts.
        self.source_match = None
        self.pylib_match = self.cover_match = None
        self.include_match = self.omit_match = None
Example #11
File: control.py  Project: samucc/kuma
    def __init__(self, data_file=None, data_suffix=False, cover_pylib=False,
                auto_data=False, timid=False, branch=False):
        """        
        `data_file` is the base name of the data file to use, defaulting to
        ".coverage".  `data_suffix` is appended to `data_file` to create the
        final file name.  If `data_suffix` is simply True, then a suffix is
        created with the machine and process identity included.
        
        `cover_pylib` is a boolean determining whether Python code installed
        with the Python interpreter is measured.  This includes the Python
        standard library and any packages installed with the interpreter.
        
        If `auto_data` is true, then any existing data file will be read when
        coverage measurement starts, and data will be saved automatically when
        measurement stops.
        
        If `timid` is true, then a slower and simpler trace function will be
        used.  This is important for some environments where manipulation of
        tracing functions breaks the faster trace function.
        
        If `branch` is true, then branch coverage will be measured in addition
        to the usual statement coverage.

        """
        from coverage import __version__
        
        self.cover_pylib = cover_pylib
        self.auto_data = auto_data
        self.atexit_registered = False

        self.exclude_re = ""
        self.exclude_list = []
        
        self.file_locator = FileLocator()
        
        # Timidity: for nose users, read an environment variable.  This is a
        # cheap hack, since the rest of the command line arguments aren't
        # recognized, but it solves some users' problems.
        timid = timid or ('--timid' in os.environ.get('COVERAGE_OPTIONS', ''))
        self.collector = Collector(
            self._should_trace, timid=timid, branch=branch
            )

        # Create the data file.
        if data_suffix:
            if not isinstance(data_suffix, string_class):
                # if data_suffix=True, use .machinename.pid
                data_suffix = ".%s.%s" % (socket.gethostname(), os.getpid())
        else:
            data_suffix = None

        self.data = CoverageData(
            basename=data_file, suffix=data_suffix,
            collector="coverage v%s" % __version__
            )

        # The default exclude pattern.
        self.exclude('# *pragma[: ]*[nN][oO] *[cC][oO][vV][eE][rR]')

        # The prefix for files considered "installed with the interpreter".
        if not self.cover_pylib:
            # Look at where the "os" module is located.  That's the indication
            # for "installed with the interpreter".
            os_file = self.file_locator.canonical_filename(os.__file__)
            self.pylib_prefix = os.path.split(os_file)[0]

        # To avoid tracing the coverage code itself, we skip anything located
        # where we are.
        here = self.file_locator.canonical_filename(__file__)
        self.cover_prefix = os.path.split(here)[0]
Example #12
    def __init__(self,
                 data_file=None,
                 data_suffix=None,
                 cover_pylib=None,
                 auto_data=False,
                 timid=None,
                 branch=None,
                 config_file=True,
                 source=None,
                 omit=None,
                 include=None,
                 debug=None,
                 debug_file=None):
        from coverage import __version__
        self._warnings = []
        self.config = CoverageConfig()
        if config_file:
            if config_file is True:
                config_file = '.coveragerc'
            try:
                self.config.from_file(config_file)
            except ValueError:
                _, err, _ = sys.exc_info()
                raise CoverageException("Couldn't read config file %s: %s" %
                                        (config_file, err))

        self.config.from_environment('COVERAGE_OPTIONS')
        env_data_file = os.environ.get('COVERAGE_FILE')
        if env_data_file:
            self.config.data_file = env_data_file
        self.config.from_args(data_file=data_file,
                              cover_pylib=cover_pylib,
                              timid=timid,
                              branch=branch,
                              parallel=bool_or_none(data_suffix),
                              source=source,
                              omit=omit,
                              include=include,
                              debug=debug)
        self.debug = DebugControl(self.config.debug, debug_file or sys.stderr)
        self.auto_data = auto_data
        self._exclude_re = {}
        self._exclude_regex_stale()
        self.file_locator = FileLocator()
        self.source = []
        self.source_pkgs = []
        for src in self.config.source or []:
            if os.path.exists(src):
                self.source.append(self.file_locator.canonical_filename(src))
            else:
                self.source_pkgs.append(src)

        self.omit = prep_patterns(self.config.omit)
        self.include = prep_patterns(self.config.include)
        self.collector = Collector(self._should_trace,
                                   timid=self.config.timid,
                                   branch=self.config.branch,
                                   warn=self._warn)
        if data_suffix or self.config.parallel:
            if not isinstance(data_suffix, string_class):
                data_suffix = True
        else:
            data_suffix = None
        self.data_suffix = None
        self.run_suffix = data_suffix
        self.data = CoverageData(basename=self.config.data_file,
                                 collector='coverage v%s' % __version__,
                                 debug=self.debug)
        self.pylib_dirs = []
        if not self.config.cover_pylib:
            for m in (atexit, os, random, socket, _structseq):
                if m is not None and hasattr(m, '__file__'):
                    m_dir = self._canonical_dir(m)
                    if m_dir not in self.pylib_dirs:
                        self.pylib_dirs.append(m_dir)

        self.cover_dir = self._canonical_dir(__file__)
        self.source_match = None
        self.pylib_match = self.cover_match = None
        self.include_match = self.omit_match = None
        Numbers.set_precision(self.config.precision)
        self._warn_no_data = True
        self._warn_unimported_source = True
        self._started = False
        self._measured = False
        atexit.register(self._atexit)
Example #13
    def __init__(self,
                 data_file=None,
                 data_suffix=None,
                 cover_pylib=None,
                 auto_data=False,
                 timid=None,
                 branch=None,
                 config_file=True,
                 source=None,
                 omit=None,
                 include=None,
                 debug=None,
                 debug_file=None):
        """
        `data_file` is the base name of the data file to use, defaulting to
        ".coverage".  `data_suffix` is appended (with a dot) to `data_file` to
        create the final file name.  If `data_suffix` is simply True, then a
        suffix is created with the machine and process identity included.
        
        `cover_pylib` is a boolean determining whether Python code installed
        with the Python interpreter is measured.  This includes the Python
        standard library and any packages installed with the interpreter.
        
        If `auto_data` is true, then any existing data file will be read when
        coverage measurement starts, and data will be saved automatically when
        measurement stops.
        
        If `timid` is true, then a slower and simpler trace function will be
        used.  This is important for some environments where manipulation of
        tracing functions breaks the faster trace function.
        
        If `branch` is true, then branch coverage will be measured in addition
        to the usual statement coverage.
        
        `config_file` determines what config file to read.  If it is a string,
        it is the name of the config file to read.  If it is True, then a
        standard file is read (".coveragerc").  If it is False, then no file is
        read.
        
        `source` is a list of file paths or package names.  Only code located
        in the trees indicated by the file paths or package names will be
        measured.
        
        `include` and `omit` are lists of filename patterns. Files that match
        `include` will be measured, files that match `omit` will not.  Each
        will also accept a single string argument.
        
        `debug` is a list of strings indicating what debugging information is
        desired. `debug_file` is the file to write debug messages to,
        defaulting to stderr.
        
        """
        from coverage import __version__
        self._warnings = []
        self.config = CoverageConfig()
        if config_file:
            if config_file is True:
                config_file = '.coveragerc'
            try:
                self.config.from_file(config_file)
            except ValueError:
                _, err, _ = sys.exc_info()
                raise CoverageException("Couldn't read config file %s: %s" %
                                        (config_file, err))

        self.config.from_environment('COVERAGE_OPTIONS')
        env_data_file = os.environ.get('COVERAGE_FILE')
        if env_data_file:
            self.config.data_file = env_data_file
        self.config.from_args(data_file=data_file,
                              cover_pylib=cover_pylib,
                              timid=timid,
                              branch=branch,
                              parallel=bool_or_none(data_suffix),
                              source=source,
                              omit=omit,
                              include=include,
                              debug=debug)
        self.debug = DebugControl(self.config.debug, debug_file or sys.stderr)
        self.auto_data = auto_data
        self._exclude_re = {}
        self._exclude_regex_stale()
        self.file_locator = FileLocator()
        self.source = []
        self.source_pkgs = []
        for src in self.config.source or []:
            if os.path.exists(src):
                self.source.append(self.file_locator.canonical_filename(src))
            else:
                self.source_pkgs.append(src)

        self.omit = prep_patterns(self.config.omit)
        self.include = prep_patterns(self.config.include)
        self.collector = Collector(self._should_trace,
                                   timid=self.config.timid,
                                   branch=self.config.branch,
                                   warn=self._warn)
        if data_suffix or self.config.parallel:
            if not isinstance(data_suffix, string_class):
                data_suffix = True
        else:
            data_suffix = None
        self.data_suffix = None
        self.run_suffix = data_suffix
        self.data = CoverageData(basename=self.config.data_file,
                                 collector='coverage v%s' % __version__,
                                 debug=self.debug)
        self.pylib_dirs = []
        if not self.config.cover_pylib:
            for m in (atexit, os, random, socket, _structseq):
                if m is not None and hasattr(m, '__file__'):
                    m_dir = self._canonical_dir(m)
                    if m_dir not in self.pylib_dirs:
                        self.pylib_dirs.append(m_dir)

        self.cover_dir = self._canonical_dir(__file__)
        self.source_match = None
        self.pylib_match = self.cover_match = None
        self.include_match = self.omit_match = None
        Numbers.set_precision(self.config.precision)
        self._warn_no_data = True
        self._warn_unimported_source = True
        self._started = False
        self._measured = False
        atexit.register(self._atexit)
Example #14
    def _init_for_start(self):
        """Initialization for start()"""
        # Construct the collector.
        concurrency = self.config.concurrency or []
        if "multiprocessing" in concurrency:
            if not patch_multiprocessing:
                raise ConfigError(  # pragma: only jython
                    "multiprocessing is not supported on this Python")
            if self.config.config_file is None:
                raise ConfigError(
                    "multiprocessing requires a configuration file")
            patch_multiprocessing(rcfile=self.config.config_file)

        dycon = self.config.dynamic_context
        if not dycon or dycon == "none":
            context_switchers = []
        elif dycon == "test_function":
            context_switchers = [should_start_context_test_function]
        else:
            raise ConfigError(
                f"Don't understand dynamic_context setting: {dycon!r}")

        context_switchers.extend(plugin.dynamic_context
                                 for plugin in self._plugins.context_switchers)

        should_start_context = combine_context_switchers(context_switchers)

        self._collector = Collector(
            should_trace=self._should_trace,
            check_include=self._check_include_omit_etc,
            should_start_context=should_start_context,
            file_mapper=self._file_mapper,
            timid=self.config.timid,
            branch=self.config.branch,
            warn=self._warn,
            concurrency=concurrency,
        )

        suffix = self._data_suffix_specified
        if suffix:
            if not isinstance(suffix, str):
                # if data_suffix=True, use .machinename.pid.random
                suffix = True
        elif self.config.parallel:
            if suffix is None:
                suffix = True
            elif not isinstance(suffix, str):
                suffix = bool(suffix)
        else:
            suffix = None

        self._init_data(suffix)

        self._collector.use_data(self._data, self.config.context)

        # Early warning if we aren't going to be able to support plugins.
        if self._plugins.file_tracers and not self._collector.supports_plugins:
            self._warn(
                "Plugin file tracers ({}) aren't supported with {}".format(
                    ", ".join(plugin._coverage_plugin_name
                              for plugin in self._plugins.file_tracers),
                    self._collector.tracer_name(),
                ))
            for plugin in self._plugins.file_tracers:
                plugin._coverage_enabled = False

        # Create the file classifying substructure.
        self._inorout = InOrOut(
            warn=self._warn,
            debug=(self._debug if self._debug.should('trace') else None),
        )
        self._inorout.configure(self.config)
        self._inorout.plugins = self._plugins
        self._inorout.disp_class = self._collector.file_disposition_class

        # It's useful to write debug info after initing for start.
        self._should_write_debug = True

        # Register our clean-up handlers.
        atexit.register(self._atexit)
        is_main = (threading.current_thread() == threading.main_thread())
        if is_main and not env.WINDOWS:
            # The Python docs seem to imply that SIGTERM works uniformly even
            # on Windows, but that's not my experience, and this agrees:
            # https://stackoverflow.com/questions/35772001/x/35792192#35792192
            self._old_sigterm = signal.signal(signal.SIGTERM, self._on_sigterm)
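
Example #14 finishes by registering cleanup both with atexit and, on the main thread of non-Windows platforms, as a SIGTERM handler, keeping the previous handler so it can be restored. A hedged sketch of that save-then-chain pattern in isolation (flush_data is a stand-in for saving measurement data):

import os
import signal

def flush_data():
    pass  # stand-in for writing out collected data

def _on_sigterm(signum, frame):
    flush_data()
    # Put back whatever handler was installed before, then re-deliver the signal.
    signal.signal(signal.SIGTERM, _old_sigterm or signal.SIG_DFL)
    os.kill(os.getpid(), signal.SIGTERM)

_old_sigterm = signal.signal(signal.SIGTERM, _on_sigterm)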