def _initDecorator(test_tags: Set[str], user_tags: Set[str]) -> Callable:
    """Set up a decorator with `test_tags` and configure the environment
    variable with `user_tags`.

    Args:
        test_tags: tags forwarded to the ``tag(...)`` decorator factory.
        user_tags: tags exported to the ``TEST_TAGS`` environment variable
            as a comma-separated string (set iteration order, so the order
            of entries is unspecified).

    Returns:
        The decorator produced by ``tag(*test_tags)``.

    Raises:
        UnsatisfiedAssumption: if configuring the tags raises ``ValueError``.
    """
    try:
        os.environ["TEST_TAGS"] = ",".join(user_tags)
        return tag(*test_tags)
    except ValueError as err:
        # Chain the original error so the real cause survives in the
        # traceback instead of being silently replaced.
        raise UnsatisfiedAssumption() from err
def assume(condition: Any) -> bool:
    """Mark the current example as bad instead of failing the test.

    Behaves like an :ref:`assert <python:assert>` for test-data quality:
    when *condition* is falsy the example is rejected (via
    ``UnsatisfiedAssumption``) rather than reported as a failure, and
    Hypothesis will try to avoid generating similar examples in future.
    """
    if condition:
        return True
    raise UnsatisfiedAssumption()
def assume(condition):
    """Declare a precondition for this test.

    A falsy *condition* aborts the current example without failing it
    (``UnsatisfiedAssumption`` is raised), and Hypothesis makes a
    best-effort attempt to avoid similar examples in future. Returns
    ``True`` when the precondition holds.
    """
    if bool(condition):
        return True
    raise UnsatisfiedAssumption()
def do_draw(self, data: ConjectureData) -> Ex:
    """Draw from the underlying strategy and apply ``self.pack``.

    Up to three attempts are made; a rejected attempt that consumed no
    data (the buffer index did not move) is re-raised immediately since
    retrying would be pointless. After three data-consuming rejections,
    the whole draw is rejected.
    """
    for _attempt in range(3):
        start_index = data.index
        try:
            data.start_example(MAPPED_SEARCH_STRATEGY_DO_DRAW_LABEL)
            drawn = data.draw(self.mapped_strategy)
            packed = self.pack(drawn)
            data.stop_example()
            return packed
        except UnsatisfiedAssumption:
            data.stop_example(discard=True)
            # No data consumed means a retry would behave identically.
            if data.index == start_index:
                raise
    raise UnsatisfiedAssumption()
def do_draw(self, data: ConjectureData) -> Ex:
    """Draw from the underlying strategy and apply ``self.pack``.

    Retries up to three times, re-raising immediately when a rejected
    attempt consumed no data. When packing into a Mapping/Set type,
    ``BytesWarning`` is suppressed for the duration of the draw.
    """
    with warnings.catch_warnings():
        packs_into_collection = isinstance(self.pack, type) and issubclass(
            self.pack, (abc.Mapping, abc.Set)
        )
        if packs_into_collection:
            # Hashing/comparing drawn elements inside these containers can
            # emit BytesWarning under `python -b`; silence it here only.
            warnings.simplefilter("ignore", BytesWarning)
        for _attempt in range(3):
            start_index = data.index
            try:
                data.start_example(MAPPED_SEARCH_STRATEGY_DO_DRAW_LABEL)
                drawn = data.draw(self.mapped_strategy)
                packed = self.pack(drawn)
                data.stop_example()
                return packed
            except UnsatisfiedAssumption:
                data.stop_example(discard=True)
                # No data consumed means a retry would behave identically.
                if data.index == start_index:
                    raise
        raise UnsatisfiedAssumption()
def reject():
    """Unconditionally reject the current example.

    Raises ``UnsatisfiedAssumption`` so the example is marked as bad
    rather than failing the test — equivalent to ``assume(False)``.
    """
    raise UnsatisfiedAssumption()
def reject() -> NoReturn:
    """Unconditionally reject the current example.

    Raises ``UnsatisfiedAssumption`` so the example is marked as bad
    rather than failing the test — equivalent to ``assume(False)``.
    """
    raise UnsatisfiedAssumption()