Example 1
def pytest_configure(config):
    core.running_under_pytest = True
    profile = config.getoption(LOAD_PROFILE_OPTION)
    if profile:
        settings.load_profile(profile)
    verbosity_name = config.getoption(VERBOSITY_OPTION)
    if verbosity_name:
        verbosity_value = Verbosity[verbosity_name]
        profile_name = "%s-with-%s-verbosity" % (
            settings._current_profile,
            verbosity_name,
        )
        # register_profile creates a new profile, exactly like the current one,
        # with the extra values given (in this case 'verbosity')
        settings.register_profile(profile_name, verbosity=verbosity_value)
        settings.load_profile(profile_name)
    seed = config.getoption(SEED_OPTION)
    if seed is not None:
        try:
            seed = int(seed)
        except ValueError:
            pass
        core.global_force_seed = seed
    config.addinivalue_line("markers",
                            "hypothesis: Tests which use hypothesis.")
Example 2
def pytest_configure(config):
    config.addinivalue_line("markers",
                            "hypothesis: Tests which use hypothesis.")
    if not _any_hypothesis_option(config):
        return
    from hypothesis import Phase, Verbosity, core, settings

    profile = config.getoption(LOAD_PROFILE_OPTION)
    if profile:
        settings.load_profile(profile)
    verbosity_name = config.getoption(VERBOSITY_OPTION)
    if verbosity_name and verbosity_name != settings.default.verbosity.name:
        verbosity_value = Verbosity[verbosity_name]
        name = f"{settings._current_profile}-with-{verbosity_name}-verbosity"
        # register_profile creates a new profile, exactly like the current one,
        # with the extra values given (in this case 'verbosity')
        settings.register_profile(name, verbosity=verbosity_value)
        settings.load_profile(name)
    if (config.getoption(EXPLAIN_OPTION)
            and Phase.explain not in settings.default.phases):
        name = f"{settings._current_profile}-with-explain-phase"
        phases = settings.default.phases + (Phase.explain, )
        settings.register_profile(name, phases=phases)
        settings.load_profile(name)

    seed = config.getoption(SEED_OPTION)
    if seed is not None:
        try:
            seed = int(seed)
        except ValueError:
            pass
        core.global_force_seed = seed
Example 3
def register_hypothesis_profiles():
    import hypothesis
    from hypothesis import settings, Verbosity, Phase

    stateful_step_count = int(os.getenv("PBT_STATEFUL_STEP_COUNT", 10))
    max_examples = int(os.getenv("PBT_MAX_EXAMPLES", 100))
    derandomize = True
    seed = int(os.getenv("PBT_SEED", 0))

    if seed != 0:
        patch_hypothesis_for_seed_handling(seed)
        derandomize = False

    patch_brownie_for_assertion_detection()

    settings.register_profile(
        "generate",
        stateful_step_count=stateful_step_count,
        max_examples=max_examples,
        phases=[Phase.generate],
        report_multiple_bugs=True,
        derandomize=derandomize,
        print_blob=True,
    )

    settings.register_profile(
        "shrinking",
        stateful_step_count=stateful_step_count,
        max_examples=max_examples,
        phases=[Phase.generate, Phase.shrink],
        report_multiple_bugs=True,
        derandomize=derandomize,
        print_blob=True,
    )
Example 4
def set_up_hypothesis() -> None:
    default_settings = settings(
        # Turn off the health checks because setUp/tearDown are too slow
        suppress_health_check=[HealthCheck.too_slow],
        # Turn off the example database; we don't have a way to persist this
        # or share this across runs, so we don't derive any benefit from it at
        # this time.
        database=None,
    )

    # Configure Hypothesis to run faster when iterating locally
    settings.register_profile(
        "dev", settings(default_settings, max_examples=5, timeout=0)
    )
    # ... and use the defaults (which have more combinations) when running
    # on CI, which we want to be more deterministic.
    settings.register_profile(
        "ci", settings(default_settings, derandomize=True, timeout=120)
    )

    # Use the dev profile by default, but use the ci profile on sandcastle.
    settings.load_profile(
        "ci" if is_sandcastle() else os.getenv("HYPOTHESIS_PROFILE", "dev")
    )

    # We need to set a global (but non-conflicting) path to store some state
    # during hypothesis example runs.  We want to avoid putting this state in
    # the repo.
    set_hypothesis_home_dir(tempfile.mkdtemp(prefix="eden_hypothesis."))
    atexit.register(cleanup_tmp_dir, pathlib.Path(hypothesis_home_dir()))
Example 5
def set_up_hypothesis() -> None:
    default_settings = settings(
        # Turn off the health checks because setUp/tearDown are too slow
        suppress_health_check=[HealthCheck.too_slow],
        # Turn off the example database; we don't have a way to persist this
        # or share this across runs, so we don't derive any benefit from it at
        # this time.
        database=None,
    )

    # Configure Hypothesis to run faster when iterating locally
    settings.register_profile(
        "dev", settings(default_settings, max_examples=5, timeout=0))
    # ... and use the defaults (which have more combinations) when running
    # on CI, which we want to be more deterministic.
    settings.register_profile(
        "ci", settings(default_settings, derandomize=True, timeout=120))

    # Use the dev profile by default, but use the ci profile on sandcastle.
    settings.load_profile(
        "ci" if is_sandcastle() else os.getenv("HYPOTHESIS_PROFILE", "dev"))

    # We need to set a global (but non-conflicting) path to store some state
    # during hypothesis example runs.  We want to avoid putting this state in
    # the repo.
    set_hypothesis_home_dir(tempfile.mkdtemp(prefix="eden_hypothesis."))
    atexit.register(cleanup_tmp_dir, pathlib.Path(hypothesis_home_dir()))
Example 6
def load_env() -> None:
    "Fixtures to load the hypothesis profile inside for all tests"
    settings.register_profile("ci", max_examples=200, deadline=None)
    settings.register_profile("dev",
                              max_examples=10,
                              deadline=timedelta(milliseconds=500))
    settings.load_profile(os.getenv("HYPOTHESIS_PROFILE", "dev"))
Example 7
def register_hypothesis_profiles():
    import hypothesis
    from hypothesis import settings, Verbosity, Phase

    derandomize = True

    if Options.SEED != 0:
        patch_hypothesis_for_seed_handling(Options.SEED)
        derandomize = False

    patch_hypothesis_for_fuzz_behavior()
    patch_brownie_for_assertion_detection()

    settings.register_profile(
        "generate",
        stateful_step_count=Options.STATEFUL_STEP_COUNT,
        max_examples=Options.MAX_EXAMPLES,
        phases=[Phase.generate],
        report_multiple_bugs=True,
        derandomize=derandomize,
        print_blob=True,
    )

    settings.register_profile(
        "shrinking",
        stateful_step_count=Options.STATEFUL_STEP_COUNT,
        max_examples=Options.MAX_EXAMPLES,
        phases=[Phase.generate, Phase.shrink],
        report_multiple_bugs=True,
        derandomize=derandomize,
        print_blob=True,
    )
Example 8
def _configure_hypothesis():
    from os import environ

    from hypothesis import (
        HealthCheck,
        settings,
    )

    settings.register_profile(
        "ci",
        suppress_health_check=[
            # CPU resources available to CI builds typically varies
            # significantly from run to run making it difficult to determine
            # if "too slow" data generation is a result of the code or the
            # execution environment.  Prevent these checks from
            # (intermittently) failing tests that are otherwise fine.
            HealthCheck.too_slow,
        ],
        # With the same reasoning, disable the test deadline.
        deadline=None,
    )

    profile_name = environ.get("MAGIC_FOLDER_HYPOTHESIS_PROFILE", "default")
    print("Loading Hypothesis profile {}".format(profile_name), file=stderr)
    settings.load_profile(profile_name)
Example 9
def run():
    warnings.filterwarnings('error', category=UnicodeWarning)
    warnings.filterwarnings('error', category=HypothesisDeprecationWarning)

    set_hypothesis_home_dir(mkdtemp())

    charmap()
    assert os.path.exists(charmap_file())
    assert isinstance(settings, type)

    # We do a smoke test here before we mess around with settings.
    x = settings()

    import hypothesis._settings as settings_module

    for s in settings_module.all_settings.values():
        v = getattr(x, s.name)
        # Check if it has a dynamically defined default and if so skip
        # comparison.
        if getattr(settings, s.name).show_default:
            assert v == s.default, '%r == x.%s != s.%s == %r' % (
                v,
                s.name,
                s.name,
                s.default,
            )

    settings.register_profile('default', settings(timeout=unlimited))

    settings.register_profile('speedy', settings(max_examples=5, ))

    settings.load_profile(os.getenv('HYPOTHESIS_PROFILE', 'default'))
Example 10
def pytest_configure(config):
    core.running_under_pytest = True
    profile = config.getoption(LOAD_PROFILE_OPTION)
    if profile:
        settings.load_profile(profile)
    verbosity_name = config.getoption(VERBOSITY_OPTION)
    if verbosity_name:
        verbosity_value = Verbosity[verbosity_name]
        profile_name = '%s-with-%s-verbosity' % (
            settings._current_profile, verbosity_name
        )
        # register_profile creates a new profile, exactly like the current one,
        # with the extra values given (in this case 'verbosity')
        settings.register_profile(profile_name, verbosity=verbosity_value)
        settings.load_profile(profile_name)
    seed = config.getoption(SEED_OPTION)
    file_name = config.getoption(OUTPUT_OPTION)
    global server_option, output_file_name
    server_option = config.getoption(SERVER_OPTION)
    if seed is not None:
        try:
            seed = int(seed)
        except ValueError:
            pass
        core.global_force_seed = seed

    if file_name is not None:
        output_file_name = file_name

    if server_option:
        delete_file(output_file_name)

    config.addinivalue_line(
        'markers',
        'hypothesis: Tests which use hypothesis.')
Example 11
class HypothesisProfiles:
    # Can't create the examples folder, so this doesn't work as a global
    db = ExampleDatabase("examples")

    def __init__(self):
        pass

    settings.register_profile(
        "bugfound",
        stateful_step_count=10,
        max_examples=10000,
        phases=[Phase.generate],
        report_multiple_bugs=True,
        derandomize=False,
        database=db,
        print_blob=True,
    )

    settings.register_profile(
        "bugfound_reuse",
        phases=[Phase.reuse],
        report_multiple_bugs=True,
        derandomize=False,
        database=db,
        print_blob=True,
    )

    settings.register_profile(
        "shrinking",
        phases=[Phase.reuse, Phase.shrink],
        report_multiple_bugs=True,
        derandomize=False,
        database=db,
        print_blob=True,
    )
Example 12
def _modify_hypothesis_settings(settings, name, parent):
    hp_settings.register_profile(
        name,
        parent=hp_settings.get_profile(parent),
        database=DirectoryBasedExampleDatabase(
            _get_data_folder().joinpath("hypothesis")),
        **settings,
    )
    hp_settings.load_profile(name)
Example 13
    def run_and_statis(self,
                       max_examples=100,
                       opset_version=[7, 9, 15],
                       reproduce=None,
                       min_success_num=25,
                       max_duration=-1):
        if os.getenv('HYPOTHESIS_TEST_PROFILE', 'ci') == "dev":
            max_examples *= 10
            min_success_num *= 10
            # in the dev profile there is no time limit
            max_duration = -1
        start_time = time.time()
        settings.register_profile(
            "ci",
            max_examples=max_examples,
            suppress_health_check=hypothesis.HealthCheck.all(),
            deadline=None,
            print_blob=True,
            derandomize=True,
            report_multiple_bugs=False,
        )
        settings.load_profile("ci")

        def sample_convert_generator(draw):
            return self.sample_convert_config(draw)

        def run_test(configs):
            return self.run_test(configs=configs)

        generator = st.composite(sample_convert_generator)
        loop_func = given(generator())(run_test)
        if reproduce is not None:
            loop_func = reproduce(loop_func)
        logging.info("Start to running test of {}".format(type(self)))

        paddle.disable_static()
        loop_func()

        logging.info(
            "===================Statistical Information===================")
        logging.info("Number of Generated Programs: {}".format(
            self.num_ran_models))
        successful_ran_programs = int(self.num_ran_models)
        if successful_ran_programs < min_success_num:
            logging.warning("satisfied_programs = ran_programs")
            logging.error(
                "At least {} programs need to ran successfully, but now only about {} programs satisfied."
                .format(min_success_num, successful_ran_programs))
            assert False
        used_time = time.time() - start_time
        logging.info("Used time: {} s".format(round(used_time, 2)))
        if max_duration > 0 and used_time > max_duration:
            logging.error(
                "The duration exceeds {} seconds, if this is neccessary, try to set a larger number for parameter `max_duration`."
                .format(max_duration))
            assert False
Example 14
def run():
    filterwarnings('error')
    filterwarnings('ignore', category=ImportWarning)
    filterwarnings('ignore', category=FutureWarning, module='pandas._version')

    # See https://github.com/numpy/numpy/pull/432
    filterwarnings('ignore', message='numpy.dtype size changed')
    filterwarnings('ignore', message='numpy.ufunc size changed')

    # Imported by Pandas in version 1.9, but fixed in later versions.
    filterwarnings(
        'ignore',
        message='Importing from numpy.testing.decorators is deprecated')
    filterwarnings(
        'ignore',
        message='Importing from numpy.testing.nosetester is deprecated')

    new_home = mkdtemp()
    set_hypothesis_home_dir(new_home)
    assert settings.default.database.path.startswith(new_home)

    charmap()
    assert os.path.exists(charmap_file()), charmap_file()
    assert isinstance(settings, type)

    # We do a smoke test here before we mess around with settings.
    x = settings()

    import hypothesis._settings as settings_module

    for s in settings_module.all_settings.values():
        v = getattr(x, s.name)
        # Check if it has a dynamically defined default and if so skip
        # comparison.
        if getattr(settings, s.name).show_default:
            assert v == s.default, '%r == x.%s != s.%s == %r' % (
                v,
                s.name,
                s.name,
                s.default,
            )

    settings.register_profile(
        'default',
        settings(max_examples=10 if IN_COVERAGE_TESTS else not_set,
                 timeout=unlimited,
                 use_coverage=not IN_COVERAGE_TESTS))

    settings.register_profile('with_coverage',
                              settings(
                                  timeout=unlimited,
                                  use_coverage=True,
                              ))

    settings.register_profile('speedy', settings(max_examples=5, ))

    settings.register_profile('debug', settings(verbosity=Verbosity.debug))

    settings.load_profile(os.getenv('HYPOTHESIS_PROFILE', 'default'))
Example 15
def _configure_hypothesis():
    from os import environ

    from hypothesis import (
        HealthCheck,
        settings,
    )

    settings.register_profile(
        "long",
        max_examples=100000,
    )

    profile_name = environ.get("PRIVACYPASS_HYPOTHESIS_PROFILE", "default")
    settings.load_profile(profile_name)
Example 16
def _setup_hypothesis():
    settings.register_profile(
        "ci",
        suppress_health_check=[
            # CPU resources available to CI builds typically varies
            # significantly from run to run making it difficult to determine
            # if "too slow" data generation is a result of the code or the
            # execution environment.  Prevent these checks from
            # (intermittently) failing tests that are otherwise fine.
            HealthCheck.too_slow,
        ],
        # By the same reasoning as above, disable the deadline check.
        deadline=None,
    )
    settings.load_profile(environ.get("TXKUBE_HYPOTHESIS_PROFILE", "default"))
Example 17
def setup_hypothesis():
    from hypothesis import settings, Verbosity

    settings.register_profile("lots", max_examples=100_000)
    settings.register_profile("ci", max_examples=1000)
    settings.register_profile("dev", max_examples=10)
    settings.register_profile("debug",
                              max_examples=10,
                              verbosity=Verbosity.verbose)
Example 18
def _modify_hypothesis_settings(settings, name, parent=None):
    settings = settings.copy()
    if parent is None:
        parent = hp_settings._current_profile

    if "phases" in settings:
        try:
            settings["phases"] = [getattr(Phase, k) for k, v in settings["phases"].items() if v]
        except AttributeError as exc:
            raise ValueError(f"'{exc.args[0]}' is not a valid hypothesis phase setting")

    hp_settings.register_profile(
        name,
        parent=hp_settings.get_profile(parent),
        database=DirectoryBasedExampleDatabase(_get_data_folder().joinpath("hypothesis")),
        **settings,
    )
    hp_settings.load_profile(name)
Example 19
def _init_hypothesis():
    # type: () -> None
    from os import environ

    if "CI" in environ:
        try:
            from hypothesis import HealthCheck, settings
        except ImportError:
            return

        settings.register_profile(
            "patience",
            settings(suppress_health_check=[
                HealthCheck.too_slow,
                HealthCheck.filter_too_much,
            ]),
        )
        settings.load_profile("patience")
Example 20
def _configure_hypothesis():
    from os import environ

    from hypothesis import (
        HealthCheck,
        settings,
    )

    # if you add more profiles here, note that profile names aren't
    # namespaced in any way and Hypothesis allows profile name
    # collisions to pass silently, then more or less randomly picks
    # one definition to use when you try to activate the name. So
    # please prefix any other profiles in here with "magic-folder-"
    # for a somewhat lower chance of collision.

    settings.register_profile(
        "magic-folder-fast",
        max_examples=1,
        # see magic-folder-ci profile below for justification
        suppress_health_check=[
            HealthCheck.too_slow,
        ],
        deadline=60 * 10 * 1000,  # _some_ number that's not "forever" (milliseconds)
    )

    settings.register_profile(
        "magic-folder-ci",
        suppress_health_check=[
            # CPU resources available to CI builds typically varies
            # significantly from run to run making it difficult to determine
            # if "too slow" data generation is a result of the code or the
            # execution environment.  Prevent these checks from
            # (intermittently) failing tests that are otherwise fine.
            HealthCheck.too_slow,
        ],
        # With the same reasoning, disable the test deadline.
        deadline=60 * 10 * 1000,  # _some_ number that's not "forever" (milliseconds)
    )

    profile_name = environ.get("MAGIC_FOLDER_HYPOTHESIS_PROFILE", "default")
    print("Loading Hypothesis profile {}".format(profile_name), file=stderr)
    settings.load_profile(profile_name)
Example 21
def _make_registrations():
    arraylib.make_registrations()
    builtinslib.make_registrations()
    collectionslib.make_registrations()
    datetimelib.make_registrations()
    mathlib.make_registrations()
    randomlib.make_registrations()
    relib.make_registrations()
    opcode_intercept.make_registrations()

    plugin_entries = entry_points(group="crosshair.plugin")
    for plugin_entry in plugin_entries:
        installed_plugins.append(plugin_entry.name)
        plugin_entry.load()

    # We monkey patch icontract below to prevent it from enforcing contracts.
    # (we want to control how and when they run)
    # TODO: consider a better home for this code
    try:
        import icontract

        if LooseVersion(icontract.__version__) < LooseVersion("2.4.0"):
            raise Exception("CrossHair requires icontract version >= 2.4.0")

        icontract._checkers._assert_invariant = lambda *a, **kw: None
        icontract._checkers._assert_preconditions = lambda *a, **kw: None
        icontract._checkers._assert_postconditions = lambda *a, **kw: None
    except ImportError:
        pass

    # Set hypothesis to run in a minimal mode.
    # (auditwall will yell if hypothesis tries to write to disk)
    # TODO: figure out some other way to set options via fuzz_one_input.
    try:
        from hypothesis import settings, Phase

        settings.register_profile("ch", database=None, phases=[Phase.generate])
        settings.load_profile("ch")
    except ImportError:
        pass
Example 22
def run():
    filterwarnings('error')
    filterwarnings('ignore', category=ImportWarning)
    filterwarnings('ignore', category=FutureWarning, module='pandas._version')

    # Fixed in recent versions but allowed by pytest=3.0.0; see #1630
    filterwarnings('ignore', category=DeprecationWarning, module='pluggy')

    # See https://github.com/numpy/numpy/pull/432
    filterwarnings('ignore', message='numpy.dtype size changed')
    filterwarnings('ignore', message='numpy.ufunc size changed')

    # Imported by Pandas in version 1.9, but fixed in later versions.
    filterwarnings(
        'ignore',
        message='Importing from numpy.testing.decorators is deprecated'
    )
    filterwarnings(
        'ignore',
        message='Importing from numpy.testing.nosetester is deprecated'
    )

    new_home = mkdtemp()
    set_hypothesis_home_dir(new_home)
    assert settings.default.database.path.startswith(new_home)

    charmap()
    assert os.path.exists(charmap_file()), charmap_file()
    assert isinstance(settings, type)

    # We do a smoke test here before we mess around with settings.
    x = settings()

    import hypothesis._settings as settings_module

    for s in settings_module.all_settings.values():
        v = getattr(x, s.name)
        # Check if it has a dynamically defined default and if so skip
        # comparison.
        if getattr(settings, s.name).show_default:
            assert v == s.default, '%r == x.%s != s.%s == %r' % (
                v, s.name, s.name, s.default,
            )

    settings.register_profile('default', settings(
        max_examples=10 if IN_COVERAGE_TESTS else not_set,
        timeout=unlimited,
    ))

    settings.register_profile(
        'speedy', settings(
            max_examples=5,
        ))

    settings.register_profile('debug', settings(verbosity=Verbosity.debug))

    settings.load_profile(os.getenv('HYPOTHESIS_PROFILE', 'default'))
Example 23
def _hypothesis_sexagesimal_strategy():
    """We define hypothesis strategy to generate Sexagesimal values in tests"""
    from hypothesis import HealthCheck, settings
    from hypothesis.strategies import (
        builds,
        decimals,
        integers,
        lists,
        register_type_strategy,
        sampled_from,
        tuples,
    )

    from kanon.units import Historical, Sexagesimal

    settings.register_profile("def",
                              suppress_health_check=(HealthCheck.too_slow, ))
    settings.load_profile("def")

    strat = builds(
        Sexagesimal,
        lists(integers(0, 59), max_size=2),
        lists(integers(0, 59), max_size=2),
        remainder=decimals(0, 1).filter(lambda x: x != 1),
        sign=sampled_from((-1, 1)),
    )
    register_type_strategy(Sexagesimal, strat)

    strat = builds(
        Historical,
        tuples(integers(0, 9), integers(0, 11), integers(0, 29)),
        lists(integers(0, 59), max_size=2),
        remainder=decimals(0, 1).filter(lambda x: x != 1),
        sign=sampled_from((-1, 1)),
    )
    register_type_strategy(Historical, strat)
Example 24
def pytest_configure(config):
    core.running_under_pytest = True
    profile = config.getoption(LOAD_PROFILE_OPTION)
    if profile:
        settings.load_profile(profile)
    verbosity_name = config.getoption(VERBOSITY_OPTION)
    if verbosity_name:
        verbosity_value = Verbosity[verbosity_name]
        profile_name = "%s-with-%s-verbosity" % (
            settings._current_profile,
            verbosity_name,
        )
        # register_profile creates a new profile, exactly like the current one,
        # with the extra values given (in this case 'verbosity')
        settings.register_profile(profile_name, verbosity=verbosity_value)
        settings.load_profile(profile_name)
    seed = config.getoption(SEED_OPTION)
    if seed is not None:
        try:
            seed = int(seed)
        except ValueError:
            pass
        core.global_force_seed = seed
    config.addinivalue_line("markers", "hypothesis: Tests which use hypothesis.")
Example 25
def _configure_hypothesis():
    """
    Define Hypothesis profiles and select one based on environment
    variables.
    """
    from os import environ

    from hypothesis import (
        HealthCheck,
        settings,
    )

    base = dict(
        suppress_health_check=[
            # CPU resources available to builds typically varies significantly
            # from run to run making it difficult to determine if "too slow"
            # data generation is a result of the code or the execution
            # environment.  Prevent these checks from (intermittently) failing
            # tests that are otherwise fine.
            HealthCheck.too_slow,
        ],
        # With the same reasoning, disable the test deadline.
        deadline=None,
    )

    settings.register_profile("default", **base)

    settings.register_profile(
        "ci",
        # Make CI runs a little more aggressive in amount of coverage they try
        # to provide.
        max_examples=200,
        **base)

    settings.register_profile(
        "big",
        max_examples=10000,
        # The only rule-based state machine we have now is quite simple and
        # can probably be completely explored in about 5 steps.  Give it some
        # headroom beyond that in case I'm wrong but don't let it run to the
        # full 50 because, combined with searching for 10000 successful
        # examples this makes the stateful test take *ages* to complete.
        stateful_step_count=15,
        **base)

    profile_name = environ.get("ZKAPAUTHORIZER_HYPOTHESIS_PROFILE", "default")
    settings.load_profile(profile_name)
    print("Loaded profile {}".format(profile_name))
Example 26
def run():
    filterwarnings('error')
    filterwarnings('ignore', category=ImportWarning)
    filterwarnings('ignore', category=FutureWarning, module='pandas._version')
    # Only applies to Django 1.8, so this filter will go very soon!
    filterwarnings('ignore',
                   category=DeprecationWarning,
                   module='tests.django.toystore.models')

    set_hypothesis_home_dir(mkdtemp())

    charmap()
    assert os.path.exists(charmap_file()), charmap_file()
    assert isinstance(settings, type)

    # We do a smoke test here before we mess around with settings.
    x = settings()

    import hypothesis._settings as settings_module

    for s in settings_module.all_settings.values():
        v = getattr(x, s.name)
        # Check if it has a dynamically defined default and if so skip
        # comparison.
        if getattr(settings, s.name).show_default:
            assert v == s.default, '%r == x.%s != s.%s == %r' % (
                v,
                s.name,
                s.name,
                s.default,
            )

    settings.register_profile(
        'default',
        settings(timeout=unlimited, use_coverage=not IN_COVERAGE_TESTS))

    settings.register_profile('with_coverage',
                              settings(
                                  timeout=unlimited,
                                  use_coverage=True,
                              ))

    settings.register_profile('speedy', settings(max_examples=5, ))

    settings.load_profile(os.getenv('HYPOTHESIS_PROFILE', 'default'))
Example 27
def run():
    warnings.filterwarnings(u'error', category=UnicodeWarning)

    set_hypothesis_home_dir(mkdtemp())

    charmap()
    assert os.path.exists(charmap_file())
    assert isinstance(settings, type)

    settings.register_profile('default', settings(timeout=-1, strict=True))

    settings.register_profile('speedy', settings(
        timeout=1,
        max_examples=5,
    ))

    settings.register_profile('nonstrict', settings(strict=False))

    settings.load_profile(os.getenv('HYPOTHESIS_PROFILE', 'default'))
Example 28
def run(deprecations_as_errors=True):
    warnings.filterwarnings('error', category=UnicodeWarning)
    # This catches deprecations in our dependencies, as well as internally
    # (because HypothesisDeprecationWarning subclasses DeprecationWarning)
    if deprecations_as_errors:  # disabled for old versions of Django
        warnings.filterwarnings('error', category=DeprecationWarning)

    set_hypothesis_home_dir(mkdtemp())

    charmap()
    assert os.path.exists(charmap_file()), charmap_file()
    assert isinstance(settings, type)

    # We do a smoke test here before we mess around with settings.
    x = settings()

    import hypothesis._settings as settings_module

    for s in settings_module.all_settings.values():
        v = getattr(x, s.name)
        # Check if it has a dynamically defined default and if so skip
        # comparison.
        if getattr(settings, s.name).show_default:
            assert v == s.default, '%r == x.%s != s.%s == %r' % (
                v,
                s.name,
                s.name,
                s.default,
            )

    settings.register_profile(
        'default',
        settings(timeout=unlimited, use_coverage=not IN_COVERAGE_TESTS))

    settings.register_profile('with_coverage',
                              settings(
                                  timeout=unlimited,
                                  use_coverage=True,
                              ))

    settings.register_profile('speedy', settings(max_examples=5, ))

    settings.load_profile(os.getenv('HYPOTHESIS_PROFILE', 'default'))
Example 29
def _configure_hypothesis():
    """
    Define Hypothesis profiles and select one based on environment
    variables.
    """
    from os import environ

    from hypothesis import (
        HealthCheck,
        settings,
    )

    base = dict(
        suppress_health_check=[
            # CPU resources available to builds typically varies significantly
            # from run to run making it difficult to determine if "too slow"
            # data generation is a result of the code or the execution
            # environment.  Prevent these checks from (intermittently) failing
            # tests that are otherwise fine.
            HealthCheck.too_slow,
        ],
        # With the same reasoning, disable the test deadline.
        deadline=None,
    )

    settings.register_profile("default", **base)

    settings.register_profile(
        "ci",
        # Make CI runs a little more aggressive in amount of coverage they try
        # to provide.
        max_examples=200,
        **base)

    settings.register_profile("big", max_examples=10000, **base)

    profile_name = environ.get("ZKAPAUTHORIZER_HYPOTHESIS_PROFILE", "default")
    settings.load_profile(profile_name)
    print("Loaded profile {}".format(profile_name))
Example 30
def run():
    warnings.filterwarnings(u'error', category=UnicodeWarning)

    set_hypothesis_home_dir(mkdtemp())

    charmap()
    assert os.path.exists(charmap_file())
    assert isinstance(settings, type)

    settings.register_profile(
        'default', settings(timeout=-1, strict=True)
    )

    settings.register_profile(
        'speedy', settings(
            timeout=1, max_examples=5,
        ))

    settings.register_profile(
        'nonstrict', settings(strict=False)
    )

    settings.load_profile(os.getenv('HYPOTHESIS_PROFILE', 'default'))
Example 31
def run():
    warnings.filterwarnings('error', category=UnicodeWarning)
    warnings.filterwarnings('error', category=HypothesisDeprecationWarning)

    set_hypothesis_home_dir(mkdtemp())

    charmap()
    assert os.path.exists(charmap_file()), charmap_file()
    assert isinstance(settings, type)

    # We do a smoke test here before we mess around with settings.
    x = settings()

    import hypothesis._settings as settings_module

    for s in settings_module.all_settings.values():
        v = getattr(x, s.name)
        # Check if it has a dynamically defined default and if so skip
        # comparison.
        if getattr(settings, s.name).show_default:
            assert v == s.default, '%r == x.%s != s.%s == %r' % (
                v, s.name, s.name, s.default,
            )

    settings.register_profile('default', settings(
        timeout=unlimited, use_coverage=not (IN_COVERAGE_TESTS or PYPY)))

    settings.register_profile('with_coverage', settings(
        timeout=unlimited, use_coverage=True,
    ))

    settings.register_profile(
        'speedy', settings(
            max_examples=5,
        ))

    settings.load_profile(os.getenv('HYPOTHESIS_PROFILE', 'default'))
Example 32
from hypothesis import given, strategies as st, assume, example, settings
from shrinker import shrink
from hashlib import sha1
import json
import os

settings.register_profile(
    'default', settings(max_examples=200)
)

settings.load_profile(os.getenv('HYPOTHESIS_PROFILE', 'default'))


@example(1, b'\x01')
@example(3, b'\x00\x00\x01')
@given(st.integers(0, 10), st.binary(average_size=20))
def test_shrink_length_language(n, b):
    assume(len(b) >= n)
    best = shrink(b, lambda x: len(x) >= n)
    assert best == bytes(n)


@given(st.binary())
def test_shrink_messy(s):
    b = sha1(s).digest()[0]
    shrunk = shrink(s, lambda x: sha1(x).digest()[0] == b)
    assert sha1(shrunk).digest()[0] == b


def is_valid_json(string):
    try:
Example 33
import warnings
from tempfile import mkdtemp
import unicodenazi

warnings.filterwarnings('error', category=UnicodeWarning)
unicodenazi.enable()

from hypothesis import settings
from hypothesis.configuration import set_hypothesis_home_dir

set_hypothesis_home_dir(mkdtemp())

assert isinstance(settings, type)

settings.register_profile(
    'default', settings(timeout=-1, strict=True)
)
settings.load_profile('default')

import inspect
import os


TESTS = [
    'test_testdecorators',
]

import sys
sys.path.append(os.path.join(
    os.path.dirname(__file__), "..", "tests", "cover",
))
Example 34
    })


@pytest.fixture
def create(tmpdir):
    def inner(name, content, list_name='default'):
        tmpdir.ensure_dir(list_name).join(name).write(
            'BEGIN:VCALENDAR\n'
            'BEGIN:VTODO\n' +
            content +
            'END:VTODO\n'
            'END:VCALENDAR'
        )

    return inner


settings.register_profile("ci", settings(
    max_examples=1000,
    verbosity=Verbosity.verbose,
    suppress_health_check=[HealthCheck.too_slow]
))
settings.register_profile("deterministic", settings(
    derandomize=True,
))

if os.getenv('DETERMINISTIC_TESTS', 'false').lower() == 'true':
    settings.load_profile("deterministic")
elif os.getenv('CI', 'false').lower() == 'true':
    settings.load_profile("ci")
Example 35
    "incidentTypesText",
    "incidents",
    "locationNames",
    "locations",
    "radialHours",
    "radialMinutes",
    "rangerHandles",
    "rangers",
    "reportEntries",
    "rodGarettAddresses",
    "textOnlyAddresses",
)

settings.register_profile(
    "ci",
    settings(
        deadline=None,
        suppress_health_check=[HealthCheck.too_slow],
    ))
if getenv("CI") == "true":
    settings.load_profile("ci")

##
# DateTimes
##


@composite
def timeZones(draw: Callable) -> TimeZone:
    offset = draw(integers(min_value=-(60 * 24) + 1, max_value=(60 * 24) - 1))
    timeDelta = TimeDelta(minutes=offset)
    timeZone = TimeZone(offset=timeDelta, name=f"{offset}s")
Example 36
from os import environ

from hypothesis import settings

settings.register_profile('dev', settings(max_examples=10))
settings.register_profile('ci', settings())


if environ.get('CI', False):
    settings.load_profile('ci')
else:
    settings.load_profile('dev')
Example 37
import os

import pytest
from flask import Flask
from hypothesis import settings

settings.register_profile('slow', settings(max_examples=200))
settings.register_profile('fast', settings(max_examples=20))
settings.load_profile(os.getenv(u'HYPOTHESIS_PROFILE', 'fast'))


@pytest.fixture(autouse=True)
def flask(request):
    app = Flask(__name__)
    ctx = app.test_request_context('/')
    ctx.push()

    request.addfinalizer(ctx.pop)
Example 38
@pytest.fixture(autouse=True)
def setup_logging():
    click_log.basic_config('vdirsyncer').setLevel(logging.DEBUG)


try:
    import pytest_benchmark
except ImportError:
    @pytest.fixture
    def benchmark():
        return lambda x: x()
else:
    del pytest_benchmark


settings.suppress_health_check = [HealthCheck.too_slow]

settings.register_profile("ci", settings(
    max_examples=1000,
    verbosity=Verbosity.verbose,
))
settings.register_profile("deterministic", settings(
    derandomize=True,
))

if os.environ['DETERMINISTIC_TESTS'].lower() == 'true':
    settings.load_profile("deterministic")
elif os.environ['CI'].lower() == 'true':
    settings.load_profile("ci")
Example 39
import pytest
from hypothesis import settings, Verbosity

import betamax
from betamax_serializers import pretty_json
from unittest import mock

log = logging.getLogger(__name__)

betamax.Betamax.register_serializer(pretty_json.PrettyJSONSerializer)

ontap_username = os.environ.get('ONTAP_USERNAME', 'user-placeholder')
ontap_password = os.environ.get('ONTAP_PASSWORD', 'password-placeholder')

settings.register_profile("ci", settings(max_examples=10))
settings.register_profile("exhaustive", settings(max_examples=400))
settings.register_profile("dev", settings(max_examples=10))
settings.register_profile("debug", settings(max_examples=10,
                                            verbosity=Verbosity.verbose))
settings.load_profile(os.getenv(u'HYPOTHESIS_PROFILE', 'dev'))


def pytest_addoption(parser):
    parser.addoption("--runslow", action="store_true",
                     help="run slow tests")
    parser.addoption("--betamax-record-mode", action="store", default="never",
                     help=("Use betamax recording option "
                           "(once, new_episodes, never)"))

Example 40
import pickle
import re
from hypothesis import given, settings, unlimited, Verbosity, errors
import hypothesis.strategies as st
import array
import pyroaring
from pyroaring import BitMap, FrozenBitMap

is_python2 = sys.version_info < (3, 0)

try:  # Python2 compatibility
    range = xrange
except NameError:
    pass

settings.register_profile("ci", settings(
    max_examples=500, deadline=None, timeout=unlimited))
settings.register_profile("dev", settings(max_examples=10, deadline=2000))
settings.register_profile("debug", settings(
    max_examples=10, verbosity=Verbosity.verbose, deadline=2000))
try:
    env = os.getenv('HYPOTHESIS_PROFILE', 'dev')
    settings.load_profile(env)
except errors.InvalidArgument:
    sys.exit('Unknown hypothesis profile: %s.' % env)

uint18 = st.integers(min_value=0, max_value=2**18)
uint32 = st.integers(min_value=0, max_value=2**32-1)
integer = st.integers(min_value=0, max_value=2**31-1)

range_max_size = 2**18
Example 41
"""

from bidict import (
    OrderedBidirectionalMapping, IGNORE, OVERWRITE, RAISE,
    bidict, loosebidict, looseorderedbidict, orderedbidict,
    frozenbidict, frozenorderedbidict)
from bidict.compat import iteritems, viewitems
from collections import OrderedDict
from hypothesis import given, settings
from hypothesis.strategies import integers, lists, tuples
from os import getenv
import pytest


# https://groups.google.com/d/msg/hypothesis-users/8FVs--1yUl4/JEkJ02euEwAJ
settings.register_profile('default', settings(strict=True))
settings.load_profile(getenv('HYPOTHESIS_PROFILE', 'default'))


def to_inv_odict(items):
    return OrderedDict((v, k) for (k, v) in items)


def prune_dup_vals(items):
    return list(iteritems(to_inv_odict(iteritems(to_inv_odict(items)))))


ondupbehaviors = (IGNORE, OVERWRITE, RAISE)
mutable_bidict_types = (bidict, loosebidict, looseorderedbidict, orderedbidict)
bidict_types = mutable_bidict_types + (frozenbidict, frozenorderedbidict)
mutating_methods_by_arity = {
Example 42
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

"""Set up hypothesis."""

from os import getenv
from hypothesis import HealthCheck, settings, unlimited


MAX_EXAMPLES_DEFAULT = 200
NOCHECK_SLOW = (HealthCheck.hung_test, HealthCheck.too_slow)
PROFILE_DEFAULT = {
    'max_examples': int(getenv('HYPOTHESIS_MAX_EXAMPLES') or MAX_EXAMPLES_DEFAULT),
    'deadline': None,
    'timeout': unlimited,
    # Enabling coverage slows down hypothesis.
    'suppress_health_check': NOCHECK_SLOW if getenv('COVERAGE') else (),
}
PROFILE_MORE_EXAMPLES = dict(
    PROFILE_DEFAULT,
    max_examples=int(getenv('HYPOTHESIS_MAX_EXAMPLES') or MAX_EXAMPLES_DEFAULT * 10),
    suppress_health_check=NOCHECK_SLOW,
)
settings.register_profile('default', **PROFILE_DEFAULT)
settings.register_profile('more-examples', **PROFILE_MORE_EXAMPLES)


def load_profile(name=getenv('HYPOTHESIS_PROFILE') or 'default'):
    """Load the Hypothesis profile with the given name."""
    settings.load_profile(name)
Example 43
import json
import os

from pytest import fixture
from hypothesis import settings, HealthCheck

from chalice.app import Chalice

# From:
# http://hypothesis.readthedocs.io/en/latest/settings.html#settings-profiles
# On travis we'll have it run through more iterations.
settings.register_profile(
    'ci', settings(max_examples=2000,
                   suppress_health_check=[HealthCheck.too_slow]),
)
# When you're developing locally, we'll only run a few examples
# to keep unit tests fast.  If you want to run more iterations
# locally just set HYPOTHESIS_PROFILE=ci.
settings.register_profile('dev', settings(max_examples=10))
settings.load_profile(os.getenv('HYPOTHESIS_PROFILE', 'dev'))

print("HYPOTHESIS PROFILE: %s" % os.environ.get("HYPOTHESIS_PROFILE"))


@fixture(autouse=True)
def ensure_no_local_config(no_local_config):
    pass


@fixture
def sample_app():
Example 44
"""
Property-based tests for encoding/decoding methods.

These ones pass, just as you'd hope!

"""
from __future__ import absolute_import, division, print_function

import hypothesis.extra.numpy as npst
import hypothesis.strategies as st
from hypothesis import given, settings

import xarray as xr

# Run for a while - arrays are a bigger search space than usual
settings.register_profile("ci", deadline=None)
settings.load_profile("ci")


an_array = npst.arrays(
    dtype=st.one_of(
        npst.unsigned_integer_dtypes(),
        npst.integer_dtypes(),
        npst.floating_dtypes(),
    ),
    shape=npst.array_shapes(max_side=3),  # max_side specified for performance
)


@given(st.data(), an_array)
def test_CFMask_coder_roundtrip(data, arr):
Example 45
import unicodenazi
from hypothesis import settings, unlimited
from hypothesis.errors import HypothesisDeprecationWarning
from hypothesis.configuration import set_hypothesis_home_dir

warnings.filterwarnings('error', category=UnicodeWarning)
warnings.filterwarnings('error', category=HypothesisDeprecationWarning)
unicodenazi.enable()


set_hypothesis_home_dir(mkdtemp())

assert isinstance(settings, type)

settings.register_profile(
    'default', settings(timeout=unlimited)
)
settings.load_profile('default')


TESTS = [
    'test_testdecorators',
]

sys.path.append(os.path.join(
    os.path.dirname(__file__), '..', 'tests', 'cover',
))

if __name__ == '__main__':
    for t in TESTS:
        module = __import__(t)
Example 46
# -*- coding: utf-8 -*-
from hypothesis import settings
from hypothesis.strategies import (
    fixed_dictionaries, lists,
    text, integers, text, none,
    composite, one_of,
    just)

from functools import partial
import re

settings.register_profile("unit", settings(
    database=None,
    max_examples=1,
))
settings.load_profile("unit")


_descriptive_alphabet = (
    # start with some normal alphanumerics
    u"abcdefghijklmnopABCDEFGHIJKLMNOP123"
    # some characters likely to fudge up poor escaping
    u"\"\'<&%"
    # some printable unicode oddities
    u"£Ⰶⶼ"
    # various weird types of spaces
    u" \u200d\u2029\u202f"
)
_nonspace_cluster_re = re.compile(r"\S+", flags=re.UNICODE)

Example 47
from hypothesis import settings, Verbosity
import os


settings.register_profile("ci", settings(min_satisfying_examples=1000))
settings.register_profile("dev", settings(max_examples=10))
settings.register_profile("debug", settings(max_examples=10,
                                            verbosity=Verbosity.verbose))
settings.load_profile(os.getenv(u'HYPOTHESIS_PROFILE', 'default'))
Example 48
from hypothesis import settings, Verbosity, HealthCheck

settings.register_profile("ci", settings(
    max_examples=1000,
    suppress_health_check=[HealthCheck.too_slow]))
settings.register_profile("dev", settings(max_examples=200, suppress_health_check=[HealthCheck.too_slow]))
settings.register_profile(
    "debug", settings(max_examples=10, verbosity=Verbosity.verbose, suppress_health_check=[HealthCheck.too_slow]))


def pytest_addoption(parser):
    parser.addoption("--data-integrity", action="store_true",
                     help="run data integrity tests")
Example 49
# need to determine who owns an individual contribution.
#
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at http://mozilla.org/MPL/2.0/.
#
# END HEADER

from __future__ import division, print_function, absolute_import

import os
import hypothesis.strategies as st
from hypothesis import find, settings, given

settings.register_profile('benchmarking', settings(
    database=None,
))


import pytest
import random


def setup_module():
    settings.load_profile('benchmarking')


def teardown_module():
    settings.load_profile(os.getenv('HYPOTHESIS_PROFILE', 'default'))

Example 50
import platform
import os
import sys

from hypothesis import settings, HealthCheck

impl = platform.python_implementation()

settings.register_profile("ci", settings(max_examples=1000,
                                         suppress_health_check=[HealthCheck.too_slow]))
settings.register_profile("pypy", settings(suppress_health_check=[HealthCheck.too_slow]))

settings.load_profile(os.getenv("HYPOTHESIS_PROFILE",
                                "default" if impl != "PyPy" else "pypy"))

# serve can't even be imported on Py3, so totally ignore it even from collection
collect_ignore = []
if sys.version_info[0] >= 3:
    serve = os.path.join(os.path.dirname(__file__), "serve")
    collect_ignore.extend([os.path.join(root, f)
                           for root, _, files in os.walk(serve)
                           for f in files])
Example 51
"""
Tests for the eliot package.
"""

# Increase hypothesis deadline so we don't time out on PyPy:
from hypothesis import settings

settings.register_profile("eliot", deadline=1000)
settings.load_profile("eliot")
Example 52
    "incidentTypesText",
    "incidents",
    "locationNames",
    "locations",
    "radialHours",
    "radialMinutes",
    "rangerHandles",
    "rangers",
    "reportEntries",
    "rodGarettAddresses",
    "textOnlyAddresses",
)


settings.register_profile(
    "ci", settings(suppress_health_check=[HealthCheck.too_slow])
)
if getenv("CI") == "true":
    settings.load_profile("ci")


##
# DateTimes
##


@composite
def timeZones(draw: Callable) -> TimeZone:
    offset = draw(integers(min_value=-(60 * 24) + 1, max_value=(60 * 24) - 1))
    timeDelta = TimeDelta(minutes=offset)
    timeZone = TimeZone(offset=timeDelta, name=f"{offset}s")
Example 53
import pytest

from   hypothesis import given, settings
import hypothesis.strategies as st

from radical.entk import Pipeline, Stage, Task
from radical.entk import states
from radical.entk.exceptions import *


# ------------------------------------------------------------------------------
#

# Hypothesis settings
settings.register_profile("travis", max_examples=100, deadline=None)
settings.load_profile("travis")

def test_stage_initialization():
    """
    ***Purpose***: Test that all attributes have the expected
    data types
    """

    s = Stage()

    assert s.uid == None
    assert s.name == None
    assert s.tasks == set()
    assert s.state == states.INITIAL
    assert s.state_history == [states.INITIAL]
Example 54
@pytest.fixture(autouse=True)
def setup_logging():
    click_log.basic_config('vdirsyncer').setLevel(logging.DEBUG)


try:
    import pytest_benchmark
except ImportError:
    @pytest.fixture
    def benchmark():
        return lambda x: x()
else:
    del pytest_benchmark


settings.suppress_health_check = [HealthCheck.too_slow]

settings.register_profile("ci", settings(
    max_examples=1000,
    verbosity=Verbosity.verbose,
))
settings.register_profile("deterministic", settings(
    derandomize=True,
    perform_health_check=False
))

if os.environ.get('DETERMINISTIC_TESTS', 'false').lower() == 'true':
    settings.load_profile("deterministic")
elif os.environ.get('CI', 'false').lower() == 'true':
    settings.load_profile("ci")
Example 55
from radical.entk.execman.base import Base_TaskManager     as BaseTmgr
from radical.entk.execman.base import Base_ResourceManager as BaseRmgr
from radical.entk.execman.rp   import TaskManager          as RPTmgr
from radical.entk.execman.rp   import ResourceManager      as RPRmgr
from radical.entk.execman.mock import TaskManager          as MockTmgr
from radical.entk.execman.mock import ResourceManager      as MockRmgr
from radical.entk              import exceptions           as ree
from radical.entk              import Task, states


hostname = os.environ.get('RMQ_HOSTNAME', 'localhost')
port     = int(os.environ.get('RMQ_PORT', 5672))

# Hypothesis settings
settings.register_profile("travis", max_examples=100, deadline=None)
settings.load_profile("travis")

os.environ['ENTK_HB_INTERVAL'] = '5'


# ------------------------------------------------------------------------------
#
@given(s=st.text(),
       l=st.lists(st.characters()),
       i=st.integers())
def test_tmgr_base_initialization(s, l, i):

    try:
        home   = os.environ.get('HOME', '/home')
        folder = glob.glob('%s/.radical/utils/test.*' % home)
Example 56
    return Clock()


@pytest.fixture
def subclock():
    return Subclock()


@pytest.yield_fixture
def check_dump():

    def check_dump(expression, expected):
        dumped = dump(expression)
        print(dumped)
        expected = textwrap.dedent(expected.strip('\n').rstrip())
        assert dumped == expected

    yield check_dump


settings.register_profile("ci", settings(
    max_examples=1000,
    stateful_step_count=500,
))

settings.register_profile("thorough", settings(
    max_examples=10000,
    stateful_step_count=1000,
    timeout=-1,
))
Example 57
def setup_logging():
    click_log.basic_config('vdirsyncer').setLevel(logging.DEBUG)


@pytest.fixture(autouse=True)
def suppress_py2_warning(monkeypatch):
    monkeypatch.setattr('vdirsyncer.cli._check_python2', lambda: None)


try:
    import pytest_benchmark
except ImportError:

    @pytest.fixture
    def benchmark():
        return lambda x: x()
else:
    del pytest_benchmark

settings.register_profile(
    "ci",
    settings(max_examples=1000,
             verbosity=Verbosity.verbose,
             suppress_health_check=[HealthCheck.too_slow]))
settings.register_profile("deterministic", settings(derandomize=True, ))

if os.getenv('DETERMINISTIC_TESTS', 'false').lower() == 'true':
    settings.load_profile("deterministic")
elif os.getenv('CI', 'false').lower() == 'true':
    settings.load_profile("ci")
Example 58
import warnings
from tempfile import mkdtemp

import pytest

from hypothesis import settings
from hypothesis.configuration import set_hypothesis_home_dir

warnings.filterwarnings(u'error', category=UnicodeWarning)

set_hypothesis_home_dir(mkdtemp())

assert isinstance(settings, type)

settings.register_profile(
    'default', settings(timeout=-1, strict=True)
)

settings.register_profile(
    'speedy', settings(
        timeout=1, max_examples=5,
    ))


settings.register_profile(
    'nonstrict', settings(strict=False)
)

settings.load_profile(os.getenv('HYPOTHESIS_PROFILE', 'default'))

Example 59
from __future__ import division, print_function, absolute_import

import warnings

import pytest

import hypothesis.strategies as st
from hypothesis import given, settings
from hypothesis.errors import HypothesisDeprecationWarning
from hypothesis.internal.compat import PY3
from hypothesis.internal.reflection import arg_string

original_profile = settings.default

settings.register_profile("nonstrict", settings(strict=False))


def setup_function(fn):
    settings.load_profile("nonstrict")
    warnings.simplefilter("always", HypothesisDeprecationWarning)


def teardown_function(fn):
    settings.load_profile("default")
    warnings.simplefilter("once", HypothesisDeprecationWarning)


class BadRepr(object):
    def __init__(self, value):
        self.value = value
Example 60
from __future__ import absolute_import, division, print_function

import os

import pytest
from hypothesis import settings


@pytest.fixture(scope="session")
def C():
    """
    Return a simple but fully featured attrs class with an x and a y attribute.
    """
    from attr import attributes, attr

    @attributes
    class C(object):
        x = attr()
        y = attr()

    return C

# PyPy on Travis appears to be too slow.
settings.register_profile("travis_pypy", settings(perform_health_check=False))
settings.load_profile(os.getenv(u'HYPOTHESIS_PROFILE', 'default'))