Example 1
0
def teardown_function(function):
    """Run by py.test after each test in this module.

    Restores the RC configuration to a known state, since example
    tests may have modified it.
    """
    # Drop any test-applied settings, then re-disable the decoder cache.
    rc.reload_rc([])
    rc.set('decoder_cache', 'enabled', 'False')
Example 2
0
def pytest_runtest_setup(item):
    """Reset RC settings, then skip or xfail tests listed as unsupported.

    Patterns come from the ``nengo_test_unsupported`` ini option as
    alternating (test pattern, reason) tokens.
    """
    rc.reload_rc([])
    rc.set('decoder_cache', 'enabled', 'False')
    rc.set('exceptions', 'simplified', 'False')

    item_name = get_item_name(item)

    # Flatten the ini lines into one string, then tokenize it with
    # shell quoting rules so reasons may contain spaces.
    tokens = shlex.split(
        " ".join(item.config.getini("nengo_test_unsupported")))
    # Consecutive tokens pair up as (test pattern, reason).
    pairs = [tokens[i:i + 2] for i in range(0, len(tokens), 2)]

    for pattern, reason in pairs:
        # Wrap '[' and ']' so fnmatch treats them literally
        # (see https://docs.python.org/3/library/fnmatch.html)
        escaped = []
        for ch in pattern:
            escaped.append("[%s]" % ch if ch in ('[', ']') else ch)

        # A leading '*' spares users from having to prefix every
        # test-function pattern themselves.
        glob = "*" + "".join(escaped)

        if fnmatch(item_name, glob):
            if TestConfig.run_unsupported:
                item.add_marker(pytest.mark.xfail(reason=reason))
            else:
                pytest.skip(reason)
Example 3
0
def pytest_runtest_setup(item):  # noqa: C901
    """Reset RC settings, then skip or xfail tests per configuration."""
    rc.reload_rc([])
    rc.set('decoder_cache', 'enabled', 'False')
    rc.set('exceptions', 'simplified', 'False')

    if not hasattr(item, 'obj'):
        return

    # Skip marked tests when the matching command-line option is absent.
    mark_checks = [
        ('example', 'noexamples', "examples not requested"),
        ('slow', 'slow', "slow tests not requested"),
    ]
    for mark, option, message in mark_checks:
        if getattr(item.obj, mark, None) and not item.config.getvalue(option):
            pytest.skip(message)

    if getattr(item.obj, 'noassertions', None):
        # An assertion-free test only produces value through one of these
        # output fixtures; skip unless at least one it uses was requested.
        skipreasons = []
        any_requested = False
        for fixture_name, option, message in [
                ('analytics', 'analytics', "analytics not requested"),
                ('plt', 'plots', "plots not requested"),
                ('logger', 'logs', "logs not requested")]:
            if fixture_name in item.fixturenames:
                if item.config.getvalue(option):
                    any_requested = True
                    break
                skipreasons.append(message)
        if not any_requested:
            pytest.skip(" and ".join(skipreasons))

    if 'Simulator' in item.fixturenames:
        for pattern, reason in TestConfig.Simulator.unsupported:
            # A leading '*' spares users from having to prefix every
            # test pattern themselves.
            if fnmatch(item.nodeid, '*' + pattern):
                pytest.xfail(reason)
Example 4
0
def pytest_runtest_setup(item):  # noqa: C901
    """Reset RC settings and filter tests based on config and backend."""
    rc.reload_rc([])
    rc.set('decoder_cache', 'enabled', 'False')
    rc.set('exceptions', 'simplified', 'False')

    if not hasattr(item, 'obj'):
        return  # Occurs for doctests, possibly other weird tests

    conf = item.config
    uses_compare = getattr(item.obj, 'compare', None) is not None
    uses_sim = 'Simulator' in item.fixturenames
    uses_refsim = 'RefSimulator' in item.fixturenames
    is_frontend = not (uses_sim or uses_refsim)
    skipping_frontend = TestConfig.is_skipping_frontend_tests()

    # Each pytest.skip raises, so at most one of these branches fires.
    if getattr(item.obj, 'example', None) and not conf.getvalue('noexamples'):
        pytest.skip("examples not requested")
    elif getattr(item.obj, 'slow', None) and not conf.getvalue('slow'):
        pytest.skip("slow tests not requested")
    elif not TestConfig.compare_requested and uses_compare:
        pytest.skip("compare tests not requested")
    elif skipping_frontend and is_frontend:
        pytest.skip("frontend tests not run for alternate backends")
    elif (skipping_frontend and uses_refsim
          and not TestConfig.is_refsim_overridden()):
        pytest.skip("RefSimulator not overridden")
    elif (skipping_frontend and uses_sim
          and not TestConfig.is_sim_overridden()):
        pytest.skip("Simulator not overridden")
    elif getattr(item.obj, 'noassertions', None):
        # Skip assertion-free tests unless an output fixture they use
        # (analytics/plots/logs) was requested on the command line.
        missing = [
            option
            for fixture, option in [('analytics', 'analytics'),
                                    ('plt', 'plots'),
                                    ('logger', 'logs')]
            if fixture in item.fixturenames and not conf.getvalue(option)
        ]
        if missing:
            pytest.skip("%s not requested" % " and ".join(missing))

    if not is_frontend:
        item_name = get_item_name(item)

        for pattern, reason in TestConfig.Simulator.unsupported:
            # A leading '*' spares users from having to prefix every
            # test pattern themselves.
            if fnmatch(item_name, '*' + pattern):
                pytest.xfail(reason)
Example 5
0
def pytest_runtest_setup(item):  # noqa: C901
    """Reset RC settings; xfail backend tests the Simulator cannot run."""
    rc.reload_rc([])
    rc.set('decoder_cache', 'enabled', 'False')
    rc.set('exceptions', 'simplified', 'False')

    if not hasattr(item, 'obj'):
        return  # Occurs for doctests, possibly other weird tests

    # A test counts as a backend test when it requests either
    # simulator fixture; frontend-only tests are never xfailed here.
    backend_test = ('Simulator' in item.fixturenames
                    or 'RefSimulator' in item.fixturenames)

    if backend_test:
        item_name = get_item_name(item)

        for pattern, reason in TestConfig.Simulator.unsupported:
            # A leading '*' spares users from having to prefix every
            # test pattern themselves.
            if fnmatch(item_name, '*' + pattern):
                pytest.xfail(reason)
Example 6
0
def pytest_runtest_setup(item):  # noqa: C901
    """Reset RC settings, then apply configured skips and xfails."""
    rc.reload_rc([])
    rc.set('decoder_cache', 'enabled', 'False')
    rc.set('exceptions', 'simplified', 'False')

    if not hasattr(item, 'obj'):
        return

    # Tests carrying these marks are skipped unless the matching
    # command-line option was supplied.
    for mark, option, message in (
            ('example', 'noexamples', "examples not requested"),
            ('slow', 'slow', "slow tests not requested")):
        if getattr(item.obj, mark, None) and not item.config.getvalue(option):
            pytest.skip(message)

    if getattr(item.obj, 'noassertions', None):
        # Assertion-free tests are worthwhile only when one of the output
        # fixtures they use (analytics/plt/logger) was requested.
        reasons = []
        wanted = False
        for fixture_name, option, message in (
                ('analytics', 'analytics', "analytics not requested"),
                ('plt', 'plots', "plots not requested"),
                ('logger', 'logs', "logs not requested")):
            if fixture_name in item.fixturenames:
                if item.config.getvalue(option):
                    wanted = True
                    break
                reasons.append(message)
        if not wanted:
            pytest.skip(" and ".join(reasons))

    if 'Simulator' in item.fixturenames:
        for pattern, reason in TestConfig.Simulator.unsupported:
            # A leading '*' spares users from having to prefix every
            # test pattern themselves.
            if fnmatch(item.nodeid, '*' + pattern):
                pytest.xfail(reason)
Example 7
0
def pytest_runtest_setup(item):
    """Reset the RC configuration to values suited to the test suite."""
    rc.reload_rc([])
    # Apply each test-friendly override via subscript assignment.
    for section, key, value in [
            ("decoder_cache", "enabled", "False"),
            ("exceptions", "simplified", "False"),
            ("nengo.Simulator", "fail_fast", "True"),
            ("progress", "progress_bar", "False")]:
        rc[section][key] = value
Example 8
0
def pytest_configure(config):
    """Reload default RC settings and disable the decoder cache."""
    rc.reload_rc([])
    rc.set('decoder_cache', 'enabled', 'false')
Example 9
0
def pytest_configure(config):
    """At session start, restore default RC settings with caching off."""
    # Reload defaults first so stale overrides cannot leak in.
    rc.reload_rc([])
    rc.set('decoder_cache', 'enabled', 'false')
Example 10
0
def pytest_runtest_setup(item):
    """Reset RC settings to test-suite defaults before each test."""
    rc.reload_rc([])
    # Apply each override through the rc.set interface.
    for section, key, value in [
            ("decoder_cache", "enabled", "False"),
            ("exceptions", "simplified", "False"),
            ("nengo.Simulator", "fail_fast", "True")]:
        rc.set(section, key, value)
Example 11
0
def pytest_configure(config):
    """Session-start hook: reset RC defaults and turn the cache off."""
    rc.reload_rc([])
    rc.set("decoder_cache", "enabled", "false")