Example #1
    def test_downstream_blackhole(
        self, container, publish, toxiproxy
    ):  # pragma: no cover
        """ Verify we detect and recover from sockets losing data.

        This failure mode means that all data sent from the rabbit broker to
        the consumer is lost, but the socket remains open.

        Heartbeat acknowledgements from the broker are not received by the
        consumer. After two beats are missed the consumer raises a "too many
        heartbeats missed" error.

        Cancelling the consumer requests an acknowledgement from the broker,
        which is swallowed by the socket. There is no timeout when reading
        the acknowledgement so this hangs forever.

        See :meth:`kombu.messaging.Consumer.__exit__`
        """
        pytest.skip("skip until kombu supports recovery in this scenario")

        queue_consumer = get_extension(container, QueueConsumer)

        def reset(args, kwargs, result, exc_info):
            toxiproxy.reset_timeout()
            return True

        with patch_wait(queue_consumer, 'on_connection_error', callback=reset):
            toxiproxy.set_timeout(stream="downstream", timeout=0)

        # connection re-established
        msg = "foo"
        with entrypoint_waiter(container, 'echo') as result:
            publish(msg)
        assert result.get() == msg
Example #2
 def __init__(self, *a, **kw):
     super(TranslatedBuildTest, self).__init__(*a, **kw)
     try:
         self.oldlocale = locale.getlocale()
         locale.setlocale(locale.LC_ALL, ("pl_PL", "utf8"))
     except locale.Error:
         pytest.skip("pl_PL locale not available")
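A teardown counterpart presumably restores the saved locale; a minimal sketch under that assumption (the method name and base-class call are illustrative, not from the source):

 def tearDown(self):
     # Restore the locale captured in __init__ so later tests are unaffected.
     locale.setlocale(locale.LC_ALL, self.oldlocale)
     super(TranslatedBuildTest, self).tearDown()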
Example #3
def test_pqueue_by_servicebus_client_fail_send_messages(live_servicebus_config, partitioned_queue):
    client = ServiceBusClient(
        service_namespace=live_servicebus_config['hostname'],
        shared_access_key_name=live_servicebus_config['key_name'],
        shared_access_key_value=live_servicebus_config['access_key'],
        debug=True)

    queue_client = client.get_queue(partitioned_queue)
    too_large = "A" * 1024 * 512
    try:
        results = queue_client.send(Message(too_large))
    except MessageSendFailed:
        pytest.skip("Open issue for uAMQP on OSX")

    assert len(results) == 1
    assert not results[0][0]
    assert isinstance(results[0][1], MessageSendFailed)

    with queue_client.get_sender() as sender:
        with pytest.raises(MessageSendFailed):
            sender.send(Message(too_large))

    with queue_client.get_sender() as sender:
        sender.queue_message(Message(too_large))
        results = sender.send_pending_messages()
        assert len(results) == 1
        assert not results[0][0]
        assert isinstance(results[0][1], MessageSendFailed)
Example #4
 def test_text(self):
     win = self.win
     if self.win.winType=='pygame':
         pytest.skip("Text is different on pygame")
     #set font
     fontFile = os.path.join(prefs.paths['resources'], 'DejaVuSerif.ttf')
     #using init
     stim = visual.TextStim(win,text=u'\u03A8a', color=[0.5,1.0,1.0], ori=15,
         height=0.8*self.scaleFactor, pos=[0,0], font='DejaVu Serif',
         fontFiles=[fontFile], autoLog=False)
     stim.draw()
     #compare with a LIBERAL criterion (fonts do differ)
     utils.compareScreenshot('text1_%s.png' %(self.contextName), win, crit=20)
     win.flip()#AFTER compare screenshot
     #using set
     stim.setText('y', log=False)
     if sys.platform=='win32':
         stim.setFont('Courier New', log=False)
     else:
         stim.setFont('Courier', log=False)
     stim.setOri(-30.5, log=False)
     stim.setHeight(1.0*self.scaleFactor, log=False)
     stim.setColor([0.1,-1,0.8], colorSpace='rgb', log=False)
     stim.setPos([-0.5,0.5],'+', log=False)
     stim.setContrast(0.8, log=False)
     stim.setOpacity(0.8, log=False)
     stim.draw()
     str(stim) #check that str(xxx) is working
     #compare with a LIBERAL criterion (fonts do differ)
     utils.compareScreenshot('text2_%s.png' %(self.contextName), win, crit=20)
Example #5
def test_pqueue_by_servicebus_client_fail_send_batch_messages(live_servicebus_config, partitioned_queue):
    pytest.skip("TODO: Pending bugfix in uAMQP")
    def batch_data():
        for i in range(3):
            yield str(i) * 1024 * 256

    client = ServiceBusClient(
        service_namespace=live_servicebus_config['hostname'],
        shared_access_key_name=live_servicebus_config['key_name'],
        shared_access_key_value=live_servicebus_config['access_key'],
        debug=True)

    queue_client = client.get_queue(partitioned_queue)
    results = queue_client.send(BatchMessage(batch_data()))
    assert len(results) == 4
    assert not results[0][0]
    assert isinstance(results[0][1], MessageSendFailed)

    with queue_client.get_sender() as sender:
        with pytest.raises(MessageSendFailed):
            sender.send(BatchMessage(batch_data()))

    with queue_client.get_sender() as sender:
        sender.queue_message(BatchMessage(batch_data()))
        results = sender.send_pending_messages()
        assert len(results) == 4
        assert not results[0][0]
        assert isinstance(results[0][1], MessageSendFailed)
Example #6
def test_context_sensitive_shell(web_server, browser, dbsession, init):
    """See we can open a context sensitive shell in admin."""

    if dbsession.bind.dialect.name == "sqlite":
        pytest.skip("This fails with sqlite on Travis - a fact that doesn't make sense, but it does")

    b = browser
    create_logged_in_user(dbsession, init.config.registry, web_server, browser, admin=True)

    b.find_by_css("#nav-admin").click()
    b.find_by_css("#latest-user-shortcut").click()
    b.find_by_css("#btn-crud-shell").click()

    # Ramping up shell takes some extended time
    time.sleep(5)

    # We successfully exposed obj
    assert b.is_text_present("*****@*****.**")

    # File menu
    b.find_by_css(".dropdown a")[0].click()

    # Shutdown and Back to the home
    assert b.is_element_visible_by_css("#shutdown")
    b.find_by_css("#shutdown").click()

    # There should be alert "Do you really wish to leave notebook?"
    time.sleep(0.5)
    alert = b.driver.switch_to_alert()
    alert.accept()

    # Back to home screen
    assert b.is_element_visible_by_css("#nav-logout")
Example #7
def test_cifar_convnet_error(device_id):
    if cntk_device(device_id).type() != DeviceKind_GPU:
        pytest.skip('test only runs on GPU')
    set_default_device(cntk_device(device_id))

    try:
        base_path = os.path.join(os.environ['CNTK_EXTERNAL_TESTDATA_SOURCE_DIRECTORY'],
                                *"Image/CIFAR/v0/cifar-10-batches-py".split("/"))
        # N.B. CNTK_EXTERNAL_TESTDATA_SOURCE_DIRECTORY has {train,test}_map.txt
        #      and CIFAR-10_mean.xml in the base_path.
    except KeyError:
        base_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                *"../../../../Examples/Image/DataSets/CIFAR-10".split("/"))

    base_path = os.path.normpath(base_path)
    os.chdir(os.path.join(base_path, '..'))

    from _cntk_py import set_computation_network_trace_level, set_fixed_random_seed, force_deterministic_algorithms
    set_computation_network_trace_level(1)
    set_fixed_random_seed(1)  # BUGBUG: has no effect at present  # TODO: remove debugging facilities once this all works
    #force_deterministic_algorithms()
    # TODO: do the above; they lead to slightly different results, so not doing it for now

    reader_train = create_reader(os.path.join(base_path, 'train_map.txt'), os.path.join(base_path, 'CIFAR-10_mean.xml'), True)
    reader_test  = create_reader(os.path.join(base_path, 'test_map.txt'), os.path.join(base_path, 'CIFAR-10_mean.xml'), False)

    test_error = convnet_cifar10_dataaug(reader_train, reader_test, max_epochs=1)
    expected_test_error = 0.617

    assert np.allclose(test_error, expected_test_error,
                       atol=TOLERANCE_ABSOLUTE)
Example #8
def test_capturing_and_logging_fundamentals(testdir, method):
    if method == "StdCaptureFD" and not hasattr(os, 'dup'):
        pytest.skip("need os.dup")
    # here we check a fundamental feature
    p = testdir.makepyfile("""
        import sys, os
        import py, logging
        from _pytest import capture
        cap = capture.MultiCapture(out=False, in_=False,
                                     Capture=capture.%s)
        cap.start_capturing()

        logging.warn("hello1")
        outerr = cap.readouterr()
        print ("suspend, captured %%s" %%(outerr,))
        logging.warn("hello2")

        cap.pop_outerr_to_orig()
        logging.warn("hello3")

        outerr = cap.readouterr()
        print ("suspend2, captured %%s" %% (outerr,))
    """ % (method,))
    result = testdir.runpython(p)
    result.stdout.fnmatch_lines("""
        suspend, captured*hello1*
        suspend2, captured*WARNING:root:hello3*
    """)
    result.stderr.fnmatch_lines("""
        WARNING:root:hello2
    """)
    assert "atexit" not in result.stderr.str()
Example #9
def test_unary_ufunc(ufunc):
    if ufunc == 'fix':
        pytest.skip('fix calls floor in a way that we do not yet support')
    dafunc = getattr(da, ufunc)
    npfunc = getattr(np, ufunc)

    arr = np.random.randint(1, 100, size=(20, 20))
    darr = da.from_array(arr, 3)

    with pytest.warns(None):  # some invalid values (arccos, arcsin, etc.)
        # applying Dask ufunc doesn't trigger computation
        assert isinstance(dafunc(darr), da.Array)
        assert_eq(dafunc(darr), npfunc(arr), equal_nan=True)

    with pytest.warns(None):  # some invalid values (arccos, arcsin, etc.)
        # applying NumPy ufunc is lazy
        if isinstance(npfunc, np.ufunc):
            assert isinstance(npfunc(darr), da.Array)
        else:
            assert isinstance(npfunc(darr), np.ndarray)
        assert_eq(npfunc(darr), npfunc(arr), equal_nan=True)

    with pytest.warns(None):  # some invalid values (arccos, arcsin, etc.)
        # applying Dask ufunc to normal ndarray triggers computation
        assert isinstance(dafunc(arr), np.ndarray)
        assert_eq(dafunc(arr), npfunc(arr), equal_nan=True)
Example #10
def py_proc():
    """Get a python executable and args list which executes the given code."""
    if getattr(sys, 'frozen', False):
        pytest.skip("Can't be run when frozen")
    def func(code):
        return (sys.executable, ['-c', textwrap.dedent(code.strip('\n'))])
    return func
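A hedged usage sketch, assuming py_proc is registered as a pytest fixture (the decorator is not visible in the snippet) and that the test shells out with subprocess:

def test_child_interpreter(py_proc):
    import subprocess
    # Build an (executable, args) pair running a small script in a fresh interpreter.
    executable, args = py_proc("""
        import sys
        sys.exit(0)
    """)
    subprocess.check_call([executable] + args)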
Example #11
    def test_yy_format_with_yearfirst(self):
        data = """date,time,B,C
090131,0010,1,2
090228,1020,3,4
090331,0830,5,6
"""

        # See gh-217
        import dateutil
        if dateutil.__version__ >= LooseVersion('2.5.0'):
            pytest.skip("testing yearfirst=True not-support"
                        "on datetutil < 2.5.0 this works but"
                        "is wrong")

        rs = self.read_csv(StringIO(data), index_col=0,
                           parse_dates=[['date', 'time']])
        idx = DatetimeIndex([datetime(2009, 1, 31, 0, 10, 0),
                             datetime(2009, 2, 28, 10, 20, 0),
                             datetime(2009, 3, 31, 8, 30, 0)],
                            dtype=object, name='date_time')
        xp = DataFrame({'B': [1, 3, 5], 'C': [2, 4, 6]}, idx)
        tm.assert_frame_equal(rs, xp)

        rs = self.read_csv(StringIO(data), index_col=0,
                           parse_dates=[[0, 1]])
        idx = DatetimeIndex([datetime(2009, 1, 31, 0, 10, 0),
                             datetime(2009, 2, 28, 10, 20, 0),
                             datetime(2009, 3, 31, 8, 30, 0)],
                            dtype=object, name='date_time')
        xp = DataFrame({'B': [1, 3, 5], 'C': [2, 4, 6]}, idx)
        tm.assert_frame_equal(rs, xp)
Example #12
    def test_logout(self):
        """Make sure after we've logged out we can't access any of the formgrader pages."""
        if self.manager.jupyterhub is None:
            pytest.skip("JupyterHub is not running")

        # logout and wait for the login page to appear
        self._get("{}/hub".format(self.manager.base_url))
        self._wait_for_element("logout")
        self._wait_for_visibility_of_element("logout")
        element = self.browser.find_element_by_id("logout")
        element.click()
        self._wait_for_element("username_input")

        # try going to a formgrader page
        self._get(self.manager.base_formgrade_url)
        self._wait_for_element("username_input")
        next_url = self.formgrade_url().replace(self.manager.base_url, "")
        self._check_url("{}/hub/login?next={}".format(self.manager.base_url, next_url))

        # this will fail if we have a cookie for another user and try to access
        # a live notebook for that user
        if isinstance(self.manager, HubAuthNotebookServerUserManager):
            pytest.xfail("https://github.com/jupyter/jupyterhub/pull/290")

        # try going to a live notebook page
        problem = self.gradebook.find_assignment("Problem Set 1").notebooks[0]
        submission = sorted(problem.submissions, key=lambda x: x.id)[0]
        url = self.notebook_url("autograded/{}/Problem Set 1/{}.ipynb".format(submission.student.id, problem.name))
        self._get(url)
        self._wait_for_element("username_input")
        next_url = quote(url.replace(self.manager.base_url, ""))
        self._check_url("{}/hub/?next={}".format(self.manager.base_url, next_url))
Example #13
def srtm_login_or_skip(monkeypatch):
    import os
    try:
        srtm_username = os.environ['SRTM_USERNAME']
    except KeyError:
        pytest.skip('SRTM_USERNAME environment variable is unset.')
    try:
        srtm_password = os.environ['SRTM_PASSWORD']
    except KeyError:
        pytest.skip('SRTM_PASSWORD environment variable is unset.')

    from six.moves.urllib.request import (HTTPBasicAuthHandler,
                                          HTTPCookieProcessor,
                                          HTTPPasswordMgrWithDefaultRealm,
                                          build_opener)
    from six.moves.http_cookiejar import CookieJar

    password_manager = HTTPPasswordMgrWithDefaultRealm()
    password_manager.add_password(
        None,
        "https://urs.earthdata.nasa.gov",
        srtm_username,
        srtm_password)
    cookie_jar = CookieJar()
    opener = build_opener(HTTPBasicAuthHandler(password_manager),
                          HTTPCookieProcessor(cookie_jar))

    monkeypatch.setattr(cartopy.io, 'urlopen', opener.open)
Example #14
def test_solc_installation_as_function_call(monkeypatch, tmpdir, platform, version):
    if get_platform() != platform:
        pytest.skip("Wront platform for install script")

    base_install_path = str(tmpdir.mkdir("temporary-dir"))
    monkeypatch.setenv('SOLC_BASE_INSTALL_PATH', base_install_path)

    # sanity check that it's not already installed.
    executable_path = get_executable_path(version)
    assert not os.path.exists(executable_path)

    install_solc(identifier=version, platform=platform)

    assert os.path.exists(executable_path)
    monkeypatch.setenv('SOLC_BINARY', executable_path)

    extract_path = get_extract_path(version)
    if os.path.exists(extract_path):
        contains_so_file = any(
            os.path.basename(path).partition(os.path.extsep)[2] == 'so'
            for path
            in os.listdir(extract_path)
        )
        if contains_so_file:
            monkeypatch.setenv('LD_LIBRARY_PATH', extract_path)

    actual_version = get_solc_version()
    expected_version = semantic_version.Spec(version.lstrip('v'))

    assert actual_version in expected_version
Example #15
def test_pickles(current_pickle_data, legacy_pickle):
    if not is_platform_little_endian():
        pytest.skip("known failure on non-little endian")

    version = os.path.basename(os.path.dirname(legacy_pickle))
    with catch_warnings(record=True):
        compare(current_pickle_data, legacy_pickle, version)
Example #16
def test_cntk_203_reinforcement_learning_basics_noErrors(nb):
    if os.getenv("OS")=="Windows_NT" and sys.version_info[0] == 2:
        pytest.skip('tests with Python 2.7 on Windows are not stable in the CI environment. ')
    errors = [output for cell in nb.cells if 'outputs' in cell
              for output in cell['outputs'] if output.output_type == "error"]
    print(errors)
    assert errors == []
Example #17
def test_provider_crud(request, rest_api, from_detail):
    """Test the CRUD on provider using REST API.
    Steps:
        * POST /api/providers (method ``create``) <- {"hostname":..., "name":..., "type":
            "EmsVmware"}
        * Remember the provider ID.
        * Delete it either way:
            * DELETE /api/providers/<id>
            * POST /api/providers (method ``delete``) <- list of dicts containing hrefs to the
                providers, in this case just list with one dict.
    Metadata:
        test_flag: rest
    """
    if "create" not in rest_api.collections.providers.action.all:
        pytest.skip("Create action is not implemented in this version")

    if current_version() < "5.5":
        provider_type = "EmsVmware"
    else:
        provider_type = "ManageIQ::Providers::Vmware::InfraManager"
    provider = rest_api.collections.providers.action.create(
        hostname=fauxfactory.gen_alphanumeric(),
        name=fauxfactory.gen_alphanumeric(),
        type=provider_type,
    )[0]
    if from_detail:
        provider.action.delete()
        provider.wait_not_exists(num_sec=30, delay=0.5)
    else:
        rest_api.collections.providers.action.delete(provider)
        provider.wait_not_exists(num_sec=30, delay=0.5)
Example #18
def test_long_running_send(connection_str):
    if sys.platform.startswith('darwin'):
        import pytest
        pytest.skip("Skipping on OSX")
    parser = argparse.ArgumentParser()
    parser.add_argument("--duration", help="Duration in seconds of the test", type=int, default=30)
    parser.add_argument("--payload", help="payload size", type=int, default=512)
    parser.add_argument("--batch", help="Number of events to send and wait", type=int, default=1)
    parser.add_argument("--conn-str", help="EventHub connection string", default=connection_str)
    parser.add_argument("--eventhub", help="Name of EventHub")
    parser.add_argument("--address", help="Address URI to the EventHub entity")
    parser.add_argument("--sas-policy", help="Name of the shared access policy to authenticate with")
    parser.add_argument("--sas-key", help="Shared access key")

    args, _ = parser.parse_known_args()
    if args.conn_str:
        client = EventHubClient.from_connection_string(
            args.conn_str,
            eventhub=args.eventhub)
    elif args.address:
        client = EventHubClient(
            args.address,
            username=args.sas_policy,
            password=args.sas_key)
    else:
        try:
            import pytest
            pytest.skip("Must specify either '--conn-str' or '--address'")
        except ImportError:
            raise ValueError("Must specify either '--conn-str' or '--address'")

    try:
        main(client, args)
    except KeyboardInterrupt:
        pass
Example #19
def test_install_tar_lzma(script, data):
    try:
        import lzma  # noqa
    except ImportError:
        pytest.skip("No lzma support")
    res = script.pip('install', data.packages / 'singlemodule-0.0.1.tar.lzma')
    assert "Successfully installed singlemodule-0.0.1" in res.stdout, res
Example #20
def skip_if_empty(backend_list, required_interfaces):
    if not backend_list:
        pytest.skip(
            "No backends provided supply the interface: {0}".format(
                ", ".join(iface.__name__ for iface in required_interfaces)
            )
        )
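A hedged call-site sketch; the helper function and the filtering step below are illustrative, not taken from the source:

def backends_supporting(all_backends, interface):
    # interface is an abstract backend class (e.g. a CipherBackend-style ABC).
    # Narrow the configured backends to those implementing it, then skip the
    # whole parametrization when none qualify.
    subset = [b for b in all_backends if isinstance(b, interface)]
    skip_if_empty(subset, [interface])
    return subset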
Example #21
    def wait_scroll_pos_changed(self, x=None, y=None):
        """Wait until a "Scroll position changed" message was found.

        With QtWebEngine, on older Qt versions which lack
        QWebEnginePage.scrollPositionChanged, this also skips the test.
        """
        __tracebackhide__ = (lambda e:
                             e.errisinstance(testprocess.WaitForTimeout))
        if (x is None and y is not None) or (y is None and x is not None):
            raise ValueError("Either both x/y or neither must be given!")

        if self.request.config.webengine:
            # pylint: disable=no-name-in-module,useless-suppression
            from PyQt5.QtWebEngineWidgets import QWebEnginePage
            # pylint: enable=no-name-in-module,useless-suppression
            if not hasattr(QWebEnginePage, 'scrollPositionChanged'):
                # Qt < 5.7
                pytest.skip("QWebEnginePage.scrollPositionChanged missing")
        if x is None and y is None:
            point = 'PyQt5.QtCore.QPoint(*, *)'  # not counting 0/0 here
        elif x == '0' and y == '0':
            point = 'PyQt5.QtCore.QPoint()'
        else:
            point = 'PyQt5.QtCore.QPoint({}, {})'.format(x, y)
        self.wait_for(category='webview',
                      message='Scroll position changed to ' + point)
Example #22
def test_ogr_fgdb_stress_2():
    if ogrtest.fgdb_drv is None:
        pytest.skip()

    ds_test = ogr.Open('tmp/test.gdb')
    ds_ref = ogr.Open('tmp/test.' + ogrtest.reference_ext)

    lyr_test = ds_test.GetLayer(0)
    lyr_ref = ds_ref.GetLayer(0)

    while True:
        f_test = lyr_test.GetNextFeature()
        f_ref = lyr_ref.GetNextFeature()
        assert not ((f_test is None and f_ref is not None) or (f_test is not None and f_ref is None))
        if f_test is None:
            break
        if f_test.GetFID() != f_ref.GetFID() or \
           f_test['str'] != f_ref['str'] or \
           ogrtest.check_feature_geometry(f_test, f_ref.GetGeometryRef()) != 0:
            f_test.DumpReadable()
            f_ref.DumpReadable()
            pytest.fail()

    for val in range(1000):
        lyr_test.SetAttributeFilter("str = '%d'" % val)
        lyr_ref.SetAttributeFilter("str = '%d'" % val)
        assert lyr_test.GetFeatureCount() == lyr_ref.GetFeatureCount(), val
Example #23
File: demos.py Project: pymor/pymor
def _skip_if_no_fenics(param):
    _, args = param
    needs_fenics = len([f for f in args if "fenics" in str(f)]) > 0
    from pymor.core.config import config

    if needs_fenics and not config.HAVE_FENICS:
        pytest.skip("skipped test due to missing Fenics")
Example #24
    def test_encode(self, html_encoding_file):
        _, encoding = os.path.splitext(
            os.path.basename(html_encoding_file)
        )[0].split('_')

        try:
            with open(html_encoding_file, 'rb') as fobj:
                from_string = self.read_html(fobj.read(), encoding=encoding,
                                             index_col=0).pop()

            with open(html_encoding_file, 'rb') as fobj:
                from_file_like = self.read_html(BytesIO(fobj.read()),
                                                encoding=encoding,
                                                index_col=0).pop()

            from_filename = self.read_html(html_encoding_file,
                                           encoding=encoding,
                                           index_col=0).pop()
            tm.assert_frame_equal(from_string, from_file_like)
            tm.assert_frame_equal(from_string, from_filename)
        except Exception:
            # seems utf-16/32 fail on windows
            if is_platform_windows():
                if '16' in encoding or '32' in encoding:
                    pytest.skip()
            raise
Example #25
def test_ascii_path(pyi_builder):
    distdir = pyi_builder._distdir
    dd_ascii = distdir.encode('ascii', 'replace').decode('ascii')
    if distdir != dd_ascii:
        pytest.skip("Default build path not ASCII, skipping...")

    pyi_builder.test_script('pyi_path_encoding.py')
Example #26
 def test_delete_cascades_to_tags(self):
     pytest.skip('sqlite is not compiled with foreign key support on Jenkins; this test works on my machine but not on Jenkins')
     Session.crud.delete(query_from(db.turner_account))
     Session.crud.delete(query_from(db.turner))
     with Session() as session:
         self.assertEqual(1, session.query(Account).count())
         self.assertEqual(2, session.query(Tag).count())
Example #27
    def test_verbose_reporting(self, testdir, pytestconfig):
        p1 = testdir.makepyfile("""
            import pytest
            def test_fail():
                raise ValueError()
            def test_pass():
                pass
            class TestClass:
                def test_skip(self):
                    pytest.skip("hello")
            def test_gen():
                def check(x):
                    assert x == 1
                yield check, 0
        """)
        result = testdir.runpytest(p1, '-v')
        result.stdout.fnmatch_lines([
            "*test_verbose_reporting.py::test_fail *FAIL*",
            "*test_verbose_reporting.py::test_pass *PASS*",
            "*test_verbose_reporting.py::TestClass::test_skip *SKIP*",
            "*test_verbose_reporting.py::test_gen*0* *FAIL*",
        ])
        assert result.ret == 1

        if not pytestconfig.pluginmanager.get_plugin("xdist"):
            pytest.skip("xdist plugin not installed")

        result = testdir.runpytest(p1, '-v', '-n 1')
        result.stdout.fnmatch_lines([
            "*FAIL*test_verbose_reporting.py::test_fail*",
        ])
        assert result.ret == 1
Example #28
def test_run_datastore_analysis(setup_provider, datastore, soft_assert, datastores_hosts_setup,
                                clear_all_tasks, appliance):
    """Tests smarthost analysis

    Metadata:
        test_flag: datastore_analysis
    """
    # Initiate analysis
    try:
        datastore.run_smartstate_analysis(wait_for_task_result=True)
    except MenuItemNotFound:
        # TODO need to update to cover all datastores
        pytest.skip('Smart State analysis is disabled for {} datastore'.format(datastore.name))
    details_view = navigate_to(datastore, 'DetailsFromProvider')
    # c_datastore = details_view.entities.properties.get_text_of("Datastore Type")

    # Check results of the analysis and the datastore type
    # TODO need to clarify datastore type difference
    # soft_assert(c_datastore == datastore.type.upper(),
    #             'Datastore type does not match the type defined in yaml:' +
    #             'expected "{}" but was "{}"'.format(datastore.type.upper(), c_datastore))

    wait_for(lambda: details_view.entities.content.get_text_of(CONTENT_ROWS_TO_CHECK[0]),
             delay=15, timeout="3m",
             fail_condition='0',
             fail_func=appliance.server.browser.refresh)
    managed_vms = details_view.entities.relationships.get_text_of('Managed VMs')
    if managed_vms != '0':
        for row_name in CONTENT_ROWS_TO_CHECK:
            value = details_view.entities.content.get_text_of(row_name)
            soft_assert(value != '0',
                        'Expected value for {} to be non-empty'.format(row_name))
    else:
        assert details_view.entities.content.get_text_of(CONTENT_ROWS_TO_CHECK[-1]) != '0'
Example #29
    def test_radial(self):
        if self.win.winType=='pygame':
            pytest.skip("RadialStim dodgy on pygame")
        win = self.win
        #using init
        wedge = visual.RadialStim(win, tex='sqrXsqr', color=1,size=2*self.scaleFactor,
            visibleWedge=[0, 45], radialCycles=2, angularCycles=2, interpolate=False, autoLog=False)
        wedge.draw()
        thresh = 10
        utils.compareScreenshot('wedge1_%s.png' %(self.contextName), win, crit=thresh)
        win.flip()#AFTER compare screenshot

        #using .set()
        wedge.setMask('gauss', log=False)
        wedge.setSize(3*self.scaleFactor, log=False)
        wedge.setAngularCycles(3, log=False)
        wedge.setRadialCycles(3, log=False)
        wedge.setOri(180, log=False)
        wedge.setContrast(0.8, log=False)
        wedge.setOpacity(0.8, log=False)
        wedge.setRadialPhase(0.1,operation='+', log=False)
        wedge.setAngularPhase(0.1, log=False)
        wedge.draw()
        str(wedge) #check that str(xxx) is working
        utils.compareScreenshot('wedge2_%s.png' %(self.contextName), win, crit=10.0)
Example #30
    def test_that_user_can_purchase_an_app(self, base_url, selenium, new_user):
        if '-dev' not in base_url:
            pytest.skip("Payments can only be tested on dev.")
        else:
            pytest.xfail("Bug 1212152 - App purchases are failing on dev")

        home_page = Home(base_url, selenium)
        home_page.go_to_homepage()
        home_page.header.click_sign_in()
        home_page.login(new_user['email'], new_user['password'])
        assert home_page.is_the_current_page
        home_page.set_region('us')

        # Use the first paid app
        app = home_page.header.search(':paid').results[0]
        app_name = app.name
        details_page = app.click_name()
        assert 'free' not in details_page.price_text
        assert 'paid' in details_page.app_status

        payment = details_page.click_install_button()
        payment.create_pin(self.PIN)
        payment.wait_for_buy_app_section_displayed()
        assert app_name == payment.app_name

        payment.click_buy_button()
        # We are not able to interact with the doorhanger that appears to install the app
        # using Selenium
        # We can check for the `purchased` attribute on the price button though
        details_page.wait_for_app_purchased()
Example #31
def test_rounding():
    # Round a off to three decimal places.
    if result_1 is None:
        pytest.skip("You didn't finish this task: the result variable equals None")
    assert round(a, 3) == result_1, f'Expected {round(a, 3)}, but got {result_1}'
Example #32
def test_sql_server_cdc_insert_and_update(sdc_builder, sdc_executor, database, use_table):
    """Test for SQL Server CDC origin stage with insert and update ops

    The pipeline looks like:
        sql_server_cdc_origin >> jdbc_producer
    """
    if not database.is_cdc_enabled:
        pytest.skip('Test only runs against SQL Server with CDC enabled.')

    try:
        connection = database.engine.connect()
        schema_name = DEFAULT_SCHEMA_NAME

        rows_in_database = setup_sample_data(1)

        # create the table and insert 1 row
        table_name = get_random_string(string.ascii_lowercase, 20)
        table = setup_table(connection, schema_name, table_name, rows_in_database)

        # update the row
        updated_name = 'jisun'
        connection.execute(table.update()
                           .where(table.c.id == 0)
                           .values(name=updated_name))

        total_no_of_records = 3

        # get the capture_instance_name
        capture_instance_name = f'{schema_name}_{table_name}'

        pipeline_builder = sdc_builder.get_pipeline_builder()
        sql_server_cdc = pipeline_builder.add_stage('SQL Server CDC Client')
        sql_server_cdc.set_attributes(table_configs=[{'capture_instance': capture_instance_name}],
                                      use_direct_table_query=use_table,
                                      fetch_size=2
                                      )

        # create the destination table
        dest_table_name = get_random_string(string.ascii_uppercase, 9)
        dest_table = create_table(database, DEFAULT_SCHEMA_NAME, dest_table_name)

        jdbc_producer = pipeline_builder.add_stage('JDBC Producer')

        jdbc_producer.set_attributes(schema_name=DEFAULT_SCHEMA_NAME,
                                     table_name=dest_table_name,
                                     default_operation='INSERT',
                                     field_to_column_mapping=[])

        sql_server_cdc >> jdbc_producer
        pipeline = pipeline_builder.build().configure_for_environment(database)
        sdc_executor.add_pipeline(pipeline)

        # wait for data captured by cdc jobs in sql server before starting the pipeline
        ct_table_name = f'{capture_instance_name}_CT'
        wait_for_data_in_ct_table(ct_table_name, total_no_of_records, database)

        sdc_executor.start_pipeline(pipeline).wait_for_pipeline_output_records_count(total_no_of_records)
        sdc_executor.stop_pipeline(pipeline)
        expected_rows_in_database = [
            {'id': rows_in_database[0].get('id'), 'name': updated_name, 'dt': rows_in_database[0].get('dt')}]
        assert_table_replicated(database, expected_rows_in_database, DEFAULT_SCHEMA_NAME, dest_table_name)

    finally:
        if table is not None:
            logger.info('Dropping table %s in %s database...', table, database.type)
            table.drop(database.engine)

        if dest_table is not None:
            logger.info('Dropping table %s in %s database...', dest_table, database.type)
            dest_table.drop(database.engine)

        if connection is not None:
            connection.close()
Example #33
def test_sql_server_cdc_with_last_committed_offset(sdc_builder, sdc_executor, database, use_table):
    """Test for SQL Server CDC origin stage with nonempty last committed offset by restarting the pipeline

    The pipeline looks like:
        sql_server_cdc_origin >> jdbc_producer
    """
    if not database.is_cdc_enabled:
        pytest.skip('Test only runs against SQL Server with CDC enabled.')

    try:
        connection = database.engine.connect()
        schema_name = DEFAULT_SCHEMA_NAME
        first_no_of_records = 5
        second_no_of_records = 8
        total_no_of_records = first_no_of_records + second_no_of_records

        rows_in_database = setup_sample_data(total_no_of_records)

        # create the table and insert the first half of the rows
        table_name = get_random_string(string.ascii_lowercase, 20)
        table = setup_table(connection, schema_name, table_name, rows_in_database[0:first_no_of_records])

        # get the capture_instance_name
        capture_instance_name = f'{schema_name}_{table_name}'

        pipeline_builder = sdc_builder.get_pipeline_builder()
        sql_server_cdc = pipeline_builder.add_stage('SQL Server CDC Client')
        sql_server_cdc.set_attributes(table_configs=[{'capture_instance': capture_instance_name}],
                                      use_direct_table_query=use_table
                                      )

        # create the destination table
        dest_table_name = get_random_string(string.ascii_uppercase, 9)
        dest_table = create_table(database, DEFAULT_SCHEMA_NAME, dest_table_name)

        jdbc_producer = pipeline_builder.add_stage('JDBC Producer')

        jdbc_producer.set_attributes(schema_name=DEFAULT_SCHEMA_NAME,
                                     table_name=dest_table_name,
                                     default_operation='INSERT',
                                     field_to_column_mapping=[])

        sql_server_cdc >> jdbc_producer
        pipeline = pipeline_builder.build().configure_for_environment(database)
        sdc_executor.add_pipeline(pipeline)

        # wait for data captured by cdc jobs in sql server before starting the pipeline
        ct_table_name = f'{capture_instance_name}_CT'
        wait_for_data_in_ct_table(ct_table_name, first_no_of_records, database)

        sdc_executor.start_pipeline(pipeline).wait_for_pipeline_output_records_count(first_no_of_records)
        sdc_executor.stop_pipeline(pipeline)
        assert_table_replicated(database, rows_in_database[0:first_no_of_records], DEFAULT_SCHEMA_NAME, dest_table_name)

        # insert the second half of the sample data
        add_data_to_table(connection, table, rows_in_database[first_no_of_records:total_no_of_records])
        wait_for_data_in_ct_table(ct_table_name, total_no_of_records, database)

        # restart the pipeline
        sdc_executor.start_pipeline(pipeline).wait_for_pipeline_output_records_count(second_no_of_records)
        sdc_executor.stop_pipeline(pipeline)
        assert_table_replicated(database, rows_in_database, DEFAULT_SCHEMA_NAME, dest_table_name)

    finally:
        if table is not None:
            logger.info('Dropping table %s in %s database...', table, database.type)
            table.drop(database.engine)

        if dest_table is not None:
            logger.info('Dropping table %s in %s database...', dest_table, database.type)
            dest_table.drop(database.engine)

        if connection is not None:
            connection.close()
Example #34
def test_sql_server_cdc_with_nonempty_initial_offset(sdc_builder, sdc_executor, database, use_table):
    """Test for SQL Server CDC origin stage with non-empty initial offset (fetch the data from the given LSN)
    on both use table config is true and false

    The pipeline looks like:
        sql_server_cdc_origin >> jdbc_producer
    """
    if not database.is_cdc_enabled:
        pytest.skip('Test only runs against SQL Server with CDC enabled.')

    try:
        connection = database.engine.connect()
        schema_name = DEFAULT_SCHEMA_NAME
        first_no_of_records = 3
        second_no_of_records = 8
        total_no_of_records = first_no_of_records + second_no_of_records
        rows_in_database = setup_sample_data(total_no_of_records)

        # create the table and insert the first half of the rows
        table_name = get_random_string(string.ascii_lowercase, 20)
        capture_instance_name = f'{schema_name}_{table_name}'
        table = setup_table(connection, schema_name, table_name, rows_in_database[0:first_no_of_records])
        ct_table_name = f'{capture_instance_name}_CT'
        wait_for_data_in_ct_table(ct_table_name, first_no_of_records, database)

        # insert the last half of the sample data
        add_data_to_table(connection, table, rows_in_database[first_no_of_records:total_no_of_records])
        wait_for_data_in_ct_table(ct_table_name, total_no_of_records, database)

        # get the capture_instance_name
        capture_instance_name = f'{schema_name}_{table_name}'

        # get the current LSN
        currentLSN = binascii.hexlify(connection.execute('SELECT sys.fn_cdc_get_max_lsn() as currentLSN').scalar())

        pipeline_builder = sdc_builder.get_pipeline_builder()
        sql_server_cdc = pipeline_builder.add_stage('SQL Server CDC Client')
        sql_server_cdc.set_attributes(table_configs=[{'capture_instance': capture_instance_name,
                                                      'initialOffset': currentLSN.decode("utf-8")}],
                                      use_direct_table_query=use_table
                                      )

        # create the destination table
        dest_table_name = get_random_string(string.ascii_uppercase, 9)
        dest_table = create_table(database, DEFAULT_SCHEMA_NAME, dest_table_name)

        jdbc_producer = pipeline_builder.add_stage('JDBC Producer')

        jdbc_producer.set_attributes(schema_name=DEFAULT_SCHEMA_NAME,
                                     table_name=dest_table_name,
                                     default_operation='INSERT',
                                     field_to_column_mapping=[])

        sql_server_cdc >> jdbc_producer
        pipeline = pipeline_builder.build().configure_for_environment(database)
        sdc_executor.add_pipeline(pipeline)

        sdc_executor.start_pipeline(pipeline).wait_for_pipeline_output_records_count(second_no_of_records)
        sdc_executor.stop_pipeline(pipeline)

        assert_table_replicated(database, rows_in_database[first_no_of_records:total_no_of_records],
                                DEFAULT_SCHEMA_NAME, dest_table_name)

    finally:
        if table is not None:
            logger.info('Dropping table %s in %s database...', table, database.type)
            table.drop(database.engine)

        if dest_table is not None:
            logger.info('Dropping table %s in %s database...', dest_table, database.type)
            dest_table.drop(database.engine)

        if connection is not None:
            connection.close()
Example #35
def test_sql_server_cdc_with_specific_capture_instance_name(sdc_builder, sdc_executor, database, no_of_threads):
    """Test for SQL Server CDC origin stage when capture instance is configured.
    We do so by capturing Insert Operation on CDC enabled table
    using SQL Server CDC Origin and having a pipeline which reads that data using SQL Server CDC origin stage.
    We setup no_of_tables of CDC tables in SQL Server and configured one specific capture instance name
    so that the records from one table will be stored in SQL Server table using JDBC Producer.
    Data is then asserted for what is captured at SQL Server Job and what we read in the pipeline.
    The pipeline looks like:
        sql_server_cdc_origin >= pipeline_finisher_executor
        sql_server_cdc_origin >> jdbc_producer
    """
    if not database.is_cdc_enabled:
        pytest.skip('Test only runs against SQL Server with CDC enabled.')

    try:
        connection = database.engine.connect()
        schema_name = DEFAULT_SCHEMA_NAME
        tables = []
        table_configs = []
        no_of_records = 5
        target_table_index = 2
        rows_in_database = setup_sample_data(no_of_threads * no_of_records)

        # setup the tables first
        for index in range(0, no_of_threads):
            table_name = get_random_string(string.ascii_lowercase, 20)
            # split the rows_in_database into no_of_records for each table
            # e.g. for no_of_records=5, the first table inserts rows_in_database[0:5]
            # and the second table inserts rows_in_database[5:10]
            table = setup_table(connection, schema_name, table_name,
                                rows_in_database[(index * no_of_records): ((index + 1) * no_of_records)])
            tables.append(table)
            table_configs.append({'capture_instance': f'{schema_name}_{table_name}'})

        rows_in_database[target_table_index * no_of_records: (target_table_index + 1) * no_of_records]

        pipeline_builder = sdc_builder.get_pipeline_builder()
        sql_server_cdc = pipeline_builder.add_stage('SQL Server CDC Client')
        sql_server_cdc.set_attributes(maximum_pool_size=no_of_threads,
                                      number_of_threads=no_of_threads,
                                      table_configs=table_configs)

        dest_table_name = get_random_string(string.ascii_uppercase, 9)

        dest_table = create_table(database, DEFAULT_SCHEMA_NAME, dest_table_name)
        tables.append(dest_table)
        jdbc_producer = pipeline_builder.add_stage('JDBC Producer')

        jdbc_producer.set_attributes(schema_name=DEFAULT_SCHEMA_NAME,
                                     table_name=dest_table_name,
                                     default_operation='INSERT',
                                     field_to_column_mapping=[])

        sql_server_cdc >> jdbc_producer
        pipeline = pipeline_builder.build().configure_for_environment(database)
        sdc_executor.add_pipeline(pipeline)

        # wait for data captured by cdc jobs in sql server before starting the pipeline
        for table_config in table_configs:
            ct_table_name = f'{table_config.get("capture_instance")}_CT'
            wait_for_data_in_ct_table(ct_table_name, no_of_records, database)

        sdc_executor.start_pipeline(pipeline).wait_for_pipeline_output_records_count(no_of_records * no_of_threads)
        sdc_executor.stop_pipeline(pipeline)

        assert_table_replicated(database, rows_in_database, DEFAULT_SCHEMA_NAME, dest_table_name)

    finally:
        for table in tables:
            logger.info('Dropping table %s in %s database...', table, database.type)
            table.drop(database.engine)
Example #36
def test_rounding_less():
    # Round result_3 down to the lesser (floor) value.
    if result_3 is None:
        pytest.skip("You didn't finish this task: the result variable equals None")
    assert math.floor(8 / 3 * 5 + 4.75 - 7) == result_3, f'Expected {math.floor(8 / 3 * 5 + 4.75 - 7)},' \
                                                         f' but got {result_3}'
Example #37
def test_rounding_greater():
    # Round result_2 up to the greater (ceiling) value.
    if result_2 is None:
        pytest.skip("You didn't finish this task: the result variable equals None")
    assert math.ceil(5 / 2 * 6 + 1.25 - 4) == result_2, f'Expected {math.ceil(5 / 2 * 6 + 1.25 - 4)}, ' \
                                                        f'but got {result_2}'
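For reference, the expected values work out as follows under standard operator precedence (a quick sanity check, not part of the test suite):

import math
assert 5 / 2 * 6 + 1.25 - 4 == 12.25            # so math.ceil(...) == 13
assert math.floor(8 / 3 * 5 + 4.75 - 7) == 11   # 8/3*5 + 4.75 - 7 is approximately 11.0833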
Example #38
# along with Ansible.  If not, see <https://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler

try:
    from ansible.modules.network.fortios import fortios_system_ipv6_neighbor_cache
except ImportError:
    pytest.skip("Could not load required modules for testing",
                allow_module_level=True)


@pytest.fixture(autouse=True)
def connection_mock(mocker):
    connection_class_mock = mocker.patch(
        'ansible.modules.network.fortios.fortios_system_ipv6_neighbor_cache.Connection'
    )
    return connection_class_mock


fos_instance = FortiOSHandler(connection_mock)


def test_system_ipv6_neighbor_cache_creation(mocker):
    schema_method_mock = mocker.patch(
Example #39
def pytest_runtest_setup(item):
    """Skip tests marked 'integration' unless an ip address is given."""
    if "integration" in item.keywords and not item.config.getoption("--ip"):
        pytest.skip("use --ip and an ip address to run integration tests.")
Example #40
def pytest_runtest_setup(item):
    markers = [marker.name for marker in item.iter_markers()]
    if item.config.getoption("--ci") and "aws" in markers:
        pytest.skip("Skip aws tests during CI")
Example #41
 def setup_server(self):
     pytest.skip('BoringSSL does not support server mode at this time')
Example #42
def check_template_pack(node, template_pack):
    mark = node.get_closest_marker("only")
    if mark:
        if template_pack not in mark.args:
            pytest.skip("Requires %s template pack" % " or ".join(mark.args))
Example #43
def skipped(reason, id=None):
    return pytest.param(lambda: pytest.skip(reason), id=id)
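Because the parameter value is a zero-argument callable, the consuming test is expected to invoke it; a hedged usage sketch:

@pytest.mark.parametrize("case", [
    skipped("backend unavailable", id="unsupported"),
    pytest.param(lambda: None, id="supported"),
])
def test_case(case):
    case()  # the skipped variant raises pytest.skip.Exception here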
Example #44
def test_bgp_gr_helper_routes_perserved(duthosts, rand_one_dut_hostname, nbrhosts, setup_bgp_graceful_restart, tbinfo):
    """Verify that routes received from one neighbor are all preserved during peer graceful restart."""

    def _find_test_bgp_neighbors(test_neighbor_name, bgp_neighbors):
        """Find test BGP neighbor peers."""
        test_bgp_neighbors = []
        for bgp_neighbor, neighbor_details in bgp_neighbors.items():
            if test_neighbor_name == neighbor_details['name']:
                test_bgp_neighbors.append(bgp_neighbor)
        return test_bgp_neighbors

    def _get_rib(duthost):
        """Return DUT rib."""
        routes = {}
        for namespace in duthost.get_frontend_asic_namespace_list():
            bgp_cmd_ipv4 = "vtysh -c \"show bgp ipv4 json\""
            bgp_cmd_ipv6 = "vtysh -c \"show bgp ipv6 json\""
            cmd = duthost.get_vtysh_cmd_for_namespace(bgp_cmd_ipv4, namespace)
            routes.update(json.loads(duthost.shell(cmd, verbose=False)['stdout'])["routes"])
            cmd = duthost.get_vtysh_cmd_for_namespace(bgp_cmd_ipv6, namespace)
            routes.update(json.loads(duthost.shell(cmd, verbose=False)['stdout'])["routes"])
        return routes

    def _get_learned_bgp_routes_from_neighbor(duthost, bgp_neighbor):
        """Get all learned routes from the BGP neighbor."""
        routes = {}
        if is_ipv4_address(unicode(bgp_neighbor)):
            cmd = "vtysh -c 'show bgp ipv4 neighbor %s routes json'" % bgp_neighbor
        else:
            cmd = "vtysh -c 'show bgp ipv6 neighbor %s routes json'" % bgp_neighbor
        for namespace in duthost.get_frontend_asic_namespace_list():
            cmd = duthost.get_vtysh_cmd_for_namespace(cmd, namespace)
            routes.update(json.loads(duthost.shell(cmd, verbose=False)["stdout"])["routes"])
        return routes

    def _get_prefix_counters(duthost, bgp_neighbor, namespace):
        """Get Rib route counters for neighbor."""
        if is_ipv4_address(unicode(bgp_neighbor)):
            cmd = "vtysh -c 'show bgp ipv4 neighbor %s prefix-counts json'" % bgp_neighbor
        else:
            cmd = "vtysh -c 'show bgp ipv6 neighbor %s prefix-counts json'" % bgp_neighbor
        cmd = duthost.get_vtysh_cmd_for_namespace(cmd, namespace)
        cmd_result = json.loads(duthost.shell(cmd, verbose=False)["stdout"])
        return cmd_result

    def _verify_prefix_counters_from_neighbor_during_graceful_restart(duthost, bgp_neighbors):
        """Verify that all routes received from neighbor are stale during graceful restart."""
        for bgp_neighbor in bgp_neighbors:
            for namespace in duthost.get_frontend_asic_namespace_list():
                counters = _get_prefix_counters(duthost, bgp_neighbor, namespace)
                logging.debug("Prefix counters for bgp neighbor %s in namespace %s:\n%s\n", bgp_neighbor, namespace, counters)
                assert counters["ribTableWalkCounters"]["Stale"] == counters["ribTableWalkCounters"]["All RIB"]

    def _verify_bgp_neighbor_routes_during_graceful_restart(neighbor_routes, rib):
        for prefix, nexthops in neighbor_routes.items():
            logging.debug("Check prefix %s, nexthops:\n%s\n", prefix, json.dumps(nexthops))
            if prefix not in rib:
                pytest.fail("Route to prefix %s doesn't exist during graceful restart." % prefix)
            nexthop_expected = nexthops[0]
            bgp_neighbor_expected = nexthop_expected["peerId"]
            for nexthop in rib[prefix]:
                if nexthop["peerId"] == bgp_neighbor_expected:
                    if nexthop.get("stale", False) is False:
                        logging.error("Rib route entry to prefix %s:\n%s\n", prefix, json.dumps(rib[prefix]))
                        pytest.fail("Route to prefix %s should be stale during graceful restart." % prefix)
                    break
            else:
                logging.error("Rib route entry to prefix %s:\n%s\n", prefix, json.dumps(rib[prefix]))
                pytest.fail("Route to prefix doesn't originate from BGP neighbor %s." % bgp_neighbor_expected)

    def _verify_prefix_counters_from_neighbor_after_graceful_restart(duthost, bgp_neighbors):
        """Verify routes from neighbor are relearned and out of stale after graceful restart."""
        for bgp_neighbor in bgp_neighbors:
            for namespace in duthost.get_frontend_asic_namespace_list():
                counters = _get_prefix_counters(duthost, bgp_neighbor, namespace)
                logging.debug("Prefix counters for bgp neighbor %s in namespace %s:\n%s\n", bgp_neighbor, namespace, json.dumps(counters))
                if not (counters["ribTableWalkCounters"]["Stale"] == 0 and counters["ribTableWalkCounters"]["Valid"] > 0):
                    return False
        return True

    duthost = duthosts[rand_one_dut_hostname]

    config_facts = duthost.config_facts(host=duthost.hostname, source="running")['ansible_facts']
    bgp_neighbors = config_facts.get('BGP_NEIGHBOR', {})
    portchannels = config_facts.get('PORTCHANNEL', {})
    dev_nbrs = config_facts.get('DEVICE_NEIGHBOR', {})
    configurations = tbinfo['topo']['properties']['configuration_properties']
    exabgp_ips = [configurations['common']['nhipv4'], configurations['common']['nhipv6']]
    exabgp_sessions = ['exabgp_v4', 'exabgp_v6']

    # select neighbor to test
    if duthost.check_bgp_default_route():
        # if default route is present, select from default route nexthops
        rtinfo_v4 = duthost.get_ip_route_info(ipaddress.ip_network(u"0.0.0.0/0"))
        rtinfo_v6 = duthost.get_ip_route_info(ipaddress.ip_network(u"::/0"))

        ifnames_v4 = [nh[1] for nh in rtinfo_v4['nexthops']]
        ifnames_v6 = [nh[1] for nh in rtinfo_v6['nexthops']]

        ifnames_common = [ifname for ifname in ifnames_v4 if ifname in ifnames_v6]
        if len(ifnames_common) == 0:
            pytest.skip("No common ifnames between ifnames_v4 and ifname_v6: %s and %s" % (ifnames_v4, ifnames_v6))
        test_interface = ifnames_common[0]
    else:
        # if default route is not present, randomly select a neighbor to test
        test_interface = random.sample([k for k, v in dev_nbrs.items() if not v['name'].startswith("Server")], 1)[0]

    # get neighbor device connected ports
    nbr_ports = []
    if test_interface.startswith("PortChannel"):
        for member in portchannels[test_interface]['members']:
            nbr_ports.append(dev_nbrs[member]['port'])
        test_neighbor_name = dev_nbrs[member]['name']
    else:
        nbr_ports.append(dev_nbrs[test_interface]['port'])
        test_neighbor_name = dev_nbrs[test_interface]['name']

    test_neighbor_host = nbrhosts[test_neighbor_name]['host']

    # get neighbor BGP peers
    test_bgp_neighbors = _find_test_bgp_neighbors(test_neighbor_name, bgp_neighbors)

    logging.info("Select neighbor %s to verify that all bgp routes are preserved during graceful restart", test_neighbor_name)

    # get all routes received from neighbor before GR
    all_neighbor_routes_before_gr = {}
    for test_bgp_neighbor in test_bgp_neighbors:
        all_neighbor_routes_before_gr.update(_get_learned_bgp_routes_from_neighbor(duthost, test_bgp_neighbor))

    # verify exabgp sessions to the neighbor are up before GR process
    pytest_assert(
        test_neighbor_host.check_bgp_session_state(exabgp_ips, exabgp_sessions),
        "exabgp sessions {} are not up before graceful restart".format(exabgp_sessions)
    )

    try:
        # shutdown Rib agent, starting GR process
        logger.info("shutdown rib process on neighbor {}".format(test_neighbor_name))
        test_neighbor_host.kill_bgpd()

        # wait till DUT enters NSF state
        for test_bgp_neighbor in test_bgp_neighbors:
            pytest_assert(
                wait_until(60, 5, 0, duthost.check_bgp_session_nsf, test_bgp_neighbor),
                "neighbor {} does not enter NSF state".format(test_bgp_neighbor)
            )

        # confirm routes from the neighbor still there
        rib_after_gr = _get_rib(duthost)
        _verify_bgp_neighbor_routes_during_graceful_restart(all_neighbor_routes_before_gr, rib_after_gr)

        # confirm routes from the neighbor are in STALE state
        _verify_prefix_counters_from_neighbor_during_graceful_restart(duthost, test_bgp_neighbors)

    except Exception:
        test_neighbor_host.start_bgpd()
        raise

    try:
        # shutdown the connected ports from nbr
        logging.info("shutdown the ports connected to neighbor %s: %s", test_neighbor_name, nbr_ports)
        for nbr_port in nbr_ports:
            test_neighbor_host.shutdown(nbr_port)

        # start Rib agent
        logging.info("startup rib process on neighbor {}".format(test_neighbor_name))
        test_neighbor_host.start_bgpd()

        # wait for exabgp sessions to establish
        pytest_assert(
            wait_until(300, 10, 0, test_neighbor_host.check_bgp_session_state, exabgp_ips, exabgp_sessions),
            "exabgp sessions {} are not coming back".format(exabgp_sessions)
        )

    finally:
        # unshut the connected ports from nbr
        logging.info("unshut the ports connected to neighbor %s: %s", test_neighbor_name, nbr_ports)
        for nbr_port in nbr_ports:
            test_neighbor_host.no_shutdown(nbr_port)

    # confirm BGP session are up
    pytest_assert(
        wait_until(300, 10, 0, duthost.check_bgp_session_state, test_bgp_neighbors),
        "graceful restarted bgp sessions {} are not coming back".format(test_bgp_neighbors)
    )

    # confirm routes from the neighbor are restored
    pytest_assert(
        wait_until(300, 10, 0, _verify_prefix_counters_from_neighbor_after_graceful_restart, duthost, test_bgp_neighbors),
        "after graceful restart, Rib is not restored"
    )
Example #45
def skip_if_no_dict(loop):
    if not hasattr(loop, '__dict__'):
        pytest.skip("can not override loop attributes")
Example #46
 def test_skip_simple(self):
     with pytest.raises(pytest.skip.Exception) as excinfo:
         pytest.skip("xxx")
     assert excinfo.traceback[-1].frame.code.name == "skip"
     assert excinfo.traceback[-1].ishidden()
Example #47
 def test_from_dtype(self, data):
     pytest.skip("GH-22666")
Example #48
def test_h2oai_benchmarks(f):
    try:
        d = dt.fread(f)
        frame_integrity_check(d)
    except zipfile.BadZipFile:
        pytest.skip("Bad zip file error")
Example #49
0
    def test_status_led(self, duthosts, enum_rand_one_per_hwsku_hostname,
                        localhost, platform_api_conn):
        duthost = duthosts[enum_rand_one_per_hwsku_hostname]
        # TODO: Get a platform-specific list of available colors for the status LED

        FAULT_LED_COLOR_LIST = ["amber", "red"]

        NORMAL_LED_COLOR_LIST = ["green"]

        OFF_LED_COLOR_LIST = ["off"]

        LED_COLOR_TYPES = []
        LED_COLOR_TYPES.append(FAULT_LED_COLOR_LIST)
        LED_COLOR_TYPES.append(NORMAL_LED_COLOR_LIST)

        # Mellanox platforms do not support setting the LEDs to 'off'
        if duthost.facts.get('asic_type') != "mellanox":
            LED_COLOR_TYPES.append(OFF_LED_COLOR_LIST)

        LED_COLOR_TYPES_DICT = {0: "fault", 1: "normal", 2: "off"}

        led_controllable = True
        led_supported_colors = []
        if duthost.facts.get("chassis"):
            status_led = duthost.facts.get("chassis").get("status_led")
            if status_led:
                led_controllable = status_led.get("controllable", True)
                led_supported_colors = status_led.get("colors")

        if led_controllable:
            led_type_skipped = 0
            for index, led_type in enumerate(LED_COLOR_TYPES):
                if led_supported_colors:
                    led_type = set(led_type) & set(led_supported_colors)
                    if not led_type:
                        logger.warning(
                            "test_status_led: Skipping set status_led to {} (No supported colors)"
                            .format(LED_COLOR_TYPES_DICT[index]))
                        led_type_skipped += 1
                        continue

                led_type_result = False
                for color in led_type:
                    result = chassis.set_status_led(platform_api_conn, color)
                    if self.expect(result is not None,
                                   "Failed to perform set_status_led"):
                        led_type_result = result or led_type_result
                    if ((result is None) or (not result)):
                        continue
                    color_actual = chassis.get_status_led(platform_api_conn)
                    if self.expect(color_actual is not None,
                                   "Failed to retrieve status_led"):
                        if self.expect(isinstance(color_actual, STRING_TYPE),
                                       "Status LED color appears incorrect"):
                            self.expect(
                                color == color_actual,
                                "Status LED color incorrect (expected: {}, actual: {})"
                                .format(color, color_actual))
                self.expect(
                    led_type_result is True,
                    "Failed to set status_led to {}".format(
                        LED_COLOR_TYPES_DICT[index]))

            if led_type_skipped == len(LED_COLOR_TYPES):
                pytest.skip("skipped as no supported colors for all types")

        else:
            pytest.skip("skipped as chassis's status led is not controllable")

        self.assert_expectations()
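The self.expect(...) / self.assert_expectations() pair above implements soft assertions: failed checks are recorded but the test keeps probing the remaining colors. A minimal sketch of such a base class, as an assumption of how the helper might look (the platform-API suite's real implementation may differ):

class SoftAssertMixin:
    """Collect failed checks and report them all at the end of the test."""

    def expect(self, condition, message):
        if not hasattr(self, "_failed_expectations"):
            self._failed_expectations = []
        if not condition:
            self._failed_expectations.append(message)
        return bool(condition)

    def assert_expectations(self):
        failed = getattr(self, "_failed_expectations", [])
        assert not failed, "failed expectations:\n" + "\n".join(failed)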
Example #50
0
 def wrapper(*args, **kwargs):
     try:
         return func(*args, **kwargs)
     except NETWORK_ERROR as exc:  # pragma: no cover
         pytest.skip(str(exc))
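The fragment above is the inner function of a decorator. A self-contained version, assuming NETWORK_ERROR is a tuple of exception types defined elsewhere in that module:

import functools

import pytest

# assumption: stands in for the module's own NETWORK_ERROR tuple
NETWORK_ERROR = (ConnectionError, TimeoutError)

def skip_on_network_error(func):
    """Turn transient network failures into skips instead of test failures."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except NETWORK_ERROR as exc:  # pragma: no cover
            pytest.skip(str(exc))
    return wrapper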
Example #51
0
def test_bgp_sender_as_path_loop_detection():
    tgen = get_topogen()

    if tgen.routers_have_failure():
        pytest.skip(tgen.errors)

    router = tgen.gears["r2"]

    def _bgp_converge(router):
        output = json.loads(
            router.vtysh_cmd("show ip bgp neighbor 192.168.255.2 json"))
        expected = {
            "192.168.255.2": {
                "bgpState": "Established",
                "addressFamilyInfo": {
                    "ipv4Unicast": {
                        "acceptedPrefixCounter": 2
                    }
                },
            }
        }
        return topotest.json_cmp(output, expected)

    def _bgp_has_route_from_r1(router):
        output = json.loads(
            router.vtysh_cmd("show ip bgp 172.16.255.254/32 json"))
        expected = {
            "paths": [{
                "aspath": {
                    "segments": [{
                        "type": "as-sequence",
                        "list": [65001, 65003]
                    }],
                    "length": 2,
                }
            }]
        }
        return topotest.json_cmp(output, expected)

    def _bgp_suppress_route_to_r3(router):
        output = json.loads(
            router.vtysh_cmd(
                "show ip bgp neighbor 192.168.254.2 advertised-routes json"))
        expected = {"totalPrefixCounter": 0}
        return topotest.json_cmp(output, expected)

    test_func = functools.partial(_bgp_converge, router)
    success, result = topotest.run_and_expect(test_func,
                                              None,
                                              count=60,
                                              wait=0.5)

    assert result is None, 'Failed bgp convergence in "{}"'.format(router)

    test_func = functools.partial(_bgp_has_route_from_r1, router)
    success, result = topotest.run_and_expect(test_func,
                                              None,
                                              count=60,
                                              wait=0.5)

    assert result is None, 'Failed to see a route from r1 in "{}"'.format(
        router)

    test_func = functools.partial(_bgp_suppress_route_to_r3, router)
    success, result = topotest.run_and_expect(test_func,
                                              None,
                                              count=60,
                                              wait=0.5)

    assert (
        result is None
    ), 'Route 172.16.255.254/32 should not be sent to r3 "{}"'.format(router)
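For reference, a minimal sketch of the run_and_expect polling pattern used above, assuming the (func, expected, count, wait) signature visible in the calls; FRR's real topotest utility is more featureful:

import time

def run_and_expect(func, expected, count=20, wait=3):
    """Call func up to `count` times, `wait` seconds apart, until it
    returns `expected`; return (success, last_result)."""
    result = func()
    for _ in range(count - 1):
        if result == expected:
            return True, result
        time.sleep(wait)
        result = func()
    return result == expected, result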
Example #52
0
 def test_array_type_with_arg(self, data, dtype):
     pytest.skip("GH-22666")
Example #53
0
    def _container_exec_ctx(platform):
        partition = local_user_exec_ctx[0]
        if platform not in partition.container_environs.keys():
            pytest.skip(f'{platform} is not configured on the system')

        yield from local_user_exec_ctx
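yield from lets one generator-style fixture delegate its setup and teardown to another generator wholesale. A toy illustration of the pattern (all names are hypothetical):

import pytest

def _make_resource():
    resource = {"ready": True}   # setup
    yield resource
    resource["ready"] = False    # teardown

@pytest.fixture
def guarded_resource():
    # run an extra check first, then reuse another generator's lifecycle
    yield from _make_resource()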
Example #54
0
 def test_3d_raises_valueerror(self):
     a = DataArray(easy_array((2, 3, 4)))
     if self.plotfunc.__name__ == 'imshow':
         pytest.skip()
     with raises_regex(ValueError, r'DataArray must be 2d'):
         self.plotfunc(a)
Example #55
0
def real_s3():
    if not S3.should_test():
        pytest.skip("no real s3")
    yield S3(S3.get_url())
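A hypothetical test consuming such a gate-or-yield fixture; the skip fires during fixture setup, so the test body never runs without a reachable bucket:

def test_upload_roundtrip(real_s3):
    # only reached when S3.should_test() was truthy
    assert real_s3 is not None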
Example #56
0
def test_greedy_follower(test_navmesh, scene_graph, pbar):
    global num_fails
    if not osp.exists(test_navmesh):
        pytest.skip(f"{test_navmesh} not found")

    pathfinder = hsim.PathFinder()
    pathfinder.load_nav_mesh(test_navmesh)
    assert pathfinder.is_loaded

    agent = habitat_sim.Agent()
    agent.attach(scene_graph.get_root_node().create_child())
    agent.controls.move_filter_fn = pathfinder.try_step
    follower = habitat_sim.GreedyGeodesicFollower(pathfinder, agent)

    num_tests = 50

    for _ in range(num_tests):
        state = agent.state
        while True:
            state.position = pathfinder.get_random_navigable_point()
            goal_pos = pathfinder.get_random_navigable_point()
            path = hsim.ShortestPath()
            path.requested_start = state.position
            path.requested_end = goal_pos

            if pathfinder.find_path(path) and path.geodesic_distance > 2.0:
                break

        try:
            agent.state = state
            path = follower.find_path(goal_pos)
            for i, action in enumerate(path):
                if action is not None:
                    agent.act(action)
                else:
                    assert i == len(path) - 1

            end_state = agent.state
            assert (np.linalg.norm(end_state.position - goal_pos) <=
                    follower.forward_spec.amount), "Didn't make it"
        except Exception as e:
            if test_all:
                num_fails += 1
                pbar.set_postfix(num_fails=num_fails)
            else:
                raise e

        try:
            agent.state = state
            for _ in range(int(1e4)):
                action = follower.next_action_along(goal_pos)
                if action is None:
                    break
                agent.act(action)

            state = agent.state
            assert (np.linalg.norm(state.position - goal_pos) <=
                    follower.forward_spec.amount), "Didn't make it"
        except Exception as e:
            if test_all:
                num_fails += 1
                pbar.set_postfix(num_fails=num_fails)
            else:
                raise e

    if test_all:
        pbar.update()
Example #57
0
 def test_create_accounting(self, server, browser):
     pytest.skip('fix me')
     orgid = actions.create_org(browser, 'TestAccounting')
     accid = actions.create_accounting(browser, orgid)
     actions.create_account(server.database, browser, accid)
Example #58
0
async def test_lightchain_integration(request, event_loop):
    """Test LightPeerChain against a local geth instance.

    This test assumes a geth/ropsten instance is listening on 127.0.0.1:30303 and serving light
    clients. In order to achieve that, simply run it with the following command line:

        $ geth -nodekeyhex 45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8 \
               -testnet -lightserv 90
    """
    # TODO: Implement a pytest fixture that runs geth as above, so that we don't need to run it
    # manually.
    if not request.config.getoption("--integration"):
        pytest.skip("Not asked to run integration tests")

    base_db = MemoryDB()
    headerdb = FakeAsyncHeaderDB(MemoryDB())
    headerdb.persist_header(ROPSTEN_GENESIS_HEADER)
    peer_pool = LocalGethPeerPool(
        LESPeer, headerdb, ROPSTEN_NETWORK_ID, ecies.generate_privkey(),
    )
    chain = IntegrationTestLightPeerChain(base_db, peer_pool)

    asyncio.ensure_future(peer_pool.run())
    asyncio.ensure_future(chain.run())
    await asyncio.sleep(0)  # Yield control to give the LightPeerChain a chance to start

    def finalizer():
        event_loop.run_until_complete(peer_pool.cancel())
        event_loop.run_until_complete(chain.stop())

    request.addfinalizer(finalizer)

    n = 11

    # Wait for the chain to sync a few headers.
    async def wait_for_header_sync(block_number):
        while headerdb.get_canonical_head().block_number < block_number:
            await asyncio.sleep(0.1)
    await asyncio.wait_for(wait_for_header_sync(n), 2)

    # https://ropsten.etherscan.io/block/11
    b = await chain.get_canonical_block_by_number(n)
    assert isinstance(b, FrontierBlock)
    assert b.number == 11
    assert encode_hex(b.hash) == (
        '0xda882aeff30f59eda9da2b3ace3023366ab9d4219b5a83cdd589347baae8678e')
    assert len(b.transactions) == 15
    assert isinstance(b.transactions[0], b.transaction_class)

    receipts = await chain.get_receipts(b.hash)
    assert len(receipts) == 15
    assert encode_hex(keccak(rlp.encode(receipts[0]))) == (
        '0xf709ed2c57efc18a1675e8c740f3294c9e2cb36ba7bb3b89d3ab4c8fef9d8860')

    assert len(chain.peer_pool.peers) == 1
    head_info = chain.peer_pool.peers[0].head_info
    head = await chain.get_block_by_hash(head_info.block_hash)
    assert head.number == head_info.block_number

    # In order to answer queries for contract code, geth needs the state trie entry for the block
    # we specify in the query, but because of fast sync we can only assume it has that for recent
    # blocks, so we use the current head to look up the code for the contract below.
    # https://ropsten.etherscan.io/address/0x95a48dca999c89e4e284930d9b9af973a7481287
    contract_addr = decode_hex('95a48dca999c89e4e284930d9b9af973a7481287')
    contract_code = await chain.get_contract_code(head.hash, keccak(contract_addr))
    assert encode_hex(keccak(contract_code)) == (
        '0x1e0b2ad970b365a217c40bcf3582cbb4fcc1642d7a5dd7a82ae1e278e010123e')

    account = await chain.get_account(head.hash, contract_addr)
    assert account.code_hash == keccak(contract_code)
    assert account.balance == 0
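request.addfinalizer, used above, registers cleanup that runs after the test whether the assertions pass or fail. A minimal synchronous illustration of the same pattern (names are hypothetical):

def test_with_finalizer(request):
    resource = {"open": True}

    def finalizer():
        resource["open"] = False  # runs after the test, pass or fail

    request.addfinalizer(finalizer)
    assert resource["open"]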
Example #59
0
# Copyright 2019 Open End AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import pytest
try:
    import selenium  # noqa: F401
except ImportError:
    pytest.skip("selenium is not installed", allow_module_level=True)

from testapi import actions

@pytest.mark.usefixtures('server', 'browser')
class TestAccounting(object):

    def test_create_accounting(self, server, browser):
        pytest.skip('fix me')
        orgid = actions.create_org(browser, 'TestAccounting')
        accid = actions.create_accounting(browser, orgid)
        actions.create_account(server.database, browser, accid)
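The try/except import guard above is common enough that pytest ships a one-liner for it; pytest.importorskip skips the whole module when the import fails:

import pytest

selenium = pytest.importorskip("selenium")  # skips this module if selenium is missing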
Example #60
0
import platform
import uuid

import pytest

from settings.testcase import BlobPreparer
from devtools_testutils.storage import StorageTestCase

# ------------------------------------------------------------------------------
from azure.storage.blob import BlobServiceClient
from azure.storage.blob._shared.uploads import SubStream

TEST_BLOB_PREFIX = 'largestblob'
LARGEST_BLOCK_SIZE = 4000 * 1024 * 1024
LARGEST_SINGLE_UPLOAD_SIZE = 5000 * 1024 * 1024

LARGE_BLOCK_SIZE = 100 * 1024 * 1024

# ------------------------------------------------------------------------------
if platform.python_implementation() == 'PyPy':
    pytest.skip("Skip tests for Pypy", allow_module_level=True)

class StorageLargestBlockBlobTest(StorageTestCase):
    def _setup(self, storage_account_name, key, additional_policies=None, min_large_block_upload_threshold=1 * 1024 * 1024,
               max_single_put_size=32 * 1024):
        self.bsc = BlobServiceClient(
            self.account_url(storage_account_name, "blob"),
            credential=key,
            max_single_put_size=max_single_put_size,
            max_block_size=LARGEST_BLOCK_SIZE,
            min_large_block_upload_threshold=min_large_block_upload_threshold,
            _additional_pipeline_policies=additional_policies)
        self.config = self.bsc._config
        self.container_name = self.get_resource_name('utcontainer')
        self.container_name = self.container_name + str(uuid.uuid4())
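The module-level skip above needs allow_module_level=True because a plain pytest.skip() call is only legal inside a test. A near-equivalent declarative form uses a module-level mark; note the module is still imported, only its tests are skipped:

import platform

import pytest

pytestmark = pytest.mark.skipif(
    platform.python_implementation() == "PyPy",
    reason="Skip tests for PyPy",
)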