Example 1
def test_timeout():
    transport = Transport(urlparse.urlparse('http://localhost'))
    responses.add('POST', '/', status=202,
                  body=MaxRetryError(None, None, reason=TimeoutError()))
    with pytest.raises(TransportException) as exc_info:
        transport.send('x', {}, timeout=5)
    assert 'timeout' in str(exc_info.value)
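Note: with the responses library, passing an exception instance as body makes responses raise that exception when the mocked request is sent, so the transport here receives a MaxRetryError whose reason is a TimeoutError and is expected to surface it as a TransportException whose message mentions the timeout.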
Example 2
def wait_for_plugins(client, namespace="docker", timeout=30, max_delay=5):
    for plugin_name, plugin_info in PLUGIN_MAP.items():
        if not plugin_info['running']:
            delay_time = 0.1
            total_wait_time = 0

            while not plugin_info['running']:
                system = client.find_unique_system(
                    name=plugin_name,
                    namespace=namespace,
                    version=plugin_info.get("version"))

                is_running = True
                if system and system.instances:
                    for instance in system.instances:
                        if instance.status != 'RUNNING':
                            is_running = False
                            break
                PLUGIN_MAP[plugin_name]['running'] = is_running

                if is_running:
                    plugin_info['running'] = True
                else:
                    if total_wait_time > timeout:
                        raise TimeoutError(
                            "Timed out waiting to connect to beer-garden.")

                    time.sleep(delay_time)
                    total_wait_time += delay_time
                    delay_time = min(delay_time * 2, max_delay)
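This helper and several of the later wait_* helpers share the same polling idiom: check a condition, sleep with an exponentially growing delay capped at max_delay, and raise TimeoutError once the accumulated wait exceeds timeout. A minimal, generic sketch of that pattern (poll_until is our name, not beer-garden's; the built-in TimeoutError stands in for whatever exception class the originals import):

import time


def poll_until(condition, timeout=30, max_delay=5):
    """Call condition() until it is truthy, backing off exponentially.

    Raises TimeoutError once more than `timeout` seconds have been spent waiting.
    """
    delay_time = 0.1
    total_wait_time = 0
    while not condition():
        if total_wait_time > timeout:
            raise TimeoutError("Timed out waiting for condition")
        time.sleep(delay_time)
        total_wait_time += delay_time
        delay_time = min(delay_time * 2, max_delay)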
Example 3
def wait_for_in_progress(client, request, timeout=1, max_delay=1):
    """Generate a request and wait for that request to be marked as IN_PROGRESS

    Will throw a ValueError if it misses the window (i.e. a request goes to a completed state
    and we never saw it IN_PROGRESS)

    :param client:
    :param request:
    :param timeout:
    :param max_delay:
    :return:
    """
    request = client.create_request(request)
    delay_time = 0.01
    total_wait_time = 0
    while request.status != 'IN_PROGRESS':

        if request.status in COMPLETED_STATUSES:
            raise ValueError(
                "Error waiting for request to go to in progress. Status %s" %
                request.status)

        if timeout and total_wait_time > timeout:
            raise TimeoutError(
                "Timed out waiting for request to go to IN_PROGRESS")

        time.sleep(delay_time)
        total_wait_time += delay_time
        delay_time = min(delay_time * 2, max_delay)

        request = client.find_unique_request(id=request.id)

    return request
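A hedged usage sketch of the helper above (construction of client and request is omitted; the names are placeholders):

try:
    request = wait_for_in_progress(client, request, timeout=10)
except ValueError:
    pass  # the request completed before it was ever observed IN_PROGRESS
except TimeoutError:
    pass  # the request never reached IN_PROGRESS within the timeout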
Example 4
def stop_instance(client, instance, timeout=1, max_delay=1):
    response = client.client.patch_instance(
        instance.id, client.parser.serialize_patch(PatchOperation('stop')))
    if 400 <= response.status_code < 500:
        raise ValidationError(response.json())
    elif response.status_code >= 500:
        raise SaveError(response.json())
    else:
        instance = client.parser.parse_instance(response.json())

    instance = get_instance(client, instance.id)
    delay_time = 0.01
    total_wait_time = 0
    while instance.status not in ['DEAD', 'STOPPED', 'UNRESPONSIVE']:

        if timeout and total_wait_time > timeout:
            raise TimeoutError("Timed out waiting for instance to stop")

        time.sleep(delay_time)
        total_wait_time += delay_time
        delay_time = min(delay_time * 2, max_delay)

        instance = get_instance(client, instance.id)

    return instance
Example 5
def test_timeout():
    transport = Transport("http://localhost", timeout=5)
    try:
        responses.add("POST", "/", status=202, body=MaxRetryError(None, None, reason=TimeoutError()))
        with pytest.raises(TransportException) as exc_info:
            transport.send("x")
        assert "timeout" in str(exc_info.value)
    finally:
        transport.close()
Example 6
def test_timeout(mock_urlopen):
    transport = Transport("http://localhost", timeout=5)
    mock_urlopen.side_effect = MaxRetryError(None, None, reason=TimeoutError())
    try:
        with pytest.raises(TransportException) as exc_info:
            transport.send("x")
        assert "timeout" in str(exc_info.value)
    finally:
        transport.close()
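The mock_urlopen fixture used here and in Example 9 is not shown; a plausible sketch, assuming it patches the urlopen method of the urllib3 pool manager underneath the transport (the patch target is an assumption, not the project's actual fixture):

from unittest import mock

import pytest


@pytest.fixture
def mock_urlopen():
    # Assumed patch target; the real fixture may patch a different attribute.
    with mock.patch("urllib3.poolmanager.PoolManager.urlopen") as patched:
        yield patched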
Example 7
def test_resync_etcd_read_fail(self, m_sleep):
    """
    Test a read failure on the snapshot.
    """
    # Initial handshake.
    self.start_driver_and_handshake()
    # Start streaming some data.
    snap_stream, watcher_req = self.start_snapshot_response()
    # But then the read times out...
    snap_stream.write(TimeoutError())
    # Triggering a restart of the resync loop.
    self.assert_status_message(STATUS_WAIT_FOR_READY)
Example 8
def test_exceptions_with_objects(self):
    assert self.cycle(HTTPError('foo'))
    assert self.cycle(
        MaxRetryError(HTTPConnectionPool('localhost'), '/', None))
    assert self.cycle(LocationParseError('fake location'))
    assert self.cycle(
        ClosedPoolError(HTTPConnectionPool('localhost'), None))
    assert self.cycle(
        EmptyPoolError(HTTPConnectionPool('localhost'), None))
    assert self.cycle(
        HostChangedError(HTTPConnectionPool('localhost'), '/', None))
    assert self.cycle(
        TimeoutError(HTTPConnectionPool('localhost'), '/', None))
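The cycle() helper is defined elsewhere in that test module; a minimal sketch of what such a pickle round-trip helper typically looks like (an assumption inferred from the assertions above, not the project's exact code):

import pickle


def cycle(item):
    # Serialize and deserialize; raises if the exception is not picklable,
    # and returns the restored (truthy) exception instance otherwise.
    return pickle.loads(pickle.dumps(item))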
Example 9
def test_timeout(mock_urlopen, elasticapm_client):
    elasticapm_client.server_version = (8, 0)  # avoid making server_info request
    transport = Transport("http://localhost", timeout=5, client=elasticapm_client)
    transport.start_thread()
    mock_urlopen.side_effect = MaxRetryError(None, None, reason=TimeoutError())
    try:
        with pytest.raises(TransportException) as exc_info:
            transport.send("x")
        assert "timeout" in str(exc_info.value)
    finally:
        transport.close()
Example 10
def wait_for_response(client, request, timeout=1, max_delay=1):
    request = client.create_request(request)
    delay_time = 0.01
    total_wait_time = 0
    while request.status not in COMPLETED_STATUSES:

        if timeout and total_wait_time > timeout:
            raise TimeoutError("Timed out waiting for request to complete")

        time.sleep(delay_time)
        total_wait_time += delay_time
        delay_time = min(delay_time * 2, max_delay)

        request = client.find_unique_request(id=request.id)

    return request
Example 11
def wait_for_connection(client, timeout=30, max_delay=5):
    connected = False
    delay_time = 0.1
    total_wait_time = 0

    while not connected:
        try:
            client.get_version()
            connected = True
        except Exception:
            if total_wait_time > timeout:
                raise TimeoutError(
                    "Timed out waiting to connect to beer-garden.")

            time.sleep(delay_time)
            total_wait_time += delay_time
            delay_time = min(delay_time * 2, max_delay)
Example 12
def stop_instance(client, instance, timeout=15, max_delay=1):
    instance = client.update_instance_status(instance.id, 'STOPPED')

    delay_time = 0.01
    total_wait_time = 0
    while instance.status not in ['DEAD', 'STOPPED', 'UNRESPONSIVE']:

        if timeout and total_wait_time > timeout:
            raise TimeoutError("Timed out waiting for instance to stop")

        time.sleep(delay_time)
        total_wait_time += delay_time
        delay_time = min(delay_time * 2, max_delay)

        instance = get_instance(client, instance.id)

    return instance
Example 13
    def test_get_dropbox_client(self, now_mock, check_user_mock,
                                refresh_access_token_mock):
        now_mock.return_value = timezone.make_aware(
            timezone.datetime(2020, 1, 1))
        check_user_mock.return_value = None

        Notification.objects.all().delete()
        self.assertEqual(Notification.objects.count(), 0)

        # Generic connection error. Should NOT reset credentials.
        refresh_access_token_mock.side_effect = TimeoutError()  # Network error

        with self.assertRaises(TimeoutError):
            dsmr_dropbox.services.get_dropbox_client(self.schedule_process)

        self.assertEqual(Notification.objects.count(), 0)
        self.schedule_process.refresh_from_db()
        self.assertTrue(self.schedule_process.active)

        # Dropbox Auth Error. Will reset credentials. Warning message should be created and SP disabled.
        self.schedule_process.reschedule_asap()
        DropboxSettings.objects.all().update(refresh_token='invalid-token')
        refresh_access_token_mock.reset_mock()
        refresh_access_token_mock.side_effect = dropbox.exceptions.AuthError(
            12345, "Some error")

        with self.assertRaises(dropbox.exceptions.AuthError):
            dsmr_dropbox.services.get_dropbox_client(self.schedule_process)

        self.assertEqual(Notification.objects.count(), 1)
        self.schedule_process.refresh_from_db()
        self.assertFalse(self.schedule_process.active)

        # Happy flow
        self.schedule_process.reschedule_asap()
        refresh_access_token_mock.reset_mock()
        refresh_access_token_mock.side_effect = None
        DropboxSettings.objects.all().update(refresh_token='token')

        dsmr_dropbox.services.get_dropbox_client(self.schedule_process)
Example 14
    def urlopen(self, method, url, redirect=True, **kw):
        """
        Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
        with custom cross-host redirect logic and only sends the request-uri
        portion of the ``url``.

        The given ``url`` parameter must be absolute, such that an appropriate
        :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
        """
        #===============================================================================================================
        # added by mz
        error_type = kw.get('error_type')

        if error_type:

            from urllib3.exceptions import LocationValueError, HostChangedError, LocationParseError, ConnectTimeoutError
            from urllib3.exceptions import ProxyError, TimeoutError, ReadTimeoutError, ProtocolError, DecodeError
            from urllib3.exceptions import ResponseError, ResponseNotChunked, SSLError, HTTPError, HTTPWarning, PoolError
            from urllib3.exceptions import RequestError, MaxRetryError, TimeoutStateError, NewConnectionError
            from urllib3.exceptions import EmptyPoolError, ClosedPoolError, SecurityWarning, SubjectAltNameWarning
            from urllib3.exceptions import InsecureRequestWarning, SystemTimeWarning, InsecurePlatformWarning
            from urllib3.exceptions import SNIMissingWarning, DependencyWarning, ProxySchemeUnknown, HeaderParsingError
            get_error = {
                "LocationValueError": LocationValueError(),
                "HostChangedError": HostChangedError(pool=1, url=2),
                "LocationParseError": LocationParseError(url),
                "ConnectTimeoutError": ConnectTimeoutError(),
                "ProxyError": ProxyError(),
                "TimeoutError": TimeoutError(),
                "ReadTimeoutError": ReadTimeoutError(pool=1, url=2, message="ReadTimeoutError"),
                "ProtocolError": ProtocolError(),
                "DecodeError": DecodeError(),
                "ResponseError": ResponseError(),
                "ResponseNotChunked": ResponseNotChunked(),
                "SSLError": SSLError(),
                "HTTPError": HTTPError(),
                "HTTPWarning": HTTPWarning(),
                "PoolError": PoolError(pool=1, message=2),
                "RequestError": RequestError(pool=1, url=2, message="RequestError"),
                "MaxRetryError": MaxRetryError(pool=1, url=2, reason=None),
                "TimeoutStateError": TimeoutStateError(),
                "NewConnectionError": NewConnectionError(pool=1, message="NewConnectionError"),
                "EmptyPoolError": EmptyPoolError(pool=1, message="EmptyPoolError"),
                "ClosedPoolError": ClosedPoolError(pool=1, message="ClosedPoolError"),
                "SecurityWarning": SecurityWarning(),
                "SubjectAltNameWarning": SubjectAltNameWarning(),
                "InsecureRequestWarning": InsecureRequestWarning(),
                "SystemTimeWarning": SystemTimeWarning(),
                "InsecurePlatformWarning": InsecurePlatformWarning(),
                "SNIMissingWarning": SNIMissingWarning(),
                "DependencyWarning": DependencyWarning(),
                "ProxySchemeUnknown": ProxySchemeUnknown(scheme=1),
                "HeaderParsingError": HeaderParsingError(defects=1, unparsed_data=2),
            }
            error_ = get_error[error_type]
            raise error_
        #===============================================================================================================

        u = parse_url(url)
        conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)

        kw['assert_same_host'] = False
        kw['redirect'] = False
        if 'headers' not in kw:
            kw['headers'] = self.headers

        if self.proxy is not None and u.scheme == "http":
            response = conn.urlopen(method, url, **kw)
        else:
            response = conn.urlopen(method, u.request_uri, **kw)

        redirect_location = redirect and response.get_redirect_location()
        if not redirect_location:
            return response

        # Support relative URLs for redirecting.
        redirect_location = urljoin(url, redirect_location)

        # RFC 2616, Section 10.3.4
        if response.status == 303:
            method = 'GET'

        log.info("Redirecting %s -> %s" % (url, redirect_location))
        kw['retries'] = kw.get('retries', 3) - 1  # Persist retries countdown
        kw['redirect'] = redirect
        return self.urlopen(method, redirect_location, **kw)
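A hypothetical call into this patched urlopen, showing how the error_type keyword injects a chosen urllib3 exception (FaultInjectingPoolManager is an assumed name for whatever class defines the method above):

from urllib3.exceptions import TimeoutError

pool = FaultInjectingPoolManager()  # assumed name; stands for the class above
try:
    pool.urlopen("GET", "http://localhost/", error_type="TimeoutError")
except TimeoutError:
    pass  # the injected urllib3.exceptions.TimeoutError surfaces here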