Example 1
def _intercept_http_request(self, uri, method="GET", headers=None, **kwargs):
    """Wrap orig_http_request with auth, size limits, and retry logic.

    Adds the Arvados OAuth2 Authorization header (and optional
    X-External-Client / X-Request-Id headers), rejects oversized request
    bodies, and retries idempotent requests on transient HTTP/socket
    errors with exponential backoff.

    :uri: request URI, passed through to orig_http_request.
    :method: HTTP method; only DELETE/GET/HEAD/OPTIONS/PUT are retried.
    :headers: optional dict of request headers; a fresh dict is created
      when omitted.
    :kwargs: forwarded to orig_http_request ('body' is size-checked).

    Raises apiclient_errors.MediaUploadSizeError when the body exceeds
    self.max_request_size, and re-raises httplib2.SSLHandshakeError with
    a more descriptive message.
    """
    # BUG FIX: the signature previously used the mutable default
    # `headers={}`.  Because this method mutates `headers` below, every
    # call that omitted the argument shared (and polluted) one dict that
    # persisted across calls.  Use the None-sentinel idiom instead.
    if headers is None:
        headers = {}

    if (self.max_request_size and kwargs.get('body')
            and self.max_request_size < len(kwargs['body'])):
        raise apiclient_errors.MediaUploadSizeError(
            "Request size %i bytes exceeds published limit of %i bytes" %
            (len(kwargs['body']), self.max_request_size))

    if config.get("ARVADOS_EXTERNAL_CLIENT", "") == "true":
        headers['X-External-Client'] = '1'

    headers['Authorization'] = 'OAuth2 %s' % self.arvados_api_token
    # Preserve a caller-supplied request id; generate one otherwise.
    if not headers.get('X-Request-Id'):
        headers['X-Request-Id'] = self._request_id()

    # Only idempotent methods are safe to retry automatically.
    retryable = method in ['DELETE', 'GET', 'HEAD', 'OPTIONS', 'PUT']
    retry_count = self._retry_count if retryable else 0

    if (not retryable and
            time.time() - self._last_request_time > self._max_keepalive_idle):
        # High probability of failure due to connection atrophy. Make
        # sure this request [re]opens a new connection by closing and
        # forgetting all cached connections first.
        for conn in self.connections.values():
            conn.close()
        self.connections.clear()

    delay = self._retry_delay_initial
    for _ in range(retry_count):
        self._last_request_time = time.time()
        try:
            return self.orig_http_request(uri,
                                          method,
                                          headers=headers,
                                          **kwargs)
        except http.client.HTTPException:
            _logger.debug("Retrying API request in %d s after HTTP error",
                          delay,
                          exc_info=True)
        except socket.error:
            # This is the one case where httplib2 doesn't close the
            # underlying connection first.  Close all open
            # connections, expecting this object only has the one
            # connection to the API server.  This is safe because
            # httplib2 reopens connections when needed.
            _logger.debug("Retrying API request in %d s after socket error",
                          delay,
                          exc_info=True)
            for conn in self.connections.values():
                conn.close()
        except httplib2.SSLHandshakeError as e:
            # Intercept and re-raise with a better error message.
            raise httplib2.SSLHandshakeError(
                "Could not connect to %s\n%s\nPossible causes: remote SSL/TLS certificate expired, or was issued by an untrusted certificate authority."
                % (uri, e))

        time.sleep(delay)
        delay = delay * self._retry_delay_backoff

    # Final (or only, when not retryable) attempt: let any exception
    # propagate to the caller.
    self._last_request_time = time.time()
    return self.orig_http_request(uri, method, headers=headers, **kwargs)
Example 2
def _intercept_http_request(self, uri, **kwargs):
    """Wrap orig_http_request with auth headers and a size limit.

    Rejects bodies larger than self.max_request_size, injects the
    OAuth2 Authorization header (and X-External-Client when configured),
    then delegates to orig_http_request, retrying exactly once on a
    stale-connection BadStatusLine.
    """
    from httplib import BadStatusLine

    request_body = kwargs.get('body')
    if (self.max_request_size and request_body
            and len(request_body) > self.max_request_size):
        raise apiclient_errors.MediaUploadSizeError(
            "Request size %i bytes exceeds published limit of %i bytes" %
            (len(request_body), self.max_request_size))

    headers = kwargs.setdefault('headers', {})

    if config.get("ARVADOS_EXTERNAL_CLIENT", "") == "true":
        headers['X-External-Client'] = '1'

    headers['Authorization'] = 'OAuth2 %s' % self.arvados_api_token

    try:
        return self.orig_http_request(uri, **kwargs)
    except BadStatusLine:
        # httplib raises BadStatusLine when it reused a cached connection
        # that the server had already closed.  Retry once in that case.
        # Caveat: we cannot be absolutely certain the first attempt did
        # not reach the server, so this retry is slightly risky.
        return self.orig_http_request(uri, **kwargs)
Example 3
def _intercept_http_request(self, uri, **kwargs):
    """Wrap orig_http_request with auth, size limit, and retry logic.

    Rejects oversized bodies, injects the OAuth2 Authorization header
    (and X-External-Client when configured), and retries idempotent
    requests on transient HTTP/socket errors with exponential backoff.
    """
    request_body = kwargs.get('body')
    if (self.max_request_size and request_body
            and len(request_body) > self.max_request_size):
        raise apiclient_errors.MediaUploadSizeError(
            "Request size %i bytes exceeds published limit of %i bytes" %
            (len(request_body), self.max_request_size))

    headers = kwargs.setdefault('headers', {})

    if config.get("ARVADOS_EXTERNAL_CLIENT", "") == "true":
        headers['X-External-Client'] = '1'

    headers['Authorization'] = 'OAuth2 %s' % self.arvados_api_token

    # Only idempotent HTTP methods are safe to retry automatically.
    http_method = kwargs.get('method', 'GET')
    is_idempotent = http_method in ['DELETE', 'GET', 'HEAD', 'OPTIONS', 'PUT']
    attempts = self._retry_count if is_idempotent else 0

    idle_time = time.time() - self._last_request_time
    if not is_idempotent and idle_time > self._max_keepalive_idle:
        # A long-idle keepalive connection is very likely to have been
        # dropped by the peer.  Since we will not retry this request,
        # force a fresh connection by closing and forgetting every
        # cached connection first.
        for connection in self.connections.itervalues():
            connection.close()
        self.connections.clear()

    sleep_for = self._retry_delay_initial
    for _ in range(attempts):
        self._last_request_time = time.time()
        try:
            return self.orig_http_request(uri, **kwargs)
        except httplib.HTTPException:
            _logger.debug("Retrying API request in %d s after HTTP error",
                          sleep_for, exc_info=True)
        except socket.error:
            # httplib2 does not close the underlying connection itself
            # on a socket error, so close every cached connection here
            # (this object is expected to hold only the one connection
            # to the API server).  Safe: httplib2 reopens connections
            # on demand.
            _logger.debug("Retrying API request in %d s after socket error",
                          sleep_for, exc_info=True)
            for connection in self.connections.itervalues():
                connection.close()
        time.sleep(sleep_for)
        sleep_for = sleep_for * self._retry_delay_backoff

    # Final (or only) attempt: exceptions propagate to the caller.
    self._last_request_time = time.time()
    return self.orig_http_request(uri, **kwargs)