def test_resource_exhausted_captured(self):
    """Test that a resource-exhausted (HTTP 429) error is treated as
    retryable.
    """
    error = http.HttpError(mock.Mock(status=429),
                           'Resource Exhausted'.encode())
    self.assertTrue(retryable_exceptions.is_retryable_exception(error))
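For context, a predicate such as retryable_exceptions.is_retryable_exception usually just inspects the status of the wrapped response. A minimal sketch, assuming googleapiclient.errors.HttpError and an illustrative set of retryable status codes (not the module's actual implementation):

from googleapiclient import errors

# Illustrative set of transient statuses; the real module may use a
# different list.
_RETRYABLE_STATUSES = {429, 500, 502, 503, 504}

def is_retryable_exception(exc):
    """Return True if the exception wraps a transient HTTP status."""
    return (isinstance(exc, errors.HttpError)
            and exc.resp.status in _RETRYABLE_STATUSES)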
Example #2
    def next_chunk(self, num_retries=None):
        error_codes = CONF.backup_gcs_retry_error_codes
        # Ask only for the next chunk of the object.
        headers = {'range': 'bytes=%d-%d' %
                   (self._progress, self._progress + self._chunksize)}

        gcs_http = self._request.http
        # Treat a missing retry count as "no retries" so the loop below
        # still runs at least once.
        num_retries = num_retries or 0
        for retry_num in range(num_retries + 1):
            if retry_num > 0:
                # Exponential backoff with jitter before each retry.
                self._sleep(self._rand() * 2 ** retry_num)

            resp, content = gcs_http.request(self._uri, headers=headers)
            # Stop retrying unless the status is a retryable server error.
            if resp.status < 500 and (six.text_type(resp.status)
                                      not in error_codes):
                break
        if resp.status in [200, 206]:
            # Follow the server's canonical location for subsequent chunks.
            if 'content-location' in resp and (
                    resp['content-location'] != self._uri):
                self._uri = resp['content-location']
            self._progress += len(content)
            self._fd.write(content)

            # Determine the total object size from the response headers.
            if 'content-range' in resp:
                content_range = resp['content-range']
                length = content_range.rsplit('/', 1)[1]
                self._total_size = int(length)
            elif 'content-length' in resp:
                self._total_size = int(resp['content-length'])

            # The download is complete once every byte has been written.
            if self._progress == self._total_size:
                self._done = True
            return (http.MediaDownloadProgress(self._progress,
                                               self._total_size), self._done)
        else:
            raise http.HttpError(resp, content, uri=self._uri)
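
A typical caller drives next_chunk in a loop until the returned flag reports completion. A short usage sketch, assuming the class above returns the same (MediaDownloadProgress, done) tuple as googleapiclient's MediaIoBaseDownload; the helper name read_all is a placeholder:

def read_all(downloader):
    """Drain a chunked downloader, printing progress as it goes."""
    done = False
    while not done:
        status, done = downloader.next_chunk(num_retries=3)
        print('Downloaded %d of %d bytes' % (status.resumable_progress,
                                             status.total_size))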
Example #3
    async def _async_execute(self, http, num_retries=0):
        # Here ``http`` is the wrapped googleapiclient request object; its
        # ``http`` attribute is the underlying transport.
        _http = http.http

        if http.resumable:
            # Resumable media is driven chunk by chunk, yielding to the
            # event loop between chunks.
            body = None
            while body is None:
                _, body = http.next_chunk(http=_http, num_retries=num_retries)
                await asyncio.sleep(1 / 30)
            return body

        if 'content-length' not in http.headers:
            http.headers['content-length'] = str(http.body_size)
        # Very long GET URIs are tunnelled through POST using the
        # x-http-method-override header, with the query string in the body.
        if len(http.uri) > 2048 and http.method == 'GET':
            http.method = 'POST'
            http.headers['x-http-method-override'] = 'GET'
            http.headers['content-type'] = 'application/x-www-form-urlencoded'
            parsed = urllib.parse.urlparse(http.uri)
            http.uri = urllib.parse.urlunparse(
                (parsed.scheme, parsed.netloc, parsed.path, parsed.params,
                 None, None))

            http.body = parsed.query
            http.headers['content-length'] = str(len(http.body))

        # Handle retries for server-side errors.
        resp, content = HTTP._retry_request(
            _http, num_retries, 'request', http._sleep, http._rand,
            str(http.uri), method=str(http.method), body=http.body,
            headers=http.headers)

        # Notify any registered response callbacks before checking the status.
        for callback in http.response_callbacks:
            callback(resp)
        if resp.status >= 300:
            raise HTTP.HttpError(resp, content, uri=http.uri)
        return http.postproc(resp, content)
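
Such a coroutine is awaited from an event loop around an ordinary googleapiclient request. A usage sketch with placeholder names (wrapper stands for whatever object exposes _async_execute in the surrounding project; request is a regular HttpRequest):

import asyncio

async def fetch(wrapper, request):
    # Delegate to the coroutine above; retries are handled inside it.
    return await wrapper._async_execute(request, num_retries=3)

# result = asyncio.run(fetch(wrapper, request))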