Example #1
 def test_connection_timeouts_are_retried(self):
     # If a connection times out, we get a Timeout exception
     # from requests.  We should be retrying those.
     handler = retryhandler.create_retry_handler(
         self.retry_config, operation_name='OperationBar')
     sleep_time = handler(response=None, attempts=1,
                          caught_exception=ReadTimeoutError(endpoint_url=''))
     self.assertEqual(sleep_time, 1)
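
For reference, a minimal standalone sketch (not taken from any of the projects above) showing how botocore's ReadTimeoutError is constructed from the endpoint_url keyword that the tests on this page always supply:

    from botocore.exceptions import ReadTimeoutError

    try:
        # The exception formats its message from the endpoint_url keyword,
        # which is why the examples here always pass it.
        raise ReadTimeoutError(endpoint_url='https://s3.amazonaws.com')
    except ReadTimeoutError as exc:
        print(exc)  # e.g. Read timeout on endpoint URL: "https://s3.amazonaws.com"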
Example #2
 def test_download_s3_object_bucket_timeout(self, mock_logger,
                                            mock_s3_conn):
     """LookupTables - Download S3 Object, ReadTimeoutError"""
     mock_s3_conn.side_effect = ReadTimeoutError('TestPool',
                                                 'Test Read timed out.',
                                                 endpoint_url='test/url')
     self.buckets_info['bucket_name'].pop()
     LookupTables._download_s3_objects(self.buckets_info)
     assert_equal(LookupTables._tables, {})
     mock_logger.assert_called_with('Reading %s from S3 timed out',
                                    'foo.json')
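
The simulation above boils down to one unittest.mock pattern; a self-contained sketch (the mock object and call arguments are illustrative, the positional arguments mirror the example):

    from unittest import mock

    from botocore.exceptions import ReadTimeoutError

    fake_s3 = mock.Mock()
    # Assigning the exception instance to side_effect makes the mocked call
    # raise it when invoked, simulating an S3 read timeout.
    fake_s3.get_object.side_effect = ReadTimeoutError(
        'TestPool', 'Test Read timed out.', endpoint_url='test/url')

    try:
        fake_s3.get_object(Bucket='my-bucket', Key='foo.json')
    except ReadTimeoutError:
        print('simulated S3 read timeout')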
Example #3
    def send(self, request):
        try:
            proxy_url = self._proxy_config.proxy_url_for(request.url)
            manager = self._get_connection_manager(request.url, proxy_url)
            conn = manager.connection_from_url(request.url)
            self._setup_ssl_cert(conn, request.url, self._verify)

            request_target = self._get_request_target(request.url, proxy_url)
            urllib_response = conn.urlopen(
                method=request.method,
                url=request_target,
                body=request.body,
                headers=request.headers,
                retries=Retry(False),
                assert_same_host=False,
                preload_content=False,
                decode_content=False,
                chunked=self._chunked(request.headers),
            )

            http_response = botocore.awsrequest.AWSResponse(
                request.url,
                urllib_response.status,
                urllib_response.headers,
                urllib_response,
            )

            if not request.stream_output:
                # Cause the raw stream to be exhausted immediately. We do it
                # this way instead of using preload_content because
                # preload_content will never buffer chunked responses
                http_response.content

            return http_response
        except URLLib3SSLError as e:
            raise SSLError(endpoint_url=request.url, error=e)
        except (NewConnectionError, socket.gaierror) as e:
            raise EndpointConnectionError(endpoint_url=request.url, error=e)
        except ProxyError as e:
            raise ProxyConnectionError(proxy_url=proxy_url, error=e)
        except URLLib3ConnectTimeoutError as e:
            raise ConnectTimeoutError(endpoint_url=request.url, error=e)
        except URLLib3ReadTimeoutError as e:
            raise ReadTimeoutError(endpoint_url=request.url, error=e)
        except ProtocolError as e:
            raise ConnectionClosedError(
                error=e,
                request=request,
                endpoint_url=request.url
            )
        except Exception as e:
            message = 'Exception received when sending urllib3 HTTP request'
            logger.debug(message, exc_info=True)
            raise HTTPClientError(error=e)
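
The except clauses above translate urllib3 errors into botocore exceptions so that calling code can handle timeouts uniformly. A hedged sketch of what that handling might look like around a boto3 call (the function name, bucket, and key are illustrative, not part of the example):

    from botocore.exceptions import (
        ConnectTimeoutError,
        EndpointConnectionError,
        ReadTimeoutError,
    )

    def get_object_or_none(s3_client, bucket, key):
        # ReadTimeoutError means the connection was established but the
        # response stalled; ConnectTimeoutError means it never came up.
        try:
            return s3_client.get_object(Bucket=bucket, Key=key)
        except (ConnectTimeoutError, ReadTimeoutError,
                EndpointConnectionError):
            return None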
Example #4
    def test_run_does_retries_for_get_object(self):
        self.add_get_object_job()
        self.add_shutdown()
        self.add_stubbed_get_object_response(
            body=StreamWithError(self.stream, ReadTimeoutError(
                endpoint_url='')))
        self.add_stubbed_get_object_response()

        self.worker.run()
        self.stubber.assert_no_pending_responses()
        self.assert_contents(self.temp_filename, self.remote_contents)
Example #5
    def test_calculate_sha256_read_timeout(self):
        bucket = 'test-bucket'
        key = 'dir/a'
        vid = 'a1234'

        a_contents = b'a' * 10

        pk = PhysicalKey(bucket, key, vid)
        with mock.patch('botocore.client.BaseClient._make_api_call',
                        side_effect=ReadTimeoutError('Error Uploading', endpoint_url="s3://foobar")):
            results = data_transfer.calculate_sha256([pk], [len(a_contents)])
            assert list(results) == [None]
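
When read timeouts like the one simulated here show up against a real endpoint, the usual first lever is the client configuration; a sketch with illustrative values (not recommendations):

    import boto3
    from botocore.config import Config

    # Widen the read timeout and let botocore's standard retry mode retry
    # transient failures before they surface as ReadTimeoutError.
    config = Config(
        connect_timeout=5,
        read_timeout=30,
        retries={'max_attempts': 5, 'mode': 'standard'},
    )
    s3 = boto3.client('s3', config=config)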
Example #6
    def test_botocore_read_timeout(self, mock_logger, mock_s3_conn):
        """LookupTables - Drivers - S3 Driver - Get - ReadTimeoutError"""
        mock_s3_conn.side_effect = ReadTimeoutError('TestPool',
                                                    'Test Read timed out.',
                                                    endpoint_url='test/url')

        assert_raises(LookupTablesInitializationError,
                      self._foo_driver.initialize)

        mock_logger.assert_called_with(
            'LookupTable (%s): Reading from S3 timed out',
            's3:bucket_name/foo.json')
Example #7
    def test_calculate_sha256_read_timeout(self, mocked_api_call):
        bucket = 'test-bucket'
        key = 'dir/a'
        vid = 'a1234'

        a_contents = b'a' * 10

        pk = PhysicalKey(bucket, key, vid)
        exc = ReadTimeoutError('Error Uploading', endpoint_url="s3://foobar")
        mocked_api_call.side_effect = exc
        results = data_transfer.calculate_sha256([pk], [len(a_contents)])
        assert mocked_api_call.call_count == data_transfer.MAX_FIX_HASH_RETRIES
        assert results == [exc]
Example #8
    def test_run_can_exhaust_retries_for_get_object(self):
        self.add_get_object_job()
        self.add_shutdown()
        # 5 is the current setting for max number of GetObject attempts
        for _ in range(5):
            self.add_stubbed_get_object_response(body=StreamWithError(
                self.stream, ReadTimeoutError(endpoint_url='')))

        self.worker.run()
        self.stubber.assert_no_pending_responses()
        self.assertIsInstance(
            self.transfer_monitor.get_exception(self.transfer_id),
            RetriesExceededError)
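
The retry budget mentioned in the comment is exposed through boto3's TransferConfig when downloads go through the transfer manager; a sketch with placeholder bucket, key, and path:

    import boto3
    from boto3.s3.transfer import TransferConfig

    # num_download_attempts bounds how many times a failed GetObject body read
    # (for example a ReadTimeoutError) is retried before RetriesExceededError.
    transfer_config = TransferConfig(num_download_attempts=5)
    s3 = boto3.client('s3')
    # s3.download_file('my-bucket', 'foo.json', '/tmp/foo.json',
    #                  Config=transfer_config)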
Example #9
 def create_context(self, is_timeout_error=False, status_code=200):
     caught_exception = None
     if is_timeout_error:
         caught_exception = ReadTimeoutError(endpoint_url='https://foo')
     http_response = AWSResponse(status_code=status_code, raw=None,
                                 headers={}, url='https://foo/')
     context = standard.RetryContext(
         attempt_number=1,
         request_context=self.request_context,
         caught_exception=caught_exception,
         http_response=http_response,
     )
     return context
Example #10
    def test_botocore_read_timeout(self, mock_logger, boto_resource_fn_mock):
        """LookupTables - Drivers - DynamoDB Driver - Get - ReadTimeoutError"""
        boto_resource_fn_mock.return_value.Table.return_value.get_item.side_effect = \
            ReadTimeoutError(
                'TestPool', 'Test Read timed out.', endpoint_url='test/url'
            )

        self._driver.initialize()

        assert_raises(LookupTablesInitializationError, self._driver.get,
                      'bbbb:1')

        mock_logger.assert_any_call(
            'LookupTable (%s): Reading from DynamoDB timed out',
            'dynamodb:table_name')
Example #11
    def read(self, amt=None):
        """Read at most amt bytes from the stream.

        If the amt argument is omitted, read all data.
        """
        try:
            chunk = self._raw_stream.read(amt)
        except URLLib3ReadTimeoutError as e:
            # TODO: the url will be None as urllib3 isn't setting it yet
            raise ReadTimeoutError(endpoint_url=e.url, error=e)
        self._amount_read += len(chunk)
        if amt is None or (not chunk and amt > 0):
            # If the server sends empty contents or
            # we ask to read all of the contents, then we know
            # we need to verify the content length.
            self._verify_content_length()
        return chunk
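
Since read() can raise ReadTimeoutError partway through a response, code that consumes a streaming body has to expect it mid-loop; a minimal sketch (chunk size and retry policy are left to the caller):

    from botocore.exceptions import ReadTimeoutError

    def drain_stream(body, chunk_size=8192):
        """Read a streaming body to completion, surfacing mid-read timeouts."""
        buf = bytearray()
        try:
            chunk = body.read(chunk_size)
            while chunk:
                buf.extend(chunk)
                chunk = body.read(chunk_size)
        except ReadTimeoutError:
            # Whatever was read so far is incomplete; the caller decides
            # whether to re-issue the request or propagate the error.
            raise
        return bytes(buf)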
Example #12
    def send(self, request):
        try:
            proxy_url = self._proxy_config.proxy_url_for(request.url)
            manager = self._get_connection_manager(request.url, proxy_url)
            conn = manager.connection_from_url(request.url)
            self._setup_ssl_cert(conn, request.url, self._verify)
            if ensure_boolean(
                    os.environ.get('BOTO_EXPERIMENTAL__ADD_PROXY_HOST_HEADER',
                                   '')):
                # This is currently an "experimental" feature which provides
                # no guarantees of backwards compatibility. It may be subject
                # to change or removal in any patch version. Anyone opting in
                # to this feature should strictly pin botocore.
                host = urlparse(request.url).hostname
                conn.proxy_headers['host'] = host

            request_target = self._get_request_target(request.url, proxy_url)
            urllib_response = conn.urlopen(
                method=request.method,
                url=request_target,
                body=request.body,
                headers=request.headers,
                retries=Retry(False),
                assert_same_host=False,
                preload_content=False,
                decode_content=False,
                chunked=self._chunked(request.headers),
            )

            http_response = botocore.awsrequest.AWSResponse(
                request.url,
                urllib_response.status,
                urllib_response.headers,
                urllib_response,
            )

            if not request.stream_output:
                # Cause the raw stream to be exhausted immediately. We do it
                # this way instead of using preload_content because
                # preload_content will never buffer chunked responses
                http_response.content

            return http_response
        except URLLib3SSLError as e:
            raise SSLError(endpoint_url=request.url, error=e)
        except (NewConnectionError, socket.gaierror) as e:
            raise EndpointConnectionError(endpoint_url=request.url, error=e)
        except ProxyError as e:
            raise ProxyConnectionError(proxy_url=proxy_url, error=e)
        except URLLib3ConnectTimeoutError as e:
            raise ConnectTimeoutError(endpoint_url=request.url, error=e)
        except URLLib3ReadTimeoutError as e:
            raise ReadTimeoutError(endpoint_url=request.url, error=e)
        except ProtocolError as e:
            raise ConnectionClosedError(error=e,
                                        request=request,
                                        endpoint_url=request.url)
        except Exception as e:
            message = 'Exception received when sending urllib3 HTTP request'
            logger.debug(message, exc_info=True)
            raise HTTPClientError(error=e)