def _request(
		self, method: str, path: str,
		params: ty.Sequence[ty.Tuple[str, str]], *,
		auth: auth_t,
		data: reqdata_sync_t,
		headers: headers_t,
		timeout: timeout_t,
		chunk_size: ty.Optional[int]
) -> ty.Tuple[ty.List[Closable], ty.Generator[bytes, ty.Any, ty.Any]]:
	"""Perform a synchronous HTTP request against the API base URL.

	Returns a tuple of the resources that must be closed when the caller
	is done with the response, and a generator yielding the response body
	in chunks of *chunk_size* bytes.

	Raises the library's own exception types (``TimeoutError``,
	``ConnectionError``, ``ProtocolError``) instead of leaking
	``requests``/``urllib3``/``http.client`` exceptions, and raises for
	non-success HTTP status codes via ``_do_raise_for_status``.
	"""
	# Ensure path is relative so that it is resolved relative to the base
	while path.startswith("/"):
		path = path[1:]
	
	url = urllib.parse.urljoin(self._base_url, path)
	
	# Determine the session object to use *before* entering the cleanup
	# scope below: if this call fails there is nothing to close yet, and
	# referencing the (then still unbound) `closables` name inside the
	# cleanup handler would raise a NameError that masks the real error.
	closables, session = self._access_session()
	
	try:
		# Do HTTP request (synchronously) and map exceptions
		try:
			res = session.request(
				method=method,
				url=url,
				**map_args_to_requests(
					params=params,
					auth=auth,
					headers=headers,
					timeout=(timeout if timeout is not None else self._default_timeout),
				),
				proxies=self._request_proxies,
				data=data,
				stream=True,
			)
			# Response is prepended so that it is closed before the
			# session resources it depends on
			closables.insert(0, res)
		except (requests.ConnectTimeout, requests.Timeout) as error:  # type: ignore[attr-defined]
			raise exceptions.TimeoutError(error) from error
		except requests.ConnectionError as error:  # type: ignore[attr-defined]
			# Report protocol violations separately
			#
			# This used to happen because requests wouldn't catch
			# `http.client.HTTPException` at all, now we recreate
			# this behaviour manually if we detect it.
			if isinstance(error.args[0], urllib3.exceptions.ProtocolError):
				raise exceptions.ProtocolError(error.args[0]) from error.args[0]
			
			raise exceptions.ConnectionError(error) from error
		# Looks like the following error doesn't happen anymore with modern requests?
		except http.client.HTTPException as error:  # pragma: no cover
			raise exceptions.ProtocolError(error) from error
		
		# Raise exception for response status
		# (optionally incorporating the response message, if available)
		self._do_raise_for_status(res)
		
		return closables, res.iter_content(chunk_size=chunk_size)
	except BaseException:
		# Close everything opened so far, then re-raise the original error
		# (BaseException so that KeyboardInterrupt/SystemExit also trigger
		# cleanup, matching the previous bare `except:` behaviour)
		for closable in closables:
			closable.close()
		
		raise
def _error_catcher(self):
	"""
	Catch low-level python exceptions, instead re-raising urllib3
	variants, so that low-level exceptions are not leaked in the
	high-level api.

	On exit, release the connection back to the pool.
	"""
	# NOTE(review): generator with a bare ``yield`` — presumably wrapped by
	# ``@contextlib.contextmanager`` (decorator not visible in this view;
	# confirm at the definition site).
	try:
		try:
			yield
		except SocketTimeout:
			# FIXME: Ideally we'd like to include the url in the
			# ReadTimeoutError but there is yet no clean way to
			# get at it from this context.
			raise exceptions.ReadTimeoutError(
				self._pool, None, 'Read timed out.')
		except connection.BaseSSLError as e:
			# FIXME: Is there a better way to differentiate between SSLErrors?
			if 'read operation timed out' not in str(e):  # Defensive:
				# This shouldn't happen but just in case we're missing an edge
				# case, let's avoid swallowing SSL errors.
				raise
			# An SSL read timeout surfaces as a BaseSSLError whose message
			# contains 'read operation timed out'; map it to the same
			# ReadTimeoutError as a plain socket timeout.
			raise exceptions.ReadTimeoutError(
				self._pool, None, 'Read timed out.')
		except connection.HTTPException as e:
			# This includes IncompleteRead.
			raise exceptions.ProtocolError('Connection broken: %r' % e, e)
	except Exception:
		# Catches both the translated exceptions raised above and anything
		# else the wrapped body raised: clean up, then re-raise unchanged.
		#
		# The response may not be closed but we're not going to use it anymore
		# so close it now to ensure that the connection is released back to the
		# pool.
		if self._original_response and not self._original_response.isclosed():
			self._original_response.close()
		# Before returning the socket, close it. From the server's
		# point of view,
		# this socket is in the middle of handling an SSL handshake/HTTP
		# request so it we were to try and re-use the connection later,
		# we'd see undefined behaviour.
		#
		# Still return the connection to the pool (it will be
		# re-established next time it is used).
		self._connection.close()
		raise
	finally:
		# Runs on success and failure alike: only release the connection
		# once the original response has been fully closed.
		if self._original_response and self._original_response.isclosed():
			self.release_conn()