def _conn_request(self, conn, request_uri, method, body, headers):
    """Issue one HTTP request on `conn`, retrying once on a stale connection.

    Replacement for httplib2.Http._conn_request that streams the response
    body in CHUNK_SIZE pieces and fires the "httpRecv" hook per chunk so
    callers can report download progress.

    Returns:
        (httplib2.Response, content) tuple; content is b'' for HEAD.

    Raises:
        socket.timeout / socket.error / http.client.HTTPException: network
            failures are propagated after closing the connection.
        httplib2.ServerNotFoundError: on DNS resolution failure.
    """
    from io import BytesIO  # local import: only needed by this patch

    for attempt in range(2):
        try:
            if conn.sock is None:
                conn.connect()
            conn.request(method, request_uri, body, headers)
        except socket.timeout:
            raise
        except socket.gaierror:
            conn.close()
            raise httplib2.ServerNotFoundError(
                "Unable to find the server at %s" % conn.host)
        except httplib2.ssl_SSLError:
            conn.close()
            raise
        except socket.error:
            conn.close()
            raise
        except http.client.HTTPException:
            conn.close()
            raise
        try:
            response = conn.getresponse()
        except http.client.BadStatusLine:
            # Stale keep-alive connection: reconnect and retry once.
            print("retry bad line")
            conn.close()
            conn.connect()
            continue
        except (socket.error, http.client.HTTPException):
            raise
        else:
            content = ""
            if method == "HEAD":
                response.close()
            else:
                # BUG FIX: http.client responses yield bytes, so the buffer
                # must be binary -- StringIO would raise TypeError here.
                buf = BytesIO()
                while True:
                    data = response.read(CHUNK_SIZE)
                    if not data:
                        break
                    buf.write(data)
                    runHook("httpRecv", len(data))
                content = buf.getvalue()
            response = httplib2.Response(response)
            if method != "HEAD":
                content = httplib2._decompressContent(response, content)
        return (response, content)
def _conn_request(self, conn, request_uri, method, body, headers):  # pylint: disable=too-many-statements
    """Issue a single HTTP request on `conn` and read the full response body.

    Replacement for httplib2.Http._conn_request with no retry loop: every
    failure mode either re-raises immediately or (for most socket errors)
    falls through to getresponse().

    Returns:
        (httplib2.Response, content) tuple; content is '' for HEAD.

    Raises:
        socket.timeout, httplib2.ServerNotFoundError, ssl.SSLError,
        socket.error (ECONNREFUSED only), http_client.HTTPException.
    """
    try:
        # Reconnect lazily if the underlying socket was closed/never opened.
        if hasattr(conn, 'sock') and conn.sock is None:
            conn.connect()
        conn.request(method, request_uri, body, headers)
    except socket.timeout:
        raise
    except socket.gaierror:
        # DNS failure: surface as httplib2's dedicated error type.
        conn.close()
        raise httplib2.ServerNotFoundError('Unable to find the server at %s' % conn.host)
    except httplib2.ssl.SSLError:
        conn.close()
        raise
    except socket.error as e:
        # Extract the errno from either e.args[0] or e.errno.
        err = 0
        if hasattr(e, 'args'):
            err = getattr(e, 'args')[0]
        else:
            err = e.errno
        if err == httplib2.errno.ECONNREFUSED:  # Connection refused
            raise
        # NOTE(review): any other socket.error falls through WITHOUT
        # re-raising, so getresponse() below runs on a possibly-broken
        # connection. This mirrors old httplib2 behavior -- confirm it is
        # intentional here, since there is no surrounding retry loop.
    except http_client.HTTPException:
        conn.close()
        raise
    try:
        response = conn.getresponse()
    except (socket.error, http_client.HTTPException):
        conn.close()
        raise
    else:
        content = ''
        if method == 'HEAD':
            # HEAD has no body; close to return the connection cleanly.
            conn.close()
        else:
            content = response.read()
        response = httplib2.Response(response)
        if method != 'HEAD':
            # pylint: disable=protected-access
            content = httplib2._decompressContent(response, content)
    return (response, content)
def read(self):
    """Return the decompressed response body.

    NOTE(review): `response` and `realbody` are not defined in this
    fragment -- presumably they are closed over from an enclosing scope in
    the original patch (this looks like a chunk-boundary artifact). Verify
    against the complete source before relying on this method.
    """
    return httplib2._decompressContent(response, realbody)
raise try: response = conn.getresponse() except (socket.error, httplib.HTTPException): conn.close() raise else: content = '' if method == 'HEAD': conn.close() else: content = response.read() response = httplib2.Response(response) if method != 'HEAD': # pylint: disable=protected-access content = httplib2._decompressContent(response, content) return (response, content) class HttpWithDownloadStream(httplib2.Http): """httplib2.Http variant that only pushes bytes through a stream. httplib2 handles media by storing entire chunks of responses in memory, which is undesirable particularly when multiple instances are used during multi-threaded/multi-process copy. This class copies and then overrides some httplib2 functions to use a streaming copy approach that uses small memory buffers. Also disables httplib2 retries (for reasons stated in the HttpWithNoRetries class doc). """
pass try: response = conn.getresponse() except (socket.error, httplib.HTTPException): if i == 0: conn.close() conn.connect() continue else: raise else: content = "" if method == "HEAD": response.close() else: buf = StringIO() while 1: data = response.read(CHUNK_SIZE) if not data: break buf.write(data) runHook("httpRecv", len(data)) content = buf.getvalue() response = httplib2.Response(response) if method != "HEAD": content = httplib2._decompressContent(response, content) break return (response, content) httplib2.Http._conn_request = _conn_request
def _conn_request(self, conn, request_uri, method, body, headers):
    """Issue one HTTP request, streaming GET payloads to self.stream.

    Replacement for httplib2.Http._conn_request that writes 200/206 GET
    response bodies to self.stream in TRANSFER_BUFFER_SIZE pieces instead
    of buffering them in memory. Other responses (redirects, token
    refreshes, errors) keep stock httplib2 behavior.

    Returns:
        (httplib2.Response, content); content is '' for streamed GETs/HEAD.

    Raises:
        socket.timeout, httplib2.ServerNotFoundError, ssl.SSLError,
        socket.error (ECONNREFUSED), http_client.HTTPException,
        apitools_exceptions.InvalidUserInputError if self.stream is None.
    """
    try:
        if hasattr(conn, 'sock') and conn.sock is None:
            conn.connect()
        conn.request(method, request_uri, body, headers)
    except socket.timeout:
        raise
    except socket.gaierror:
        conn.close()
        raise httplib2.ServerNotFoundError('Unable to find the server at %s' % conn.host)
    except httplib2.ssl.SSLError:
        conn.close()
        raise
    except socket.error as e:
        # Re-raise only for connection-refused; other socket errors fall
        # through and are surfaced by getresponse() below.
        err = 0
        if hasattr(e, 'args'):
            err = getattr(e, 'args')[0]
        else:
            err = e.errno
        if err == httplib2.errno.ECONNREFUSED:  # Connection refused
            raise
    except http_client.HTTPException:
        # Just because the server closed the connection doesn't apparently mean
        # that the server didn't send a response.
        conn.close()
        raise
    try:
        response = conn.getresponse()
    except (socket.error, http_client.HTTPException):
        conn.close()
        raise
    else:
        content = ''
        if method == 'HEAD':
            conn.close()
            response = httplib2.Response(response)
        elif method == 'GET' and response.status in (http_client.OK,
                                                     http_client.PARTIAL_CONTENT):
            content_length = None
            if hasattr(response, 'msg'):
                content_length = response.getheader('content-length')
            http_stream = response
            bytes_read = 0
            while True:
                new_data = http_stream.read(TRANSFER_BUFFER_SIZE)
                if new_data:
                    if self.stream is None:
                        raise apitools_exceptions.InvalidUserInputError(
                            'Cannot exercise HttpWithDownloadStream with no stream')
                    text_util.write_to_fd(self.stream, new_data)
                    bytes_read += len(new_data)
                else:
                    break
            # BUG FIX: `long` does not exist in Python 3 (this module uses
            # Py3-only syntax elsewhere); plain int comparison is correct.
            if (content_length is not None and
                    int(bytes_read) != int(content_length)):
                # The input stream terminated before we were able to read the
                # entire contents, possibly due to a network condition. Set
                # content-length to indicate how many bytes we actually read.
                self._logger.log(
                    logging.DEBUG, 'Only got %s bytes out of content-length %s '
                    'for request URI %s. Resetting content-length to match '
                    'bytes read.', bytes_read, content_length, request_uri)
                response.msg['content-length'] = str(bytes_read)
            response = httplib2.Response(response)
        else:
            # We fall back to the current httplib2 behavior if we're
            # not processing download bytes, e.g., it's a redirect, an
            # oauth2client POST to refresh an access token, or any HTTP
            # status code that doesn't include object content.
            content = response.read()
            response = httplib2.Response(response)
            # pylint: disable=protected-access
            content = httplib2._decompressContent(response, content)
    return (response, content)
class HttpWithDownloadStream(httplib2.Http):
    """httplib2.Http variant that only pushes bytes through a stream.

    httplib2 handles media by storing entire chunks of responses in memory,
    which is undesirable particularly when multiple instances are used during
    multi-threaded/multi-process copy. This class copies and then overrides
    some httplib2 functions to use a streaming copy approach that uses small
    memory buffers.
    """
    # NOTE(review): this class uses Python 2-only syntax
    # (`except socket.error, e:`) and `httplib`/`long`; it cannot run
    # under Python 3 as written.

    def __init__(self, stream=None, *args, **kwds):
        # The stream is mandatory despite the keyword default; a bare
        # keyword lets callers pass it by name.
        if stream is None:
            raise apitools_exceptions.InvalidUserInputError(
                'Cannot create HttpWithDownloadStream with no stream')
        self._stream = stream
        self._logger = logging.getLogger()
        super(HttpWithDownloadStream, self).__init__(*args, **kwds)

    @property
    def stream(self):
        # Read-only accessor for the destination stream.
        return self._stream

    # pylint: disable=too-many-statements
    def _conn_request(self, conn, request_uri, method, body, headers):
        # Retry loop: up to httplib2.RETRIES attempts; a BadStatusLine on
        # the very first attempt resets the counter once (stale keep-alive).
        i = 0
        seen_bad_status_line = False
        while i < httplib2.RETRIES:
            i += 1
            try:
                if hasattr(conn, 'sock') and conn.sock is None:
                    conn.connect()
                conn.request(method, request_uri, body, headers)
            except socket.timeout:
                raise
            except socket.gaierror:
                conn.close()
                raise httplib2.ServerNotFoundError(
                    'Unable to find the server at %s' % conn.host)
            except httplib2.ssl_SSLError:
                conn.close()
                raise
            except socket.error, e:
                # Extract errno from e.args[0] or e.errno; only
                # connection-refused aborts immediately.
                err = 0
                if hasattr(e, 'args'):
                    err = getattr(e, 'args')[0]
                else:
                    err = e.errno
                if err == httplib2.errno.ECONNREFUSED:  # Connection refused
                    raise
            except httplib.HTTPException:
                # Just because the server closed the connection doesn't apparently mean
                # that the server didn't send a response.
                if hasattr(conn, 'sock') and conn.sock is None:
                    if i < httplib2.RETRIES - 1:
                        conn.close()
                        conn.connect()
                        continue
                    else:
                        conn.close()
                        raise
                if i < httplib2.RETRIES - 1:
                    conn.close()
                    conn.connect()
                    continue
            try:
                response = conn.getresponse()
            except httplib.BadStatusLine:
                # If we get a BadStatusLine on the first try then that means
                # the connection just went stale, so retry regardless of the
                # number of RETRIES set.
                if not seen_bad_status_line and i == 1:
                    i = 0
                    seen_bad_status_line = True
                    conn.close()
                    conn.connect()
                    continue
                else:
                    conn.close()
                    raise
            except (socket.error, httplib.HTTPException):
                if i < httplib2.RETRIES - 1:
                    conn.close()
                    conn.connect()
                    continue
                else:
                    conn.close()
                    raise
            else:
                content = ''
                if method == 'HEAD':
                    # HEAD has no body to stream.
                    conn.close()
                    response = httplib2.Response(response)
                else:
                    if response.status in (httplib.OK, httplib.PARTIAL_CONTENT):
                        content_length = None
                        if hasattr(response, 'msg'):
                            content_length = response.getheader(
                                'content-length')
                        http_stream = response
                        bytes_read = 0
                        # Stream the body to self.stream in fixed-size
                        # buffers instead of reading it all into memory.
                        while True:
                            new_data = http_stream.read(TRANSFER_BUFFER_SIZE)
                            if new_data:
                                self.stream.write(new_data)
                                bytes_read += len(new_data)
                            else:
                                break
                        if (content_length is not None and
                                long(bytes_read) != long(content_length)):
                            # The input stream terminated before we were able to read the
                            # entire contents, possibly due to a network condition. Set
                            # content-length to indicate how many bytes we actually read.
                            self._logger.log(
                                logging.DEBUG,
                                'Only got %s bytes out of content-length %s '
                                'for request URI %s. Resetting content-length to match '
                                'bytes read.',
                                bytes_read, content_length, request_uri)
                            response.msg['content-length'] = str(bytes_read)
                        response = httplib2.Response(response)
                    else:
                        # We fall back to the current httplib2 behavior if we're
                        # not processing bytes (eg it's a redirect).
                        content = response.read()
                        response = httplib2.Response(response)
                        # pylint: disable=protected-access
                        content = httplib2._decompressContent(
                            response, content)
                break
        # NOTE(review): the expected trailing `return (response, content)`
        # appears to be truncated at this chunk boundary -- as written the
        # method returns None; verify against the complete source.
class LimitingBodyHttp(httplib2.Http):
    """This is a patched form of httplib2's Http class that is designed to
    reject reading response bodies that are too large to handle. By default
    httplib2 simply reads the whole body of the response into memory. This
    will read at most a certain size.
    """
    # NOTE(review): Python 2-only syntax (`except socket.error, e:`) and
    # `httplib`; not runnable under Python 3 as written. Assumes `errno`
    # and `ResponseTooLargeError` are defined elsewhere in the module.

    def __init__(self, max_body_size=1024 * 40, **kw):
        # max_body_size: largest response body (bytes) read before the
        # connection is dropped with ResponseTooLargeError.
        self.max_body_size = max_body_size
        self.follow_all_redirects = True
        super(LimitingBodyHttp, self).__init__(**kw)

    def _conn_request(self, conn, request_uri, method, body, headers):
        # At most two attempts: the second retry exists to recover from a
        # stale keep-alive connection.
        for i in range(2):
            try:
                if conn.sock is None:
                    conn.connect()
                conn.request(method, request_uri, body, headers)
            except socket.timeout:
                raise
            except socket.gaierror:
                conn.close()
                raise httplib2.ServerNotFoundError("Unable to find the server "
                                                   "at %s" % conn.host)
            except httplib2.ssl_SSLError:
                conn.close()
                raise
            except socket.error, e:
                # Only connection-refused aborts; other socket errors fall
                # through to getresponse() below.
                err = 0
                if hasattr(e, 'args'):
                    err = getattr(e, 'args')[0]
                else:
                    err = e.errno
                if err == errno.ECONNREFUSED:  # Connection refused
                    raise
            except httplib.HTTPException:
                # Just because the server closed the connection doesn't
                # apparently mean that the server didn't send a response.
                if conn.sock is None:
                    if i == 0:
                        conn.close()
                        conn.connect()
                        continue
                    else:
                        conn.close()
                        raise
                if i == 0:
                    conn.close()
                    conn.connect()
                    continue
            try:
                response = conn.getresponse()
            except (socket.error, httplib.HTTPException):
                if i == 0:
                    conn.close()
                    conn.connect()
                    continue
                else:
                    raise
            else:
                content = ""
                if method == "HEAD":
                    response.close()
                else:
                    # Read one byte past the limit so oversize bodies are
                    # detectable without reading them entirely.
                    content = response.read(self.max_body_size + 1)
                    if len(content) > self.max_body_size:
                        # Too large. Drop the connection on the floor.
                        response.close()
                        conn.close()
                        raise ResponseTooLargeError(
                            "The response was larger than the maximum"
                            " size (%s) allowed" % self.max_body_size,
                            response, content)
                response = httplib2.Response(response)
                if method != "HEAD":
                    content = httplib2._decompressContent(response, content)
                break
        # NOTE(review): the expected trailing `return (response, content)`
        # appears truncated at this chunk boundary; verify against the
        # complete source.
class HttpWithDownloadStream(httplib2.Http):
    """httplib2.Http variant that only pushes bytes through a stream.

    httplib2 handles media by storing entire chunks of responses in memory,
    which is undesirable particularly when multiple instances are used during
    multi-threaded/multi-process copy. This class copies and then overrides
    some httplib2 functions to use a streaming copy approach that uses small
    memory buffers.
    """
    # NOTE(review): Python 2-only syntax (`except socket.error, e:`) and
    # `httplib`; not runnable under Python 3 as written. This appears to be
    # an older variant of the content-length-checking class defined
    # elsewhere in this file.

    def __init__(self, stream=None, *args, **kwds):
        # The stream is required despite the keyword default.
        if stream is None:
            raise apitools_exceptions.InvalidUserInputError(
                'Cannot create HttpWithDownloadStream with no stream')
        self._stream = stream
        super(HttpWithDownloadStream, self).__init__(*args, **kwds)

    @property
    def stream(self):
        # Read-only accessor for the destination stream.
        return self._stream

    # pylint: disable=too-many-statements
    def _conn_request(self, conn, request_uri, method, body, headers):
        # Retry loop: up to httplib2.RETRIES attempts; a BadStatusLine on
        # the very first attempt resets the counter once (stale keep-alive).
        i = 0
        seen_bad_status_line = False
        while i < httplib2.RETRIES:
            i += 1
            try:
                if hasattr(conn, 'sock') and conn.sock is None:
                    conn.connect()
                conn.request(method, request_uri, body, headers)
            except socket.timeout:
                raise
            except socket.gaierror:
                conn.close()
                raise httplib2.ServerNotFoundError(
                    'Unable to find the server at %s' % conn.host)
            except httplib2.ssl_SSLError:
                conn.close()
                raise
            except socket.error, e:
                # Extract errno; only connection-refused aborts immediately.
                err = 0
                if hasattr(e, 'args'):
                    err = getattr(e, 'args')[0]
                else:
                    err = e.errno
                if err == httplib2.errno.ECONNREFUSED:  # Connection refused
                    raise
            except httplib.HTTPException:
                # Just because the server closed the connection doesn't apparently mean
                # that the server didn't send a response.
                if hasattr(conn, 'sock') and conn.sock is None:
                    if i < httplib2.RETRIES - 1:
                        conn.close()
                        conn.connect()
                        continue
                    else:
                        conn.close()
                        raise
                if i < httplib2.RETRIES - 1:
                    conn.close()
                    conn.connect()
                    continue
            try:
                response = conn.getresponse()
            except httplib.BadStatusLine:
                # If we get a BadStatusLine on the first try then that means
                # the connection just went stale, so retry regardless of the
                # number of RETRIES set.
                if not seen_bad_status_line and i == 1:
                    i = 0
                    seen_bad_status_line = True
                    conn.close()
                    conn.connect()
                    continue
                else:
                    conn.close()
                    raise
            except (socket.error, httplib.HTTPException):
                if i < httplib2.RETRIES - 1:
                    conn.close()
                    conn.connect()
                    continue
                else:
                    conn.close()
                    raise
            else:
                content = ''
                if method == 'HEAD':
                    conn.close()
                    response = httplib2.Response(response)
                else:
                    if response.status in (httplib.OK, httplib.PARTIAL_CONTENT):
                        http_stream = response
                        # Start last_position and new_position at dummy values
                        # so the loop runs at least once; it stops when a read
                        # advances the position by zero bytes (EOF).
                        last_position = -1
                        new_position = 0
                        while new_position != last_position:
                            last_position = new_position
                            new_data = http_stream.read(TRANSFER_BUFFER_SIZE)
                            self.stream.write(new_data)
                            new_position += len(new_data)
                        response = httplib2.Response(response)
                    else:
                        # We fall back to the current httplib2 behavior if we're
                        # not processing bytes (eg it's a redirect).
                        content = response.read()
                        response = httplib2.Response(response)
                        # pylint: disable=protected-access
                        content = httplib2._decompressContent(
                            response, content)
                break
        # NOTE(review): the expected trailing `return (response, content)`
        # appears truncated at this chunk boundary; verify against the
        # complete source.
def _conn_request(self, conn, request_uri, method, body, headers):
    """Issue one HTTP request, streaming GET payloads to self.stream.

    Replacement for httplib2.Http._conn_request that writes 200/206 GET
    response bodies to self.stream in TRANSFER_BUFFER_SIZE pieces instead
    of buffering them in memory. On a short read it rewrites the response's
    content-length header so retry logic sees the true byte count. Other
    responses (redirects, token refreshes, errors) keep stock httplib2
    behavior.

    Returns:
        (httplib2.Response, content); content is '' for streamed GETs/HEAD.

    Raises:
        socket.timeout, httplib2.ServerNotFoundError, ssl.SSLError,
        socket.error (ECONNREFUSED), http_client.HTTPException,
        apitools_exceptions.InvalidUserInputError if self.stream is None.
    """
    try:
        if hasattr(conn, 'sock') and conn.sock is None:
            conn.connect()
        conn.request(method, request_uri, body, headers)
    except socket.timeout:
        raise
    except socket.gaierror:
        conn.close()
        raise httplib2.ServerNotFoundError('Unable to find the server at %s' % conn.host)
    except httplib2.ssl.SSLError:
        conn.close()
        raise
    except socket.error as e:
        # Re-raise only for connection-refused; other socket errors fall
        # through and are surfaced by getresponse() below.
        err = 0
        if hasattr(e, 'args'):
            err = getattr(e, 'args')[0]
        else:
            err = e.errno
        if err == httplib2.errno.ECONNREFUSED:  # Connection refused
            raise
    except http_client.HTTPException:
        # Just because the server closed the connection doesn't apparently mean
        # that the server didn't send a response.
        conn.close()
        raise
    try:
        response = conn.getresponse()
    except (socket.error, http_client.HTTPException):
        conn.close()
        raise
    else:
        content = ''
        if method == 'HEAD':
            conn.close()
            response = httplib2.Response(response)
        elif method == 'GET' and response.status in (http_client.OK,
                                                     http_client.PARTIAL_CONTENT):
            content_length = None
            if hasattr(response, 'msg'):
                content_length = response.getheader('content-length')
            http_stream = response
            bytes_read = 0
            while True:
                new_data = http_stream.read(TRANSFER_BUFFER_SIZE)
                if new_data:
                    if self.stream is None:
                        raise apitools_exceptions.InvalidUserInputError(
                            'Cannot exercise HttpWithDownloadStream with no stream')
                    text_util.write_to_fd(self.stream, new_data)
                    bytes_read += len(new_data)
                else:
                    break
            # BUG FIX: `long` does not exist in Python 3 (this module uses
            # Py3-only syntax elsewhere); plain int comparison is correct.
            if (content_length is not None and
                    int(bytes_read) != int(content_length)):
                # The input stream terminated before we were able to read the
                # entire contents, possibly due to a network condition. Set
                # content-length to indicate how many bytes we actually read.
                self._logger.log(
                    logging.DEBUG, 'Only got %s bytes out of content-length %s '
                    'for request URI %s. Resetting content-length to match '
                    'bytes read.', bytes_read, content_length, request_uri)
                # Failing to delete existing headers before setting new values results
                # in the header being set twice, see
                # https://docs.python.org/3/library/email.compat32-message.html#email.message.Message.__setitem__.
                # This trips apitools up when executing a retry, so the line below is
                # essential:
                del response.msg['content-length']
                response.msg['content-length'] = str(bytes_read)
            response = httplib2.Response(response)
        else:
            # We fall back to the current httplib2 behavior if we're
            # not processing download bytes, e.g., it's a redirect, an
            # oauth2client POST to refresh an access token, or any HTTP
            # status code that doesn't include object content.
            content = response.read()
            response = httplib2.Response(response)
            # pylint: disable=protected-access
            content = httplib2._decompressContent(response, content)
    return (response, content)