def maybe_rewind(self, msg=""):
    """Check that the request body can be resent before a retry.

    Raises RequestError when the body is neither seekable nor a plain
    string, since such a body cannot be replayed on a new connection.
    """
    body = self.body
    if body is not None:
        rewindable = hasattr(body, 'seek') or \
                isinstance(body, types.StringTypes)
        if not rewindable:
            raise RequestError("error: '%s', body can't be rewind." % msg)
    if log.isEnabledFor(logging.DEBUG):
        log.debug("restart request: %s" % msg)
def set_body(self, body, headers, chunked=False):
    """Set the HTTP body and manage form encoding if needed.

    :param body: string, dict (form data), iterable or file-like object
    :param headers: dict of normalized (uppercase) request headers
    :param chunked: when True a missing content length is allowed,
        because the body will be sent with chunked transfer-encoding

    Fills ``self.bheaders`` with the derived Content-Type and
    Content-Length headers and stores the body on ``self.body``.
    Raises RequestError when the length can't be determined and the
    transfer isn't chunked.
    """
    content_type = headers.get('CONTENT-TYPE')
    content_length = headers.get('CONTENT-LENGTH')

    # empty body: only advertise a zero length for methods that
    # usually carry an entity
    if not body:
        if content_type is not None:
            self.bheaders.append(('Content-Type', content_type))
        if self.method in ('POST', 'PUT'):
            self.bheaders.append(("Content-Length", "0"))
        return

    # derive content type / length from the body when needed
    if isinstance(body, dict):
        if content_type is not None and \
                content_type.startswith("multipart/form-data"):
            type_, opts = cgi.parse_header(content_type)
            boundary = opts.get('boundary', uuid.uuid4().hex)
            body, self.bheaders = multipart_form_encode(body,
                    self.headers, boundary)
        else:
            content_type = "application/x-www-form-urlencoded; charset=utf-8"
            body = form_encode(body)
    elif hasattr(body, "boundary"):
        content_type = "multipart/form-data; boundary=%s" % body.boundary
        content_length = body.get_size()

    if not content_type:
        content_type = 'application/octet-stream'
        if hasattr(body, 'name'):
            content_type = mimetypes.guess_type(body.name)[0]

    if not content_length:
        if hasattr(body, 'fileno'):
            try:
                body.flush()
            except IOError:
                pass
            content_length = str(os.fstat(body.fileno())[6])
        elif hasattr(body, 'getvalue'):
            try:
                content_length = str(len(body.getvalue()))
            except AttributeError:
                pass
        elif isinstance(body, types.StringTypes):
            body = util.to_bytestring(body)
            content_length = str(len(body))

    if content_length:
        # bug fix: always send the header value as a string -- the
        # string-body branch used to append a raw int here while every
        # other branch produced a str.
        self.bheaders.append(("Content-Length", str(content_length)))
        if content_type is not None:
            self.bheaders.append(('Content-Type', content_type))
    elif not chunked:
        raise RequestError("Can't determine content length and " +
                "Transfer-Encoding header is not chunked")

    self.body = body
def get_response(self, request, connection):
    """Build the final response object for `request`.

    Handles redirections when `follow_redirect` is set; otherwise the
    parsed stream is wrapped in `response_class` and passed through the
    response filters. Only reachable via the perform machinery.
    """
    if log.isEnabledFor(logging.DEBUG):
        log.debug("Start to parse response")

    parser = HttpStream(SocketReader(connection.socket()), kind=1,
            decompress=self.decompress)

    if log.isEnabledFor(logging.DEBUG):
        log.debug("Got response: %s %s" % (parser.version(),
            parser.status()))
        log.debug("headers: [%s]" % parser.headers())

    location = parser.headers().get('location')

    if self.follow_redirect:
        should_close = not parser.should_keep_alive()

        if parser.status_code() in (301, 302, 307,):
            # drain the body so the connection can go back to the pool
            parser.body_file().read()
            connection.release(should_close)
            if request.method in ('GET', 'HEAD',) or \
                    self.force_follow_redirect:
                if hasattr(self.body, 'read'):
                    try:
                        self.body.seek(0)
                    except AttributeError:
                        raise RequestError("Can't redirect %s to %s "
                                "because body has already been read"
                                % (self.url, location))
                return self.redirect(location, request)
        elif parser.status_code() == 303 and self.method == "POST":
            # "see other": replay the request as a bodyless GET
            parser.body_file().read()
            connection.release(should_close)
            request.method = "GET"
            request.body = None
            return self.redirect(location, request)

    # build the response object and let filters post-process it
    resp = self.response_class(connection, request, parser)
    for f in self.response_filters:
        f.on_response(resp, request)

    if log.isEnabledFor(logging.DEBUG):
        log.debug("return response class")
    return resp
def do_send(self): addr = (self.host, self.port) is_ssl = (self.uri.scheme == "https") route = (addr, is_ssl, self.filters, self.ssl_args) self._pool = self.conn_manager.get_pool(route) tries = 2 while True: try: if not self._conn: # get new connection self._conn = self._pool.request() # socket s = self._conn.socket() self.headers = copy.copy(self.bheaders) self.headers.extend(self._conn.headers) # apply on_request filters self.filters.apply("on_request", self, tries) # build request headers req_headers = self._req_headers() self.req_headers = req_headers # send request log.info('Start request: %s %s', self.method, self.url) log.debug("Request headers: [%s]", req_headers) s.sendall("".join(req_headers)) if self.body is not None: if hasattr(self.body, 'read'): if hasattr(self.body, 'seek'): self.body.seek(0) sock.sendfile(s, self.body, self.chunked) elif isinstance(self.body, types.StringTypes): sock.send(s, self.body, self.chunked) else: sock.sendlines(s, self.body, self.chunked) if self.chunked: # final chunk sock.send_chunk(s, "") return self.start_response() except socket.gaierror, e: self.shutdown_connection() raise RequestError(str(e)) except socket.timeout, e: if tries < 0: raise RequestTimeout(str(e)) self.shutdown_connection()
def do_redirect(self, response, location):
    """Follow a redirection to `location` with the current settings.

    Raises RedirectLimit once the redirection budget is exhausted and
    RequestError when the response carried no Location header.
    """
    if self.nb_redirections <= 0:
        raise RedirectLimit("Redirection limit is reached")

    if not location:
        raise RequestError('no Location header')

    new_uri = urlparse.urlparse(location)
    if not new_uri.netloc:
        # relative redirect: resolve it against the current scheme/host
        absolute_uri = "%s://%s" % (self.uri.scheme, self.uri.netloc)
        location = urlparse.urljoin(absolute_uri, location)

    log.debug("Redirect to %s" % location)

    self.final_url = location
    # drain the current response before reusing or closing the socket
    response.body.read()
    self.nb_redirections -= 1

    if response.should_close:
        self._conn.close()
        self._conn = None

    return self.request(location, self.method, self.body,
            self.init_headers)
class Client(object): """ A client handle a connection at a time. A client is threadsafe, but an handled shouldn't be shared between threads. All connections are shared between threads via a pool. >>> from restkit import * >>> c = Client() >>> r = c.request("http://google.com") r>>> r.status '301 Moved Permanently' >>> r.body_string() '<HTML><HEAD><meta http-equiv="content-type" content="text/html;charset=utf-8">\n<TITLE>301 Moved</TITLE></HEAD><BODY>\n<H1>301 Moved</H1>\nThe document has moved\n<A HREF="http://www.google.com/">here</A>.\r\n</BODY></HTML>\r\n' >>> c.follow_redirect = True >>> r = c.request("http://google.com") >>> r.status '200 OK' """ version = (1, 1) response_class = Response def __init__(self, follow_redirect=False, force_follow_redirect=False, max_follow_redirect=MAX_FOLLOW_REDIRECTS, filters=None, decompress=True, max_status_line_garbage=None, max_header_count=0, pool=None, response_class=None, timeout=None, use_proxy=False, max_tries=3, wait_tries=0.3, max_conn=150, pool_size=10, backend="thread", **ssl_args): """ Client parameters ~~~~~~~~~~~~~~~~~ :param follow_redirect: follow redirection, by default False :param max_ollow_redirect: number of redirections available :filters: http filters to pass :param decompress: allows the client to decompress the response body :param max_status_line_garbage: defines the maximum number of ignorable lines before we expect a HTTP response's status line. With HTTP/1.1 persistent connections, the problem arises that broken scripts could return a wrong Content-Length (there are more bytes sent than specified). Unfortunately, in some cases, this cannot be detected after the bad response, but only before the next one. So the client is abble to skip bad lines using this limit. 0 disable garbage collection, None means unlimited number of tries. :param max_header_count: determines the maximum HTTP header count allowed. by default no limit. :param manager: the manager to use. By default we use the global one. 
:parama response_class: the response class to use :param timeout: the default timeout of the connection (SO_TIMEOUT) :param max_tries: the number of tries before we give up a connection :param wait_tries: number of time we wait between each tries. :param ssl_args: named argument, see ssl module for more informations """ self.follow_redirect = follow_redirect self.force_follow_redirect = force_follow_redirect self.max_follow_redirect = max_follow_redirect self.decompress = decompress self.filters = filters or [] self.max_status_line_garbage = max_status_line_garbage self.max_header_count = max_header_count self.use_proxy = use_proxy self.request_filters = [] self.response_filters = [] self.load_filters() # set manager session_options = dict(retry_delay=wait_tries, max_size=pool_size, max_conn=max_conn, retry_max=max_tries, timeout=timeout) if pool is None: pool = get_session(backend, **session_options) self._pool = pool self.backend = backend # change default response class if response_class is not None: self.response_class = response_class self.max_tries = max_tries self.wait_tries = wait_tries self.max_conn = max_conn self.pool_size = pool_size self.timeout = timeout self._nb_redirections = self.max_follow_redirect self._url = None self._initial_url = None self._write_cb = None self._headers = None self._sock_key = None self._sock = None self._original = None self.method = 'GET' self.body = None self.ssl_args = ssl_args or {} def load_filters(self): """ Populate filters from self.filters. Must be called each time self.filters is updated. """ for f in self.filters: if hasattr(f, "on_request"): self.request_filters.append(f) if hasattr(f, "on_response"): self.response_filters.append(f) def get_connection(self, request): """ get a connection from the pool or create new one. 
""" addr = parse_netloc(request.parsed_url) is_ssl = request.is_ssl() extra_headers = [] conn = None if self.use_proxy: conn = self.proxy_connection(request, addr, is_ssl) if not conn: conn = self._pool.get(host=addr[0], port=addr[1], pool=self._pool, is_ssl=is_ssl, extra_headers=extra_headers, **self.ssl_args) return conn def proxy_connection(self, request, req_addr, is_ssl): """ do the proxy connection """ proxy_settings = os.environ.get('%s_proxy' % request.parsed_url.scheme) if proxy_settings and proxy_settings is not None: request.is_proxied = True proxy_settings, proxy_auth = _get_proxy_auth(proxy_settings) addr = parse_netloc(urlparse.urlparse(proxy_settings)) if is_ssl: if proxy_auth: proxy_auth = 'Proxy-authorization: %s' % proxy_auth proxy_connect = 'CONNECT %s:%s HTTP/1.0\r\n' % req_addr user_agent = request.headers.iget('user_agent') if not user_agent: user_agent = "User-Agent: restkit/%s\r\n" % __version__ proxy_pieces = '%s%s%s\r\n' % (proxy_connect, proxy_auth, user_agent) conn = self._pool.get(host=addr[0], port=addr[1], pool=self._pool, is_ssl=is_ssl, extra_headers=[], **self.ssl_args) conn.send(proxy_pieces) p = HttpStream(SocketReader(conn.socket()), kind=1, decompress=True) if p.status_code != 200: raise ProxyError("Tunnel connection failed: %d %s" % (resp.status_int, body)) _ = p.body_string() else: headers = [] if proxy_auth: headers = [('Proxy-authorization', proxy_auth)] conn = self._pool.get(host=addr[0], port=addr[1], pool=self._pool, is_ssl=False, extra_headers=[], **self.ssl_args) return conn return def make_headers_string(self, request, extra_headers=None): """ create final header string """ headers = request.headers.copy() if extra_headers is not None: for k, v in extra_headers: headers[k] = v if not request.body and request.method in ( 'POST', 'PUT', ): headers['Content-Length'] = 0 if self.version == (1, 1): httpver = "HTTP/1.1" else: httpver = "HTTP/1.0" ua = headers.iget('user_agent') if not ua: ua = USER_AGENT host = request.host 
accept_encoding = headers.iget('accept-encoding') if not accept_encoding: accept_encoding = 'identity' if request.is_proxied: full_path = ("https://" if request.is_ssl() else "http://") + request.host + request.path else: full_path = request.path lheaders = [ "%s %s %s\r\n" % (request.method, full_path, httpver), "Host: %s\r\n" % host, "User-Agent: %s\r\n" % ua, "Accept-Encoding: %s\r\n" % accept_encoding ] lheaders.extend(["%s: %s\r\n" % (k, str(v)) for k, v in \ headers.items() if k.lower() not in \ ('user-agent', 'host', 'accept-encoding',)]) if log.isEnabledFor(logging.DEBUG): log.debug("Send headers: %s" % lheaders) return "%s\r\n" % "".join(lheaders) def perform(self, request): """ perform the request. If an error happen it will first try to restart it """ if log.isEnabledFor(logging.DEBUG): log.debug("Start to perform request: %s %s %s" % (request.host, request.method, request.path)) tries = 0 while True: conn = None try: # get or create a connection to the remote host conn = self.get_connection(request) # send headers msg = self.make_headers_string(request, conn.extra_headers) # send body if request.body is not None: chunked = request.is_chunked() if request.headers.iget('content-length') is None and \ not chunked: raise RequestError( "Can't determine content length and " + "Transfer-Encoding header is not chunked") # handle 100-Continue status # http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html#sec8.2.3 hdr_expect = request.headers.iget("expect") if hdr_expect is not None and \ hdr_expect.lower() == "100-continue": conn.send(msg) msg = None p = HttpStream(SocketReader(conn.socket()), kind=1, decompress=True) if p.status_code != 100: self.reset_request() if log.isEnabledFor(logging.DEBUG): log.debug("return response class") return self.response_class(conn, request, p) chunked = request.is_chunked() if log.isEnabledFor(logging.DEBUG): log.debug("send body (chunked: %s)" % chunked) if isinstance(request.body, types.StringTypes): if msg is not None: 
conn.send(msg + to_bytestring(request.body), chunked) else: conn.send(to_bytestring(request.body), chunked) else: if msg is not None: conn.send(msg) if hasattr(request.body, 'read'): if hasattr(request.body, 'seek'): request.body.seek(0) conn.sendfile(request.body, chunked) else: conn.sendlines(request.body, chunked) if chunked: conn.send_chunk("") else: conn.send(msg) return self.get_response(request, conn) except socket.gaierror, e: if conn is not None: conn.release(True) raise RequestError(str(e)) except socket.timeout, e: if conn is not None: conn.release(True) raise RequestTimeout(str(e)) except socket.error, e: if log.isEnabledFor(logging.DEBUG): log.debug("socket error: %s" % str(e)) if conn is not None: conn.close() if e[0] not in (errno.EAGAIN, errno.EPIPE, errno.EBADF) or \ tries >= self.max_tries: raise RequestError("socket.error: %s" % str(e)) # should raised an exception in other cases request.maybe_rewind(msg=str(e))
def perform(self, request): """ perform the request. If an error happen it will first try to restart it """ if log.isEnabledFor(logging.DEBUG): log.debug("Start to perform request: %s %s %s" % (request.host, request.method, request.path)) tries = 0 while True: conn = None try: # get or create a connection to the remote host conn = self.get_connection(request) # send headers msg = self.make_headers_string(request, conn.extra_headers) # send body if request.body is not None: chunked = request.is_chunked() if request.headers.iget('content-length') is None and \ not chunked: raise RequestError( "Can't determine content length and " + "Transfer-Encoding header is not chunked") # handle 100-Continue status # http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html#sec8.2.3 hdr_expect = request.headers.iget("expect") if hdr_expect is not None and \ hdr_expect.lower() == "100-continue": conn.send(msg) msg = None p = HttpStream(SocketReader(conn.socket()), kind=1, decompress=True) if p.status_code != 100: self.reset_request() if log.isEnabledFor(logging.DEBUG): log.debug("return response class") return self.response_class(conn, request, p) chunked = request.is_chunked() if log.isEnabledFor(logging.DEBUG): log.debug("send body (chunked: %s)" % chunked) if isinstance(request.body, types.StringTypes): if msg is not None: conn.send(msg + to_bytestring(request.body), chunked) else: conn.send(to_bytestring(request.body), chunked) else: if msg is not None: conn.send(msg) if hasattr(request.body, 'read'): if hasattr(request.body, 'seek'): request.body.seek(0) conn.sendfile(request.body, chunked) else: conn.sendlines(request.body, chunked) if chunked: conn.send_chunk("") else: conn.send(msg) return self.get_response(request, conn) except socket.gaierror, e: if conn is not None: conn.release(True) raise RequestError(str(e)) except socket.timeout, e: if conn is not None: conn.release(True) raise RequestTimeout(str(e))
def perform(self, request):
    """Perform the request, retrying transient socket failures.

    Retryable socket errors are rewound and retried up to
    `self.max_tries` times with a delay between attempts; fatal errors
    are re-raised as RequestError/RequestTimeout.
    """
    if log.isEnabledFor(logging.DEBUG):
        log.debug("Start to perform request: %s %s %s" %
                (request.host, request.method, request.path))

    tries = 0
    while True:
        connection = None
        try:
            # get or create a connection to the remote host
            connection = self.get_connection(request)

            # serialize the request line + headers
            head = self.make_headers_string(request,
                    connection.extra_headers)

            if request.body is None:
                connection.send(head)
            else:
                chunked = request.is_chunked()
                if request.headers.iget('content-length') is None and \
                        not chunked:
                    raise RequestError(
                            "Can't determine content length and " +
                            "Transfer-Encoding header is not chunked")

                # handle 100-Continue status
                # http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html#sec8.2.3
                expect = request.headers.iget("expect")
                if expect is not None and \
                        expect.lower() == "100-continue":
                    connection.send(head)
                    head = None
                    parser = HttpStream(
                            SocketReader(connection.socket()),
                            kind=1, decompress=True)
                    if parser.status_code != 100:
                        self.reset_request()
                        if log.isEnabledFor(logging.DEBUG):
                            log.debug("return response class")
                        return self.response_class(connection, request,
                                parser)

                chunked = request.is_chunked()
                if log.isEnabledFor(logging.DEBUG):
                    log.debug("send body (chunked: %s)" % chunked)

                if isinstance(request.body, (str, )):
                    payload = to_bytestring(request.body)
                    if head is not None:
                        connection.send(head + payload, chunked)
                    else:
                        connection.send(payload, chunked)
                else:
                    if head is not None:
                        connection.send(head)
                    if hasattr(request.body, 'read'):
                        if hasattr(request.body, 'seek'):
                            request.body.seek(0)
                        connection.sendfile(request.body, chunked)
                    else:
                        connection.sendlines(request.body, chunked)
                    if chunked:
                        # final chunk terminates the stream
                        connection.send_chunk("")

            return self.get_response(request, connection)
        except socket.gaierror as e:
            if connection is not None:
                connection.release(True)
            raise RequestError(str(e))
        except socket.timeout as e:
            if connection is not None:
                connection.release(True)
            raise RequestTimeout(str(e))
        except socket.error as e:
            if log.isEnabledFor(logging.DEBUG):
                log.debug("socket error: %s" % str(e))
            if connection is not None:
                connection.close()
            retryable = (errno.EAGAIN, errno.EPIPE, errno.EBADF,
                    errno.ECONNRESET)
            if e[0] not in retryable or tries >= self.max_tries:
                raise RequestError("socket.error: %s" % str(e))
            # should raised an exception in other cases
            request.maybe_rewind(msg=str(e))
        except NoMoreData as e:
            if connection is not None:
                connection.release(True)
            request.maybe_rewind(msg=str(e))
            if tries >= self.max_tries:
                raise
        except BadStatusLine:
            if connection is not None:
                connection.release(True)
            # should raised an exception in other cases
            request.maybe_rewind(msg="bad status line")
            if tries >= self.max_tries:
                raise
        except Exception:
            # unknown error: log and give up
            log.debug("unhandled exception %s" % traceback.format_exc())
            if connection is not None:
                connection.release(True)
            raise

        # back off before the next attempt
        tries += 1
        self._pool.backend_mod.sleep(self.wait_tries)
def maybe_rewind(self, msg=""):
    """Ensure the request body can be replayed for a retry.

    Bodies that are neither seekable nor plain strings can't be sent
    again, so a RequestError is raised for them.
    """
    body = self.body
    if body is None:
        return
    if hasattr(body, 'seek') or isinstance(body, types.StringTypes):
        return
    raise RequestError("error: '%s', body can't be rewind." % msg)
class HttpRequest(object): """ Http Connection object. """ version = (1, 1) response_class = HttpResponse def __init__(self, timeout=DEFAULT_CONN_TIMEOUT, filters=None, follow_redirect=False, force_follow_redirect=False, max_follow_redirect=MAX_FOLLOW_REDIRECTS, decompress=True, pool_instance=None, response_class=None, conn_manager=None, nb_connections=DEFAULT_CONN_NB_CONNECTIONS, **ssl_args): """ HttpRequest constructor :param timeout: socket timeout :param filters: list, list of http filters. see the doc of http filters for more info :param follow_redirect: boolean, by default is false. If true, if the HTTP status is 301, 302 or 303 the client will follow the location. :param max_follow_redirect: max number of redirection. If max is reached the RedirectLimit exception is raised. :param conn_manager: a connectoin manager instance inherited from `restkit.conn.base.ConnectioManager` :param ssl_args: ssl arguments. See http://docs.python.org/library/ssl.html for more information. """ self._conn = None self.timeout = timeout self.headers = [] self.req_headers = [] self.ua = USER_AGENT self.url = None self.follow_redirect = follow_redirect self.nb_redirections = max_follow_redirect self.force_follow_redirect = force_follow_redirect self.decompress = decompress self.method = 'GET' self.body = None self.response_body = StringIO() self.final_url = None # build filter lists self.filters = Filters(filters) self.ssl_args = ssl_args or {} if pool_instance is not None: self.conn_manager = pool_instance elif conn_manager is not None: self.conn_manager = conn_manager else: self.conn_manager = get_default_manager( timeout=timeout, nb_connections=nb_connections ) if response_class is not None: self.response_class = response_class def parse_url(self, url): """ parse url and get host/port""" self.uri = urlparse.urlparse(url) if self.uri.scheme not in ('http', 'https'): raise InvalidUrl("None valid url") host, port = util.parse_netloc(self.uri) self.host = host self.port = port def 
set_body(self, body, headers, chunked=False): """ set HTTP body and manage form if needed """ content_type = headers.get('CONTENT-TYPE') content_length = headers.get('CONTENT-LENGTH') if not body: if content_type is not None: self.bheaders.append(('Content-Type', content_type)) if self.method in ('POST', 'PUT'): self.bheaders.append(("Content-Length", "0")) return # set content lengh if needed if isinstance(body, dict): if content_type is not None and \ content_type.startswith("multipart/form-data"): type_, opts = cgi.parse_header(content_type) boundary = opts.get('boundary', uuid.uuid4().hex) body, self.bheaders = multipart_form_encode(body, self.headers, boundary) else: content_type = "application/x-www-form-urlencoded; charset=utf-8" body = form_encode(body) elif hasattr(body, "boundary"): content_type = "multipart/form-data; boundary=%s" % body.boundary content_length = body.get_size() if not content_type: content_type = 'application/octet-stream' if hasattr(body, 'name'): content_type = mimetypes.guess_type(body.name)[0] if not content_length: if hasattr(body, 'fileno'): try: body.flush() except IOError: pass content_length = str(os.fstat(body.fileno())[6]) elif hasattr(body, 'getvalue'): try: content_length = str(len(body.getvalue())) except AttributeError: pass elif isinstance(body, types.StringTypes): body = util.to_bytestring(body) content_length = len(body) if content_length: self.bheaders.append(("Content-Length", content_length)) if content_type is not None: self.bheaders.append(('Content-Type', content_type)) elif not chunked: raise RequestError("Can't determine content length and " + "Transfer-Encoding header is not chunked") self.body = body def request(self, url, method='GET', body=None, headers=None): """ make effective request :param url: str, url string :param method: str, by default GET. 
http verbs :param body: the body, could be a string, an iterator or a file-like object :param headers: dict or list of tupple, http headers """ self._conn = None self.url = url self.final_url = url self.parse_url(url) self.method = method.upper() self.init_headers = copy.copy(headers or []) self.bheaders = [] # headers are better as list headers = headers or [] if isinstance(headers, dict): headers = headers.items() chunked = False # normalize headers search_headers = ('USER-AGENT', 'CONTENT-TYPE', 'CONTENT-LENGTH', 'ACCEPT-ENCODING', 'TRANSFER-ENCODING', 'CONNECTION', 'HOST') found_headers = {} new_headers = copy.copy(headers) for (name, value) in headers: uname = name.upper() if uname in search_headers: if uname == 'TRANSFER-ENCODING': if value.lower() == "chunked": chunked = True else: found_headers[uname] = value new_headers.remove((name, value)) self.bheaders = new_headers self.chunked = chunked # set body self.set_body(body, found_headers, chunked=chunked) self.found_headers = found_headers # Finally do the request return self.do_send() def _req_headers(self): # by default all connections are HTTP/1.1 if self.version == (1,1): httpver = "HTTP/1.1" else: httpver = "HTTP/1.0" # request path path = self.uri.path or "/" req_path = urlparse.urlunparse(('','', path, self.uri.params, self.uri.query, self.uri.fragment)) ua = self.found_headers.get('USER-AGENT') accept_encoding = self.found_headers.get('ACCEPT-ENCODING') connection = self.found_headers.get('CONNECTION') # default host header try: host = self.uri.netloc.encode('ascii') except UnicodeEncodeError: host = self.uri.netloc.encode('idna') host = self.found_headers.get('HOST') or host # build final request headers req_headers = [ "%s %s %s\r\n" % (self.method, req_path, httpver), "Host: %s\r\n" % host, "User-Agent: %s\r\n" % ua or USER_AGENT, "Accept-Encoding: %s\r\n" % accept_encoding or 'identity' ] if connection is not None: req_headers.append("Connection: %s\r\n" % connection) req_headers.extend(["%s: 
%s\r\n" % (k, v) for k, v in self.headers]) req_headers.append('\r\n') return req_headers def shutdown_connection(self): self._pool.shutdown() if not self._conn: return self._conn.close() self._conn = None def do_send(self): addr = (self.host, self.port) is_ssl = (self.uri.scheme == "https") route = (addr, is_ssl, self.filters, self.ssl_args) self._pool = self.conn_manager.get_pool(route) tries = 2 while True: try: if not self._conn: # get new connection self._conn = self._pool.request() # socket s = self._conn.socket() self.headers = copy.copy(self.bheaders) self.headers.extend(self._conn.headers) # apply on_request filters self.filters.apply("on_request", self, tries) # build request headers req_headers = self._req_headers() self.req_headers = req_headers # send request log.info('Start request: %s %s', self.method, self.url) log.debug("Request headers: [%s]", req_headers) s.sendall("".join(req_headers)) if self.body is not None: if hasattr(self.body, 'read'): if hasattr(self.body, 'seek'): self.body.seek(0) sock.sendfile(s, self.body, self.chunked) elif isinstance(self.body, types.StringTypes): sock.send(s, self.body, self.chunked) else: sock.sendlines(s, self.body, self.chunked) if self.chunked: # final chunk sock.send_chunk(s, "") return self.start_response() except socket.gaierror, e: self.shutdown_connection() raise RequestError(str(e)) except socket.timeout, e: if tries < 0: raise RequestTimeout(str(e)) self.shutdown_connection() except socket.error, e: if e[0] not in (errno.EAGAIN, errno.ECONNABORTED, errno.EPIPE, errno.ECONNREFUSED, errno.ECONNRESET) or tries <= 0: self.shutdown_connection() raise RequestError(str(e)) if e[0] in (errno.EPIPE, errno.ECONNRESET): self.shutdown_connection()
def perform(self, request): """ perform the request. If an error happen it will first try to restart it """ if log.isEnabledFor(logging.DEBUG): log.debug("Start to perform request: %s %s %s" % (request.host, request.method, request.path)) tries = self.max_tries wait = self.wait_tries while tries > 0: try: # get or create a connection to the remote host connection = self.get_connection(request) sck = connection.socket() # send headers msg = self.make_headers_string(request, connection.extra_headers) # send body if request.body is not None: chunked = request.is_chunked() if request.headers.iget('content-length') is None and \ not chunked: raise RequestError( "Can't determine content length and " + "Transfer-Encoding header is not chunked") # handle 100-Continue status # http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html#sec8.2.3 hdr_expect = request.headers.iget("expect") if hdr_expect is not None and \ hdr_expect.lower() == "100-continue": sck.sendall(msg) msg = None resp = Request(http.Unreader(self._sock)) if resp.status_int != 100: self.reset_request() if log.isEnabledFor(logging.DEBUG): log.debug("return response class") return self.response_class(connection, request, resp) chunked = request.is_chunked() if log.isEnabledFor(logging.DEBUG): log.debug("send body (chunked: %s)" % chunked) if isinstance(request.body, types.StringTypes): if msg is not None: send(sck, msg + request.body, chunked) else: send(sck, request.body, chunked) else: if msg is not None: sck.sendall(msg) if hasattr(request.body, 'read'): if hasattr(request.body, 'seek'): request.body.seek(0) sendfile(sck, request.body, chunked) else: sendlines(sck, request.body, chunked) if chunked: send_chunk(sck, "") else: sck.sendall(msg) return self.get_response(request, connection) except socket.gaierror, e: try: connection.close() except: pass raise RequestError(str(e)) except socket.timeout, e: try: connection.close() except: pass if tries <= 0: raise RequestTimeout(str(e))
pass if tries <= 0: raise RequestTimeout(str(e)) except socket.error, e: if log.isEnabledFor(logging.DEBUG): log.debug("socket error: %s" % str(e)) try: connection.close() except: pass if e[0] not in (errno.EAGAIN, errno.ECONNABORTED, errno.EPIPE, errno.ECONNREFUSED, errno.ECONNRESET, errno.EBADF) or tries <= 0: raise RequestError("socket.error: %s" % str(e)) except (StopIteration, NoMoreData): connection.close() if tries <= 0: raise else: if request.body is not None: if not hasattr(request.body, 'read') and \ not isinstance(request.body, types.StringTypes): raise RequestError("connection closed and can't" + "be resent") except Exception: # unkown error log.debug("unhandled exception %s" % traceback.format_exc()) raise