def release(self, url, conn):
    """Return *conn* to the pool so it can be reused later.

    :param url: the URL the connection was used for; only its scheme and
                host are used to key the pool.
    :param conn: the HTTP(S) connection object being returned.
    """
    scheme, host = util.urlsplit(url, 'http', False)[:2]
    # ``with`` guarantees the lock is released even if append() raises,
    # replacing the manual acquire/try/finally/release sequence.
    with self.lock:
        self.conns.setdefault((scheme, host), []).append(conn)
def test_changes_releases_conn(self):
    # Drain the continuous changes feed completely so the whole HTTP
    # response body is consumed, then verify the connection found its
    # way back into the session's connection pool.
    for _ in self.db.changes(feed='continuous', timeout=0):
        pass
    scheme, netloc = util.urlsplit(client.DEFAULT_BASE_URL)[:2]
    pool = self.db.resource.session.connection_pool
    self.assertTrue(pool.conns[(scheme, netloc)])
def get(self, url):
    """Return a pooled connection for *url*, creating one if needed.

    Idle connections are keyed by ``(scheme, host)`` so a connection to
    the same endpoint is reused when available.

    :param url: the URL to obtain a connection for.
    :return: a connected ``HTTPConnection`` or ``HTTPSConnection``.
    :raise ValueError: if the URL scheme is neither http nor https.
    """
    scheme, host = util.urlsplit(url, 'http', False)[:2]

    # Try to reuse an existing connection.  The ``with`` block guarantees
    # the lock is released even if an exception is raised.
    with self.lock:
        conns = self.conns.setdefault((scheme, host), [])
        conn = conns.pop() if conns else None

    # Create a new connection if nothing was available.
    if conn is None:
        if scheme == 'http':
            cls = HTTPConnection
        elif scheme == 'https':
            cls = HTTPSConnection
        else:
            raise ValueError('%s is not a supported scheme' % scheme)
        conn = cls(host, timeout=self.timeout)
        conn.connect()

    return conn
def get(self, url):
    """Return a pooled connection for *url*, creating one if needed.

    Idle connections are keyed by ``(scheme, host)`` so a connection to
    the same endpoint is reused when available.

    :param url: the URL to obtain a connection for.
    :return: a connected ``HTTPConnection`` or ``HTTPSConnection``.
    :raise ValueError: if the URL scheme is neither http nor https.
    """
    scheme, host = util.urlsplit(url, 'http', False)[:2]

    # Try to reuse an existing connection.  The ``with`` block guarantees
    # the lock is released even if an exception is raised.
    with self.lock:
        conns = self.conns.setdefault((scheme, host), [])
        conn = conns.pop() if conns else None

    # Create a new connection if nothing was available.  (Stale
    # commented-out debug code that used to live here was removed.)
    if conn is None:
        if scheme == 'http':
            cls = HTTPConnection
        elif scheme == 'https':
            cls = HTTPSConnection
        else:
            raise ValueError('%s is not a supported scheme' % scheme)
        conn = cls(host, timeout=self.timeout)
        conn.connect()

    return conn
def test_changes_releases_conn_when_lastseq(self):
    # Walk the changes feed only as far as the 'last_seq' entry, leaving
    # the generator unexhausted, and verify the connection was still
    # returned to the pool.
    feed = self.db.changes(feed='continuous', timeout=0)
    for event in feed:
        if 'last_seq' in event:
            break
    scheme, netloc = util.urlsplit(client.DEFAULT_BASE_URL)[:2]
    pool = self.db.resource.session.connection_pool
    self.assertTrue(pool.conns[(scheme, netloc)])
def test_changes_releases_conn_when_lastseq(self):
    # Stop consuming the changes feed at the 'last_seq' item, i.e. do not
    # let the generator run to exhaustion, then check the connection
    # nevertheless made it back to the pool.
    for change in self.db.changes(feed='continuous', timeout=0):
        if 'last_seq' in change:
            break
    scheme, netloc = util.urlsplit(client.DEFAULT_BASE_URL)[:2]
    conns = self.db.resource.session.connection_pool.conns
    self.assertTrue(conns[(scheme, netloc)])
def extract_credentials(url):
    """Extract authentication (user name and password) credentials from the
    given URL.

    >>> extract_credentials('http://localhost:5984/_config/')
    ('http://localhost:5984/_config/', None)
    >>> extract_credentials('http://joe:secret@localhost:5984/_config/')
    ('http://localhost:5984/_config/', ('joe', 'secret'))
    >>> extract_credentials('http://joe%40example.com:secret@'
    ...                     'localhost:5984/_config/')
    ('http://localhost:5984/_config/', ('joe@example.com', 'secret'))

    :param url: the URL, possibly carrying a ``user:password@`` userinfo
                component in its network location.
    :return: a ``(url, credentials)`` tuple where *url* has the userinfo
             stripped and *credentials* is a URL-unquoted
             ``(username, password)`` tuple, or ``None`` if the URL
             carried no userinfo.
    """
    parts = util.urlsplit(url)
    netloc = parts[1]
    if '@' in netloc:
        # rsplit: the *last* '@' separates userinfo from host, so a stray
        # unencoded '@' inside the userinfo cannot break the unpacking.
        creds, netloc = netloc.rsplit('@', 1)
        # Split on the first ':' only -- a password may itself contain ':'.
        credentials = tuple(util.urlunquote(i) for i in creds.split(':', 1))
        parts = list(parts)
        parts[1] = netloc
    else:
        credentials = None
    return util.urlunsplit(parts), credentials
def test_changes_releases_conn(self):
    # Fully consume a continuous changes feed so the whole response gets
    # read, then assert the underlying HTTP connection was released back
    # into the pool.
    list(self.db.changes(feed='continuous', timeout=0))
    scheme, netloc = util.urlsplit(client.DEFAULT_BASE_URL)[:2]
    conns = self.db.resource.session.connection_pool.conns
    self.assertTrue(conns[(scheme, netloc)])
def request(self, method, url, body=None, headers=None, credentials=None,
            num_redirects=0):
    """Perform an HTTP request and return ``(status, headers, data)``.

    :param method: the HTTP verb (case-insensitive).
    :param url: the absolute URL to request.
    :param body: ``None``, a string/bytes payload, a file-like object
                 (sent with chunked transfer encoding), or any other
                 object (JSON-encoded automatically).
    :param headers: optional dict of extra request headers.
    :param credentials: optional ``(username, password)`` tuple for HTTP
                        basic authentication.
    :param num_redirects: internal recursion counter for redirect depth.
    :return: a ``(status, message headers, data)`` tuple; *data* is
             ``None``, a buffered file-like object, or a streamed
             ``ResponseBody`` for large/chunked responses.
    """
    if url in self.perm_redirects:
        url = self.perm_redirects[url]
    method = method.upper()

    if headers is None:
        headers = {}
    headers.setdefault('Accept', 'application/json')
    headers['User-Agent'] = self.user_agent

    cached_resp = None
    if method in ('GET', 'HEAD'):
        cached_resp = self.cache.get(url)
        if cached_resp is not None:
            etag = cached_resp[1].get('etag')
            if etag:
                headers['If-None-Match'] = etag

    if (body is not None and not isinstance(body, util.strbase) and
            not hasattr(body, 'read')):
        body = json.encode(body).encode('utf-8')
        headers.setdefault('Content-Type', 'application/json')

    if body is None:
        headers.setdefault('Content-Length', '0')
    elif isinstance(body, util.strbase):
        headers.setdefault('Content-Length', str(len(body)))
    else:
        headers['Transfer-Encoding'] = 'chunked'

    authorization = basic_auth(credentials)
    if authorization:
        headers['Authorization'] = authorization

    path_query = util.urlunsplit(('', '') + util.urlsplit(url)[2:4] + ('',))
    conn = self.connection_pool.get(url)

    def _try_request_with_retries(retries):
        # Retry the request on retryable socket errors, sleeping for the
        # next delay from *retries* between attempts.
        while True:
            try:
                return _try_request()
            except socket.error as e:
                ecode = e.args[0]
                if ecode not in self.retryable_errors:
                    raise
                try:
                    # Use the builtin next() (works on Python 2 and 3;
                    # the py2-only ``retries.next()`` broke on Python 3).
                    delay = next(retries)
                except StopIteration:
                    # No more retries, raise last socket error.
                    raise e
                time.sleep(delay)
                conn.close()

    def _try_request():
        # Send one request over *conn* and return the raw response.
        try:
            conn.putrequest(method, path_query, skip_accept_encoding=True)
            for header in headers:
                conn.putheader(header, headers[header])
            if body is None:
                conn.endheaders()
            else:
                if isinstance(body, util.strbase):
                    if isinstance(body, util.utype):
                        conn.endheaders(body.encode('utf-8'))
                    else:
                        conn.endheaders(body)
                else:
                    # assume a file-like object and send in chunks
                    conn.endheaders()
                    while 1:
                        chunk = body.read(CHUNK_SIZE)
                        if not chunk:
                            break
                        if isinstance(chunk, util.utype):
                            chunk = chunk.encode('utf-8')
                        status = ('%x\r\n' % len(chunk)).encode('utf-8')
                        conn.send(status + chunk + b'\r\n')
                    conn.send(b'0\r\n\r\n')
            return conn.getresponse()
        except BadStatusLine as e:
            # httplib raises a BadStatusLine when it cannot read the status
            # line saying, "Presumably, the server closed the connection
            # before sending a valid response."
            # Raise as ECONNRESET to simplify retry logic.
            if e.line == '' or e.line == "''":
                raise socket.error(errno.ECONNRESET)
            else:
                raise

    resp = _try_request_with_retries(iter(self.retry_delays))
    status = resp.status

    # Handle conditional response
    if status == 304 and method in ('GET', 'HEAD'):
        resp.read()
        self.connection_pool.release(url, conn)
        status, msg, data = cached_resp
        if data is not None:
            data = util.StringIO(data)
        return status, msg, data
    elif cached_resp:
        self.cache.remove(url)

    # Handle redirects
    if status == 303 or \
            method in ('GET', 'HEAD') and status in (301, 302, 307):
        resp.read()
        self.connection_pool.release(url, conn)
        if num_redirects > self.max_redirects:
            raise RedirectLimit('Redirection limit exceeded')
        location = resp.getheader('location')
        if status == 301:
            self.perm_redirects[url] = location
        elif status == 303:
            method = 'GET'
        # NOTE(review): credentials are not forwarded on redirect --
        # presumably deliberate; confirm before changing.
        return self.request(method, location, body, headers,
                            num_redirects=num_redirects + 1)

    data = None
    streamed = False

    # Read the full response for empty responses so that the connection is
    # in good state for the next request
    if method == 'HEAD' or resp.getheader('content-length') == '0' or \
            status < 200 or status in (204, 304):
        resp.read()
        self.connection_pool.release(url, conn)

    # Buffer small non-JSON response bodies
    elif int(resp.getheader('content-length', sys.maxsize)) < CHUNK_SIZE:
        data = resp.read()
        self.connection_pool.release(url, conn)

    # For large or chunked response bodies, do not buffer the full body,
    # and instead return a minimal file-like object
    else:
        data = ResponseBody(resp, self.connection_pool, url, conn)
        streamed = True

    # Handle errors
    if status >= 400:
        ctype = resp.getheader('content-type')
        if data is not None and 'application/json' in ctype:
            data = json.decode(data.decode('utf-8'))
            error = data.get('error'), data.get('reason')
        elif method != 'HEAD':
            error = resp.read()
            self.connection_pool.release(url, conn)
        else:
            error = ''
        if status == 401:
            raise Unauthorized(error)
        elif status == 404:
            raise ResourceNotFound(error)
        elif status == 409:
            raise ResourceConflict(error)
        elif status == 412:
            raise PreconditionFailed(error)
        else:
            raise ServerError((status, error))

    # Store cachable responses
    if not streamed and method == 'GET' and 'etag' in resp.msg:
        self.cache.put(url, (status, resp.msg, data))

    if not streamed and data is not None:
        data = util.StringIO(data)

    return status, resp.msg, data
def request(self, method, url, body=None, headers=None, credentials=None,
            num_redirects=0):
    """Perform an HTTP request and return ``(status, headers, data)``.

    :param method: the HTTP verb (case-insensitive).
    :param url: the absolute URL to request.
    :param body: ``None``, a string/bytes payload, a file-like object
                 (sent with chunked transfer encoding), or any other
                 object (JSON-encoded automatically).
    :param headers: optional dict of extra request headers.
    :param credentials: optional ``(username, password)`` tuple for HTTP
                        basic authentication.
    :param num_redirects: internal recursion counter for redirect depth.
    :return: a ``(status, message headers, data)`` tuple; *data* is
             ``None``, a buffered file-like object, or a streamed
             ``ResponseBody`` for large/chunked responses.
    """
    if url in self.perm_redirects:
        url = self.perm_redirects[url]
    method = method.upper()

    if headers is None:
        headers = {}
    headers.setdefault('Accept', 'application/json')
    headers['User-Agent'] = self.user_agent

    cached_resp = None
    if method in ('GET', 'HEAD'):
        cached_resp = self.cache.get(url)
        if cached_resp is not None:
            etag = cached_resp[1].get('etag')
            if etag:
                headers['If-None-Match'] = etag

    if (body is not None and not isinstance(body, util.strbase) and
            not hasattr(body, 'read')):
        body = json.encode(body).encode('utf-8')
        headers.setdefault('Content-Type', 'application/json')

    if body is None:
        headers.setdefault('Content-Length', '0')
    elif isinstance(body, util.strbase):
        headers.setdefault('Content-Length', str(len(body)))
    else:
        headers['Transfer-Encoding'] = 'chunked'

    authorization = basic_auth(credentials)
    if authorization:
        headers['Authorization'] = authorization

    path_query = util.urlunsplit(('', '') + util.urlsplit(url)[2:4] + ('',))
    conn = self.connection_pool.get(url)

    def _try_request_with_retries(retries):
        # Retry the request on retryable socket errors, sleeping for the
        # next delay from *retries* between attempts.
        while True:
            try:
                return _try_request()
            except socket.error as e:
                ecode = e.args[0]
                if ecode not in self.retryable_errors:
                    raise
                try:
                    delay = next(retries)
                except StopIteration:
                    # No more retries, raise last socket error.
                    raise e
                # Sleep/close only on the retry path.  These must NOT sit
                # in a ``finally`` clause: when the delays are exhausted,
                # *delay* is unbound and a finally-based sleep would raise
                # NameError, masking the real socket error.
                time.sleep(delay)
                conn.close()

    def _try_request():
        # Send one request over *conn* and return the raw response.
        try:
            conn.putrequest(method, path_query, skip_accept_encoding=True)
            for header in headers:
                conn.putheader(header, headers[header])
            if body is None:
                conn.endheaders()
            else:
                if isinstance(body, util.strbase):
                    if isinstance(body, util.utype):
                        conn.endheaders(body.encode('utf-8'))
                    else:
                        conn.endheaders(body)
                else:
                    # assume a file-like object and send in chunks
                    conn.endheaders()
                    while 1:
                        chunk = body.read(CHUNK_SIZE)
                        if not chunk:
                            break
                        if isinstance(chunk, util.utype):
                            chunk = chunk.encode('utf-8')
                        status = ('%x\r\n' % len(chunk)).encode('utf-8')
                        conn.send(status + chunk + b'\r\n')
                    conn.send(b'0\r\n\r\n')
            return conn.getresponse()
        except BadStatusLine as e:
            # httplib raises a BadStatusLine when it cannot read the status
            # line saying, "Presumably, the server closed the connection
            # before sending a valid response."
            # Raise as ECONNRESET to simplify retry logic.
            if e.line == '' or e.line == "''":
                raise socket.error(errno.ECONNRESET)
            else:
                raise

    resp = _try_request_with_retries(iter(self.retry_delays))
    status = resp.status

    # Handle conditional response
    if status == 304 and method in ('GET', 'HEAD'):
        resp.read()
        self.connection_pool.release(url, conn)
        status, msg, data = cached_resp
        if data is not None:
            data = util.StringIO(data)
        return status, msg, data
    elif cached_resp:
        self.cache.remove(url)

    # Handle redirects
    if status == 303 or \
            method in ('GET', 'HEAD') and status in (301, 302, 307):
        resp.read()
        self.connection_pool.release(url, conn)
        if num_redirects > self.max_redirects:
            raise RedirectLimit('Redirection limit exceeded')
        location = resp.getheader('location')
        if status == 301:
            self.perm_redirects[url] = location
        elif status == 303:
            method = 'GET'
        # NOTE(review): credentials are not forwarded on redirect --
        # presumably deliberate; confirm before changing.
        return self.request(method, location, body, headers,
                            num_redirects=num_redirects + 1)

    data = None
    streamed = False

    # Read the full response for empty responses so that the connection is
    # in good state for the next request
    if method == 'HEAD' or resp.getheader('content-length') == '0' or \
            status < 200 or status in (204, 304):
        resp.read()
        self.connection_pool.release(url, conn)

    # Buffer small non-JSON response bodies
    elif int(resp.getheader('content-length', sys.maxsize)) < CHUNK_SIZE:
        data = resp.read()
        self.connection_pool.release(url, conn)

    # For large or chunked response bodies, do not buffer the full body,
    # and instead return a minimal file-like object
    else:
        data = ResponseBody(resp, self.connection_pool, url, conn)
        streamed = True

    # Handle errors
    if status >= 400:
        ctype = resp.getheader('content-type')
        if data is not None and 'application/json' in ctype:
            data = json.decode(data.decode('utf-8'))
            error = data.get('error'), data.get('reason')
        elif method != 'HEAD':
            error = resp.read()
            self.connection_pool.release(url, conn)
        else:
            error = ''
        if status == 401:
            raise Unauthorized(error)
        elif status == 404:
            raise ResourceNotFound(error)
        elif status == 409:
            raise ResourceConflict(error)
        elif status == 412:
            raise PreconditionFailed(error)
        else:
            raise ServerError((status, error))

    # Store cachable responses
    if not streamed and method == 'GET' and 'etag' in resp.msg:
        self.cache.put(url, (status, resp.msg, data))

    if not streamed and data is not None:
        data = util.StringIO(data)

    return status, resp.msg, data