def connect(self, addr, is_ssl):
    """Open a new client socket to ``addr``, trying each resolved address.

    :param addr: tuple (host, port) passed to ``socket.getaddrinfo``
    :param is_ssl: when True, wrap the connected socket with SSL
    :return: a connected (and possibly SSL-wrapped) socket
    :raise socket.error: when no address could be connected, or on EMFILE
    :raise ValueError: when SSL is requested but unsupported
    """
    if log.isEnabledFor(logging.DEBUG):
        log.debug("create new connection")
    for res in socket.getaddrinfo(addr[0], addr[1], 0, socket.SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sck = None  # so the except clause can tell whether a socket exists
        try:
            sck = socket.socket(af, socktype, proto)
            if self.timeout is not None:
                sck.settimeout(self.timeout)
            sck.connect(sa)
            if is_ssl:
                if not have_ssl:
                    raise ValueError("https isn't supported. On python 2.5x,"
                            + " https support requires ssl module "
                            + "(http://pypi.python.org/pypi/ssl) "
                            + "to be installed.")
                validate_ssl_args(self.ssl_args)
                sck = _ssl_wrapper(sck, **self.ssl_args)
            return sck
        except socket.error as ex:
            # Bug fix: the original compared the exception object itself to
            # 24 (`ex == 24`), which is always False, so EMFILE was never
            # re-raised. Compare the errno instead.
            if ex.args[0] == 24:  # EMFILE: too many open files
                raise
            elif sck is not None:
                # only close a socket that was actually created
                close(sck)
    # every candidate address failed (or getaddrinfo returned nothing):
    # raise instead of silently returning None
    raise socket.error("getaddrinfo returns an empty list")
def close_connections(self):
    """Close and forget every socket currently tracked as active."""
    self._lock.acquire()
    try:
        # iterate over a copy: we delete entries while walking
        active_sockets = self.active_sockets.copy()
        # Bug fix: entries are stored as (sock, timestamp, key) triples by
        # store_socket(); the original 2-tuple unpack raised ValueError.
        for fno, (sck, t0, k) in active_sockets.items():
            close(sck)
            del self.active_sockets[fno]
    finally:
        self._lock.release()
def clean(self, address):
    """Close and drop every pooled socket for ``address``.

    :param address: tuple (host, port) used as pool key
    """
    self._lock.acquire()
    try:
        host = self.hosts.get(address)
        if not host:
            return
        # drain the deque; renamed local to `sck` — the original named it
        # `socket`, shadowing the socket module
        while host.pool:
            sck = host.pool.popleft()
            sock.close(sck)
    finally:
        self._lock.release()
def murder_connections(self, *args):
    """Reap active sockets that have been idle longer than the timeout."""
    self._lock.acquire()
    try:
        # walk a snapshot, since stale entries are removed along the way
        for fno, (sck, started, key) in self.active_sockets.copy().items():
            age = time.time() - started
            if age > self.timeout:
                close(sck)
                del self.active_sockets[fno]
                self.connections_count[key] -= 1
    finally:
        self._lock.release()
def put(self, address, socket):
    """Release ``socket`` back into the pool for ``address``.

    :param address: tuple (host, port) used as pool key
    :param socket: socket object being returned to the pool
    """
    self._lock.acquire()
    try:
        host = self.hosts.get(address)
        if not host:
            host = _Host(address)
            # Bug fix: a freshly created host was never stored back into
            # self.hosts, so the socket appended below was silently lost.
            self.hosts[address] = host
        if len(host.pool) > self.max_connections:
            # pool already full for this host: just drop the connection
            sock.close(socket)
            return
        host.pool.append(socket)
    finally:
        self._lock.release()
def clean(self, address):
    """ close all sockets in the pool for this address

    :param address: tuple (Host, address)
    """
    host = self.hosts.get(address)
    if not host:
        return
    # Drain the idle sockets. The original wrapped this loop in a redundant
    # `if host.free_connections:` guard; the while condition already stops
    # on an empty deque. Local renamed to `sck` to stop shadowing the
    # socket module.
    while host.free_connections:
        sck = host.free_connections.popleft()
        sock.close(sck)
    # then reclaim sockets still accounted for through the queue
    while host.nb_connections:
        sck = host.pool.get()
        sock.close(sck)
        host.nb_connections -= 1
def put(self, address, socket):
    """ release socket in the pool

    :param address: tuple (Host, address)
    :param socket: a socket object
    """
    host = self.hosts.get(address)
    if not host:
        host = _Host(address)
        # Bug fix: register the new host; otherwise it (and the socket
        # handed back below) was discarded as soon as this method returned.
        self.hosts[address] = host
    if host.nb_connections > self.max_connections:
        # over capacity: drop the connection instead of pooling it
        sock.close(socket)
        host.nb_connections -= 1
    elif host.waiting():
        # a consumer is blocked on the queue: hand the socket straight over
        host.pool.put(socket)
        self.monitor_socket(socket)
    else:
        # nobody waiting: park it with the idle connections
        host.free_connections.append(socket)
        self.monitor_socket(socket)
def store_socket(self, sck, addr, ssl=False):
    """ store a socket in the pool to reuse it across threads """
    if self._reaper is not None:
        self._reaper.ensure_started()
    with self._lock:
        key = (addr, ssl)
        socks = self.sockets.get(key)
        if socks is None:
            socks = deque()
        if len(socks) >= self.max_conn:
            # enough pooled connections for this destination: drop it
            close(sck)
            return
        try:
            fno = sck.fileno()
        except (socket.error, AttributeError):
            # socket has been closed underneath us
            return
        self.active_sockets[fno] = (sck, time.time(), key)
        socks.appendleft((fno, sck))
        self.sockets[key] = socks
        self.connections_count[key] = self.connections_count.get(key, 0) + 1
def do_redirect(self):
    """ follow redirections if needed"""
    if self.nb_redirections <= 0:
        raise RedirectLimit("Redirection limit is reached")
    location = self.parser.headers_dict.get('Location')
    if not location:
        raise RequestError('no Location header')
    target = urlparse.urlparse(location)
    if not target.netloc:
        # relative redirect: resolve it against the current request uri
        base = "%s://%s" % (self.uri.scheme, self.uri.netloc)
        location = urlparse.urljoin(base, location)
    log.debug("Redirect to %s" % location)
    self.final_url = location
    # drain the pending body so the connection can be discarded cleanly
    self.response_body.read()
    self.nb_redirections -= 1
    sock.close(self._sock)
    # replay the request against the new location
    return self.request(location, self.method, self.body, self.headers)
def store_socket(self, sck, addr, ssl=False):
    """ store a socket in the pool to reuse it across threads """
    if self._reaper is not None:
        self._reaper.ensure_started()
    key = (addr, ssl)
    self._lock.acquire()
    try:
        try:
            pooled = self.sockets[key]
        except KeyError:
            pooled = deque()
        if len(pooled) >= self.max_conn:
            # pool is already at capacity for this destination
            close(sck)
            return
        try:
            fno = sck.fileno()
        except (socket.error, AttributeError):
            return  # socket has been closed
        self.active_sockets[fno] = (sck, time.time(), key)
        pooled.appendleft((fno, sck))
        self.sockets[key] = pooled
        try:
            self.connections_count[key] += 1
        except KeyError:
            self.connections_count[key] = 1
    finally:
        self._lock.release()
def clean_connections(self):
    """Close our socket and purge pooled connections for this host."""
    sock.close(self._sock)
    pool = self.connections
    # the pool implementation may not support per-address cleanup
    if hasattr(pool, 'clean'):
        pool.clean((self.host, self.port))
def close(self):
    """Shut down this connection's underlying socket."""
    # guard avoids the debug-call overhead when debug logging is off
    if log.isEnabledFor(logging.DEBUG):
        log.debug("close connection")
    sock.close(self._sock)
def release_connection(self, address, socket):
    """Return ``socket`` to the pool, or close it when no pool exists.

    :param address: tuple (host, port) identifying the pool bucket
    :param socket: the socket object being released
    """
    if not self.connections:
        sock.close(socket)
    else:
        # Bug fix: put back the socket that was passed in; the original
        # pushed self._sock and silently ignored the ``socket`` argument.
        self.connections.put(address, socket)
def start_response(self): """ Get headers, set Body object and return HttpResponse """ # read headers headers = [] buf = StringIO() data = self._sock.recv(sock.CHUNK_SIZE) buf.write(data) buf2 = self.parser.filter_headers(headers, buf) if not buf2: while True: data = self._sock.recv(sock.CHUNK_SIZE) if not data: break buf.write(data) buf2 = self.parser.filter_headers(headers, buf) if buf2: break log.debug("Start response: %s" % str(self.parser.status_line)) log.debug("Response headers: [%s]" % str(self.parser.headers)) if self.method == "HEAD": self.response_body = tee.TeeInput(self._sock, self.parser, StringIO()) self.response_body._is_socket = False sock.close(self._sock) elif (not self.parser.content_len and not self.parser.is_chunked): if self.parser.should_close: # http 1.0 or something like it. # we try to get missing body log.debug("No content len an not chunked transfer, get body") while True: try: chunk = self._sock.recv(sock.CHUNK_SIZE) except socket.error: break if not chunk: break buf2.write(chunk) sock.close(self._sock) buf2.seek(0) self.response_body = tee.TeeInput(self._sock, self.parser, buf2) else: self.response_body = tee.TeeInput(self._sock, self.parser, buf2, maybe_close=lambda: self.release_connection( self.uri.netloc, self._sock)) # apply on response filters for af in self.response_filters: af.on_response(self) if self.follow_redirect: if self.parser.status_int in (301, 302, 307): if self.method in ('GET', 'HEAD') or \ self.force_follow_redirect: if self.method not in ('GET', 'HEAD') and \ hasattr(self.body, 'seek'): self.body.seek(0) return self.do_redirect() elif self.parser.status_int == 303 and self.method in ('GET', 'HEAD'): # only 'GET' is possible with this status # according the rfc return self.do_redirect() self.final_url = self.parser.headers_dict.get('Location', self.final_url) log.debug("Return response: %s" % self.final_url) return self.response_class(self)
def _monitor_socket(self, fn): """ function used to monitor the socket """ if fn in self.sockets: socket = self.sockets[fn] sock.close(socket) del self.sockets[fn]