def _open(self, url, silent=False):
    """Issue a GET for *url* (http/https, optionally through a proxy),
    following up to MAX_REDIRECTS redirects.

    On success self.response holds the open HTTP response.  Tracker-style
    failure bodies (bencoded dicts with a 'failure reason' key) are stored
    raw in self.error_return.  Any other problem raises IOError internally,
    which the outer handler logs to stderr unless *silent* — the method
    itself never propagates the exception.
    """
    try:
        self.tries += 1
        # Redirects recurse through _open(); cap the depth.
        if self.tries > MAX_REDIRECTS:
            raise IOError('http error', 500,
                          'Internal Server Error: Redirect Recursion')
        scheme, netloc, path, pars, query, fragment = urlparse(url)
        if scheme != 'http' and scheme != 'https':
            raise IOError('url error', 'unknown url type', scheme, url)
        # Rebuild the request target; the fragment is never sent on the wire.
        wanturl = path
        if pars:
            wanturl += ';' + pars
        if query:
            wanturl += '?' + query
        proxyhost = find_proxy(url)
        if proxyhost is None:
            desthost = netloc
            desturl = wanturl
        else:
            # Proxies expect the absolute URL in the request line.
            desthost = proxyhost
            desturl = scheme + '://' + netloc + wanturl
        try:
            self.response = None
            if scheme == 'http':
                self.connection = btHTTPcon(desthost)
            else:
                self.connection = btHTTPScon(desthost)
            self.connection.request('GET', desturl, None,
                                    {'Host': netloc,
                                     'User-Agent': VERSION,
                                     'Accept-Encoding': 'gzip'})
            self.response = self.connection.getresponse()
        except HTTPException as e:
            raise IOError('http error', str(e))
        status = self.response.status
        if status in (301, 302):
            # Follow the redirect on a fresh connection.
            try:
                self.connection.close()
            except Exception:
                pass
            self._open(self.response.getheader('Location'))
            return
        if status != 200:
            # Trackers report failures in bencoded bodies; hand those
            # back to the caller instead of treating them as HTTP errors.
            try:
                data = self._read()
                d = bdecode(data)
                if 'failure reason' in d:
                    self.error_return = data
                    return
            except Exception:
                pass
            raise IOError('http error', status, self.response.reason)
    except Exception as e:
        if not silent:
            print_exc()
            # Same output as the old `print >> sys.stderr, ...` statement.
            sys.stderr.write('zurllib: URL was %s %s\n' % (url, e))
def __init__(self, downloader, url):
    """Prepare an HTTP-seed download from *url*.

    Parses the seed address, opens an HTTPConnection (direct or via
    proxy), pre-builds the request URL with the torrent's info_hash
    appended as a query parameter, and schedules the first request
    after a random 2-10 second delay.  Any setup failure is reported
    through downloader.errorfunc and leaves the object inert.
    """
    SingleDownloadHelperInterface.__init__(self)
    self.downloader = downloader
    self.baseurl = url
    try:
        (self.scheme, self.netloc, upath,
         upars, uquery, ufrag) = urlparse(url)
    except:
        self.downloader.errorfunc('cannot parse http seed address: ' + url)
        return
    if self.scheme != 'http':
        self.downloader.errorfunc('http seed url not http: ' + url)
        return
    self.proxyhost = find_proxy(url)
    target = self.netloc if self.proxyhost is None else self.proxyhost
    try:
        self.connection = HTTPConnection(target)
    except:
        self.downloader.errorfunc('cannot connect to http seed: ' + url)
        return
    # Reassemble the request path, always forcing a query string so the
    # info_hash parameter can be appended unconditionally.
    pieces = [upath]
    if upars:
        pieces.append(';' + upars)
    pieces.append('?')
    if uquery:
        pieces.append(uquery + '&')
    pieces.append('info_hash=' + urllib.quote(self.downloader.infohash))
    self.seedurl = ''.join(pieces)
    self.measure = Measure(downloader.max_rate_period)
    self.index = None
    self.url = ''
    self.requests = []
    self.request_size = 0
    self.endflag = False
    self.error = None
    self.retry_period = 30
    self._retry_period = None
    self.errorcount = 0
    self.goodseed = False
    self.active = False
    self.cancelled = False
    self.resched(randint(2, 10))
def _open(self, url, silent=False):
    """Fetch *url* with GET over http/https, honoring proxies and
    following up to MAX_REDIRECTS redirects.

    Leaves the response in self.response on success; a bencoded tracker
    failure body is saved raw in self.error_return.  All errors are
    raised as IOError inside and caught by the outer handler, which logs
    to stderr unless *silent*; nothing propagates to the caller.
    """
    try:
        self.tries += 1
        # Each redirect re-enters _open(); bound the recursion depth.
        if self.tries > MAX_REDIRECTS:
            raise IOError('http error', 500,
                          'Internal Server Error: Redirect Recursion')
        scheme, netloc, path, pars, query, fragment = urlparse(url)
        if scheme != 'http' and scheme != 'https':
            raise IOError('url error', 'unknown url type', scheme, url)
        # Request target excludes the fragment by construction.
        wanturl = path
        if pars:
            wanturl += ';' + pars
        if query:
            wanturl += '?' + query
        proxyhost = find_proxy(url)
        if proxyhost is None:
            desthost = netloc
            desturl = wanturl
        else:
            # A proxy gets the absolute URL in the request line.
            desthost = proxyhost
            desturl = scheme + '://' + netloc + wanturl
        try:
            self.response = None
            if scheme == 'http':
                self.connection = btHTTPcon(desthost)
            else:
                self.connection = btHTTPScon(desthost)
            headers = {'Host': netloc,
                       'User-Agent': VERSION,
                       'Accept-Encoding': 'gzip'}
            self.connection.request('GET', desturl, None, headers)
            self.response = self.connection.getresponse()
        except HTTPException as e:
            raise IOError('http error', str(e))
        status = self.response.status
        if status in (301, 302):
            # Chase the redirect with a fresh connection.
            try:
                self.connection.close()
            except Exception:
                pass
            self._open(self.response.getheader('Location'))
            return
        if status != 200:
            # A tracker may encode its failure as a bencoded dict;
            # surface that body to the caller rather than erroring.
            try:
                data = self._read()
                d = bdecode(data)
                if 'failure reason' in d:
                    self.error_return = data
                    return
            except Exception:
                pass
            raise IOError('http error', status, self.response.reason)
    except Exception as e:
        if not silent:
            print_exc()
            # Byte-identical output to the former print-statement form.
            sys.stderr.write('zurllib: URL was %s %s\n' % (url, e))