def _open(self, url, silent=False):
    """
    Fetch `url` over http/https, following up to MAX_REDIRECTS 301/302
    redirects (recursively, via self.tries), optionally through a proxy.

    On success leaves the open connection in self.connection and the
    HTTPResponse in self.response.  On a non-200 reply whose body bdecodes
    to a dict with a 'failure reason' key, stores the raw body in
    self.error_return and returns (tracker-style soft failure).

    @param url:    absolute http:// or https:// URL to fetch.
    @param silent: propagated unchanged through redirects (interpreted by
                   callers/overrides, not used directly here).
    @raise IOError: on redirect recursion, unsupported scheme, transport
                    errors (wrapped HTTPException) or non-200 status.
    """
    # NOTE(review): the collapsed original began with a dangling 'try:'
    # that had no matching handler (a syntax error as reconstructed);
    # the structure below matches the upstream BitTornado zurllib layout.
    self.tries += 1
    if self.tries > MAX_REDIRECTS:
        raise IOError('http error', 500, "Internal Server Error: Redirect Recursion")
    (scheme, netloc, path, pars, query, fragment) = urlparse(url)
    if scheme != 'http' and scheme != 'https':
        raise IOError('url error', 'unknown url type', scheme, url)

    # Rebuild the request-URI from the parsed pieces; the fragment is
    # deliberately dropped (it is client-side only).
    wanturl = path
    if pars:
        wanturl += ';' + pars
    if query:
        wanturl += '?' + query
    # if fragment:

    # Arno: proxy-aware — when a proxy is configured we must connect to
    # the proxy host and send the absolute URL as the request target.
    proxyhost = find_proxy(url)
    if proxyhost is None:
        desthost = netloc
        desturl = wanturl
    else:
        desthost = proxyhost
        desturl = scheme + '://' + netloc + wanturl

    try:
        self.response = None
        if scheme == 'http':
            self.connection = btHTTPcon(desthost)
        else:
            self.connection = btHTTPScon(desthost)
        self.connection.request('GET', desturl, None,
                                {'Host': netloc,
                                 'User-Agent': VERSION,
                                 'Accept-Encoding': 'gzip'})
        self.response = self.connection.getresponse()
    except HTTPException as e:
        raise IOError('http error', str(e))

    status = self.response.status
    if status in (301, 302):
        # Follow the redirect with a fresh connection; close failures on
        # the old one are harmless, so they are deliberately ignored.
        try:
            self.connection.close()
        except:
            pass
        self._open(self.response.getheader('Location'), silent)
        return
    if status != 200:
        # A tracker may report a bencoded 'failure reason' with a non-200
        # status; surface that as error_return instead of raising.
        try:
            data = self._read()
            d = bdecode(data)
            if 'failure reason' in d:
                self.error_return = data
                return
        except:
            pass
        raise IOError('http error', status, self.response.reason)
def _open(self, url, silent=False):
    """
    Fetch `url` over http/https, following up to MAX_REDIRECTS 301/302
    redirects (recursively, via self.tries), optionally through a proxy.

    On success leaves the open connection in self.connection and the
    HTTPResponse in self.response.  On a non-200 reply whose body bdecodes
    to a dict with a 'failure reason' key, stores the raw body in
    self.error_return and returns (tracker-style soft failure).

    @param url:    absolute http:// or https:// URL to fetch.
    @param silent: propagated through redirects.  BUGFIX: the original
                   dropped `silent` on the recursive redirect call, so a
                   silent request turned noisy after one redirect.
    @raise IOError: on redirect recursion, unsupported scheme, transport
                    errors (wrapped HTTPException) or non-200 status.
    """
    # NOTE(review): the collapsed original began with a dangling 'try:'
    # that had no matching handler (a syntax error as reconstructed);
    # the structure below matches the upstream BitTornado zurllib layout.
    self.tries += 1
    if self.tries > MAX_REDIRECTS:
        raise IOError('http error', 500, "Internal Server Error: Redirect Recursion")
    (scheme, netloc, path, pars, query, fragment) = urlparse(url)
    if scheme != 'http' and scheme != 'https':
        raise IOError('url error', 'unknown url type', scheme, url)

    # Rebuild the request-URI; the fragment is deliberately dropped.
    wanturl = path
    if pars:
        wanturl += ';' + pars
    if query:
        wanturl += '?' + query
    # if fragment:

    # Proxy-aware: with a proxy we connect to the proxy host and pass the
    # absolute URL as the request target.
    proxyhost = find_proxy(url)
    if proxyhost is None:
        desthost = netloc
        desturl = wanturl
    else:
        desthost = proxyhost
        desturl = scheme + '://' + netloc + wanturl

    try:
        self.response = None
        if scheme == 'http':
            self.connection = btHTTPcon(desthost)
        else:
            self.connection = btHTTPScon(desthost)
        self.connection.request('GET', desturl, None,
                                {'Host': netloc,
                                 'User-Agent': VERSION,
                                 'Accept-Encoding': 'gzip'})
        self.response = self.connection.getresponse()
    except HTTPException as e:
        raise IOError('http error', str(e))

    status = self.response.status
    if status in (301, 302):
        # Follow the redirect; best-effort close of the old connection.
        try:
            self.connection.close()
        except:
            pass
        # BUGFIX: propagate `silent` (the sibling implementation does).
        self._open(self.response.getheader('Location'), silent)
        return
    if status != 200:
        # A tracker may report a bencoded 'failure reason' with a non-200
        # status; surface that as error_return instead of raising.
        try:
            data = self._read()
            d = bdecode(data)
            if 'failure reason' in d:
                self.error_return = data
                return
        except:
            pass
        raise IOError('http error', status, self.response.reason)
def __init__(self, downloader, url, video_support_policy):
    """
    Create an HTTP-seed download helper for `url`, proxy-aware, with
    optional HTTP video-support scheduling.

    Parse failures, non-http schemes and connection failures are reported
    via downloader.errorfunc and abort initialisation early.
    """
    self.downloader = downloader
    self.baseurl = url
    try:
        (self.scheme, self.netloc, path, pars, query, fragment) = urlparse(url)
    except:
        self.downloader.errorfunc('cannot parse http seed address: ' + url)
        return
    if self.scheme != 'http':
        self.downloader.errorfunc('http seed url not http: ' + url)
        return

    # Arno, 2010-03-08: proxy aware — connect to the proxy when one is set.
    self.proxyhost = find_proxy(url)
    connect_to = self.netloc if self.proxyhost is None else self.proxyhost
    try:
        self.connection = HTTPConnection(connect_to)
    except:
        self.downloader.errorfunc('cannot connect to http seed: ' + url)
        return

    self.seedurl = path
    self.measure = Measure(downloader.max_rate_period)
    self.piece_size = self.downloader.storage._piecelen(0)
    self.total_len = self.downloader.storage.total_length

    # Per-request state.
    self.index = None
    self.url = ''
    self.requests = []
    self.request_size = 0
    self.endflag = False

    # Error / retry bookkeeping.
    self.error = None
    self.retry_period = 30
    self._retry_period = None
    self.errorcount = 0
    self.goodseed = False
    self.active = False
    self.cancelled = False

    # HTTP Video Support.
    self.request_lock = Lock()
    self.video_support_policy = video_support_policy    # Niels: 08-03-2012 using svc_video or play_video in download_bt1
    self.video_support_enabled = False      # never start with support on
    self.video_support_speed = 0.0          # start at the faster rescheduling speed
    self.video_support_slow_start = False   # when set, delay the first request so peers get bandwidth first

    # Arno, 2010-04-07: wait 1 second before using the HTTP seed.  Only
    # schedule immediately when no video-support policy is in force.
    if not self.video_support_policy:
        self.resched(1)
def __init__(self, downloader, url, video_support_policy):
    """
    Initialise an HTTP seed for `url` on behalf of `downloader`.

    Bails out early (after reporting through downloader.errorfunc) when
    the URL cannot be parsed, is not http, or no connection can be made.
    """
    self.downloader = downloader
    self.baseurl = url
    try:
        (self.scheme, self.netloc, path, pars, query, fragment) = urlparse(url)
    except:
        self.downloader.errorfunc('cannot parse http seed address: ' + url)
        return
    if self.scheme != 'http':
        self.downloader.errorfunc('http seed url not http: ' + url)
        return

    # Arno, 2010-03-08: proxy aware.
    self.proxyhost = find_proxy(url)
    try:
        if self.proxyhost is None:
            # Direct connection to the seed host.
            self.connection = HTTPConnection(self.netloc)
        else:
            # Tunnel through the configured proxy instead.
            self.connection = HTTPConnection(self.proxyhost)
    except:
        self.downloader.errorfunc('cannot connect to http seed: ' + url)
        return

    self.seedurl = path
    self.measure = Measure(downloader.max_rate_period)
    self.index = None
    self.piece_size = self.downloader.storage._piecelen(0)
    self.total_len = self.downloader.storage.total_length
    self.url = ''
    self.requests = []
    self.request_size = 0
    self.endflag = False

    # Retry/error accounting, all reset to the idle state.
    self.error = None
    self.retry_period = 30
    self._retry_period = None
    self.errorcount = 0
    self.goodseed = False
    self.active = False
    self.cancelled = False

    # HTTP Video Support.
    self.request_lock = Lock()
    self.video_support_policy = video_support_policy    # Niels: 08-03-2012 using svc_video or play_video in download_bt1
    self.video_support_enabled = False      # don't start immediately with support
    self.video_support_speed = 0.0          # start with the faster rescheduling speed
    self.video_support_slow_start = False   # if enabled, delay the first request

    # Arno, 2010-04-07: wait 1 second before using HTTP seed (TODO good
    # policy).  Without a video-support policy the seed is used normally.
    if not self.video_support_policy:
        self.resched(1)
def __init__(self, downloader, url):
    """
    Initialise a webseed helper for `url`, appending the downloader's
    info_hash as a query parameter, then schedule the first request after
    a random 2-10 second delay.
    """
    self.downloader = downloader
    self.baseurl = url
    try:
        (self.scheme, self.netloc, path, pars, query, fragment) = urlparse(url)
    except:
        self.downloader.errorfunc('cannot parse http seed address: ' + url)
        return
    if self.scheme != 'http':
        self.downloader.errorfunc('http seed url not http: ' + url)
        return

    # Arno, 2010-03-08: proxy aware — target the proxy when configured.
    self.proxyhost = find_proxy(url)
    target = self.netloc if self.proxyhost is None else self.proxyhost
    try:
        self.connection = HTTPConnection(target)
    except:
        self.downloader.errorfunc('cannot connect to http seed: ' + url)
        return

    # Assemble the seed URL: path[;params]?[query&]info_hash=<quoted hash>
    pieces = [path]
    if pars:
        pieces.append(';' + pars)
    pieces.append('?')
    if query:
        pieces.append(query + '&')
    pieces.append('info_hash=' + urllib.quote(self.downloader.infohash))
    self.seedurl = ''.join(pieces)

    self.measure = Measure(downloader.max_rate_period)

    # Per-request state.
    self.index = None
    self.url = ''
    self.requests = []
    self.request_size = 0
    self.endflag = False

    # Error / retry bookkeeping.
    self.error = None
    self.retry_period = 30
    self._retry_period = None
    self.errorcount = 0
    self.goodseed = False
    self.active = False
    self.cancelled = False

    # Randomised start so seeds aren't all hit at once.
    self.resched(randint(2, 10))
def __init__(self, downloader, url):
    """
    Set up a webseed connection for `url` (with info_hash appended to the
    query string) and schedule the first fetch 2-10 seconds from now.
    """
    self.downloader = downloader
    self.baseurl = url
    try:
        (self.scheme, self.netloc, path, pars, query, fragment) = urlparse(url)
    except:
        self.downloader.errorfunc('cannot parse http seed address: ' + url)
        return
    if self.scheme != 'http':
        self.downloader.errorfunc('http seed url not http: ' + url)
        return

    # Arno, 2010-03-08: proxy aware.
    self.proxyhost = find_proxy(url)
    try:
        if self.proxyhost is None:
            self.connection = HTTPConnection(self.netloc)
        else:
            self.connection = HTTPConnection(self.proxyhost)
    except:
        self.downloader.errorfunc('cannot connect to http seed: ' + url)
        return

    # Rebuild the request path, always ending in an info_hash parameter:
    #   path[;params]?[query&]info_hash=<url-quoted hash>
    seedurl = path
    if pars:
        seedurl += ';' + pars
    seedurl += '?'
    if query:
        seedurl += query + '&'
    seedurl += 'info_hash=' + urllib.quote(self.downloader.infohash)
    self.seedurl = seedurl

    self.measure = Measure(downloader.max_rate_period)
    self.index = None
    self.url = ''
    self.requests = []
    self.request_size = 0
    self.endflag = False
    self.error = None
    self.retry_period = 30
    self._retry_period = None
    self.errorcount = 0
    self.goodseed = False
    self.active = False
    self.cancelled = False

    # Stagger the first request randomly across 2-10 seconds.
    self.resched(randint(2, 10))
def post(self, xml_str):
    """
    Post a status report to the living lab using multipart/form-data.

    @param xml_str: the XML report body, sent as a single form part named
                    "NextShareData".
    Side effects: increments self.num_reports; on a non-200 response calls
    self.error_handler(status, body) when one is set, otherwise prints to
    stderr.
    """
    # print >>sys.stderr, xml_str
    self.num_reports += 1

    boundary = "------------------ThE_bOuNdArY_iS_hErE_$"
    # BUGFIX (RFC 2046): an *opening* part delimiter is "--" + boundary;
    # only the *closing* delimiter carries a trailing "--".  The original
    # emitted the closing form for the opening delimiter too, which strict
    # multipart parsers reject.
    base = ["--" + boundary]
    base.append('Content-Disposition: form-data; name="NextShareData"; filename="NextShareData"')
    base.append("Content-Type: text/xml")
    base.append("")
    base.append(xml_str)
    base.append("--" + boundary + "--")
    base.append("")
    base.append("")
    body = "\r\n".join(base)

    # Arno, 2010-03-09: Make proxy aware and use modern httplib classes.
    # With a proxy we connect to the proxy host and send the absolute URL.
    wanturl = 'http://' + self.host + self.path
    proxyhost = find_proxy(wanturl)
    if proxyhost is None:
        desthost = self.host
        desturl = self.path
    else:
        desthost = proxyhost
        desturl = wanturl

    h = httplib.HTTPConnection(desthost)
    h.putrequest("POST", desturl)
    # 08/11/10 Boudewijn: do not send Host, it is automatically generated
    # by h.putrequest.  Sending it twice causes invalid HTTP and Virtual
    # Hosts to fail.
    # h.putheader("Host",self.host)
    h.putheader("User-Agent", "NextShare status reporter 2010.3")
    h.putheader("Content-Type", "multipart/form-data; boundary=" + boundary)
    h.putheader("Content-Length", str(len(body)))
    h.endheaders()
    h.send(body)

    resp = h.getresponse()
    # BUGFIX: an HTTPResponse body can be read only once.  The original
    # consumed it in the DEBUG print and then called resp.read() again for
    # the error handler, which therefore always received an empty string.
    resp_body = resp.read()
    if DEBUG:
        # print >>sys.stderr, "LivingLabReporter:\n", xml_str
        print >>sys.stderr, "LivingLabReporter:", repr(resp.status), repr(resp.reason), "\n", resp.getheaders(), "\n", resp_body.replace("\\n", "\n")

    if resp.status != 200:
        if self.error_handler:
            # Best-effort notification; a failing handler must not kill
            # the reporter.
            try:
                self.error_handler(resp.status, resp_body)
            except Exception:
                pass
        else:
            print >> sys.stderr, "Error posting but no error handler:", resp.status
            print >> sys.stderr, resp_body