def get_response(file, url, errorfunc):
    """Fetch and validate a bencoded response from a file or a URL.

    Reads the local path ``file`` when given, otherwise downloads ``url``.
    Errors are reported through ``errorfunc(str)`` and None is returned;
    on success the decoded response dictionary is returned.
    """
    try:
        if file:
            h = open(file, 'rb')
            try:
                # quick test to see if responsefile contains a dict
                line = h.read(10)
                front = line.split(':', 1)[0]
                # A bencoded dict starts with 'd'; the text between it and
                # the first ':' must be the integer length of the first key.
                assert front[0] == 'd'
                int(front[1:])
                # NOTE(review): int() can raise ValueError here, which is
                # not in the except tuple below — confirm this is intended.
            except (AssertionError, IOError):
                errorfunc(file + ' is not a valid responsefile')
                return None
            try:
                h.seek(0)
            except IOError:
                # Handle cannot rewind: reopen the file from the start.
                try:
                    h.close()
                except IOError:
                    pass
                h = open(file, 'rb')
        else:
            try:
                h = urlopen(url)
            except socket.error:
                errorfunc(url + ' bad url')
                return None
        response = h.read()
    except IOError as e:
        errorfunc('problem getting response info - ' + str(e))
        return None
    # Best-effort close; the payload is already read into memory.
    try:
        h.close()
    except (IOError, socket.error):
        pass
    try:
        try:
            response = bdecode(response)
        except ValueError:
            # Strict decode failed: warn, then retry leniently.
            errorfunc("warning: bad data in responsefile")
            response = bdecode(response, sloppy=1)
        check_type(response, dict)
        check_info(response.get('info'))
        check_type(response.get('announce'), str)
    except ValueError as e:
        errorfunc("got bad file info - " + str(e))
        return None
    return response
def get_metainfo(fname, url, errorfunc):
    """Load a MetaInfo dictionary from a local metafile or a URL.

    Arguments:
        fname      path to a metafile; falsy to download from url instead
        url        URL of the metafile, used only when fname is falsy
        errorfunc  callable(str) used to report problems

    Returns the MetaInfo, or None after reporting an error through
    errorfunc.
    """
    with WarningLock(lambda *args: errorfunc("warning: bad data in metafile")):
        if fname:
            try:
                metainfo = MetaInfo.read(fname)
            except (OSError, TypeError, KeyError, ValueError):
                errorfunc(fname + ' is not a valid metafile')
                return None
        else:
            try:
                with urlopen(url) as handle:
                    metainfo = MetaInfo(bdecode(handle.read()))
            except IOError as e:
                errorfunc('problem getting response info - ' + str(e))
                return None
            except (TypeError, KeyError, ValueError):
                # Bug fix: fname is falsy in this branch (the data came
                # from url), so report the url, not fname — the old code
                # produced a blank name or raised on None + str.
                errorfunc(url + ' is not a valid metafile')
                return None
        try:
            check_info(metainfo.get('info'))
        except ValueError as e:
            errorfunc("got bad file info - " + str(e))
            return None
    return metainfo
def get_metainfo(fname, url, errorfunc):
    """Load a MetaInfo dictionary from a local metafile or a URL.

    Arguments:
        fname      path to a metafile; falsy to download with geturl(url)
        url        URL of the metafile, used only when fname is falsy
        errorfunc  callable(str) used to report problems

    Returns the MetaInfo, or None after reporting an error through
    errorfunc.
    """
    with WarningLock(lambda *args: errorfunc("warning: bad data in metafile")):
        if fname:
            try:
                metainfo = MetaInfo.read(fname)
            except (OSError, TypeError, KeyError, ValueError):
                errorfunc(fname + ' is not a valid metafile')
                return None
        else:
            try:
                metainfo = MetaInfo(bdecode(geturl(url)))
            except IOError as e:
                errorfunc('problem getting response info - ' + str(e))
                return None
            except (TypeError, KeyError, ValueError):
                # Bug fix: fname is falsy in this branch (the data came
                # from url), so report the url, not fname — the old code
                # produced a blank name or raised on None + str.
                errorfunc(url + ' is not a valid metafile')
                return None
        try:
            check_info(metainfo.get('info'))
        except ValueError as e:
            errorfunc("got bad file info - " + str(e))
            return None
    return metainfo
def getTorrentData(self, torrent):
    """Read a torrent data file from cache

    Results are memoized in self.torrentDataBuffer under the raw
    infohash, the same key used for lookups.
    """
    if torrent in self.torrentDataBuffer:
        return self.torrentDataBuffer[torrent]
    fname = os.path.join(self.dir_datacache, hexlify(torrent))
    if not os.path.exists(fname):
        return None
    try:
        with open(fname, 'rb') as f:
            data = bdecode(f.read())
    except (IOError, ValueError):
        # Unreadable or corrupt cache file: memoize the failure too.
        data = None
    # Bug fix: store under the lookup key (torrent), not the file name —
    # the old code keyed writes by fname so reads never hit the buffer.
    self.torrentDataBuffer[torrent] = data
    return data
def getTorrentData(self, torrent):
    """Read a torrent data file from cache

    Results are memoized in self.torrentDataBuffer under the raw
    infohash, the same key used for lookups.
    """
    if torrent in self.torrentDataBuffer:
        return self.torrentDataBuffer[torrent]
    fname = os.path.join(self.dir_datacache, hexlify(torrent).decode())
    if not os.path.exists(fname):
        return None
    try:
        with open(fname, 'rb') as f:
            data = bdecode(f.read())
    except (IOError, ValueError):
        # Unreadable or corrupt cache file: memoize the failure too.
        data = None
    # Bug fix: store under the lookup key (torrent), not the file name —
    # the old code keyed writes by fname so reads never hit the buffer.
    self.torrentDataBuffer[torrent] = data
    return data
def getTorrent(self, torrent, version=-1):
    """Return the decoded contents of a cached torrent file.

    version == -1 (default) selects the newest variation on record;
    any other value > -1 selects that specific version.  Returns None
    when the file is missing, unreadable, or not valid bencoded data.
    """
    key = hexlify(torrent)
    fname = os.path.join(self.dir_torrentcache, key)
    if version == -1:
        # Default to the most recent variation of this torrent.
        version = max(self.getTorrentVariations(key))
    if version:
        fname = fname + '.' + str(version)
    try:
        with open(fname, 'rb') as handle:
            raw = handle.read()
    except IOError:
        return None
    try:
        return bdecode(raw)
    except ValueError:
        return None
def _open(self, url):
    """Open url over HTTP(S), following redirects recursively.

    Stores the connection in self.connection and the response in
    self.response; a bencoded failure body is kept in self.error_return.
    Raises IOError tuples for URL, HTTP, and redirect-recursion errors.
    """
    self.tries += 1
    if self.tries > MAX_REDIRECTS:
        # Each redirect re-enters _open; bail out after too many hops.
        raise IOError(('http error', 500,
                       "Internal Server Error: Redirect Recursion"))
    (scheme, netloc, path, pars, query, _) = urlparse.urlparse(url)
    if scheme != 'http' and scheme != 'https':
        raise IOError(('url error', 'unknown url type', scheme, url))
    # Rebuild the request target from path, params and query.
    url = path
    if pars:
        url += ';' + pars
    if query:
        url += '?' + query
    # if fragment:
    try:
        if scheme == 'http':
            self.connection = btHTTPcon(netloc)
        else:
            self.connection = btHTTPScon(netloc)
        self.connection.request('GET', url, None,
                                {'User-Agent': VERSION,
                                 'Accept-Encoding': 'gzip'})
        self.response = self.connection.getresponse()
    except httplib.HTTPException as e:
        raise IOError(('http error', str(e)))
    status = self.response.status
    if status in (301, 302):
        # Redirect: drop this connection and recurse on the new location.
        try:
            self.connection.close()
        except socket.error:
            pass
        self._open(self.response.getheader('Location'))
        return
    if status != 200:
        # The error body may itself be a bencoded tracker failure.
        try:
            data = self._read()
            d = bdecode(data)
            if 'failure reason' in d:
                self.error_return = data
                return
        except (IOError, ValueError):
            pass
        raise IOError(('http error', status, self.response.reason))
def _open(self, url):
    """Issue a GET for url, recursing through redirects.

    Leaves the connection in self.connection and the reply in
    self.response; bencoded failure bodies go to self.error_return.
    Raises IOError tuples on URL, HTTP, or redirect-recursion errors.
    """
    self.tries += 1
    if self.tries > MAX_REDIRECTS:
        raise IOError(('http error', 500,
                       "Internal Server Error: Redirect Recursion"))
    scheme, host, path, params, query, _ = urlparse.urlparse(url)
    if scheme not in ('http', 'https'):
        raise IOError(('url error', 'unknown url type', scheme, url))
    # Reassemble the request target from its components.
    target = path
    if params:
        target = target + ';' + params
    if query:
        target = target + '?' + query
    try:
        if scheme == 'http':
            self.connection = btHTTPcon(host)
        else:
            self.connection = btHTTPScon(host)
        headers = {'User-Agent': VERSION, 'Accept-Encoding': 'gzip'}
        self.connection.request('GET', target, None, headers)
        self.response = self.connection.getresponse()
    except httplib.HTTPException as err:
        raise IOError(('http error', str(err)))
    code = self.response.status
    if code in (301, 302):
        # Redirect: close this connection and follow the new location.
        try:
            self.connection.close()
        except socket.error:
            pass
        self._open(self.response.getheader('Location'))
        return
    if code == 200:
        return
    # Non-OK status: the body may be a bencoded tracker failure message.
    try:
        body = self._read()
        if 'failure reason' in bdecode(body):
            self.error_return = body
            return
    except (IOError, ValueError):
        pass
    raise IOError(('http error', code, self.response.reason))
def _open(self, url):
    """Open url, iteratively following up to MAX_REDIRECTS redirects.

    Leaves the reply in self.response; a bencoded failure body is stored
    in self.error_return.  Raises IOError tuples on HTTP errors and on
    redirect recursion.
    """
    try:
        self._setconn(url)
    except HTTPException as e:
        raise IOError(('http error', str(e)))
    for _ in range(MAX_REDIRECTS):
        try:
            self.connection.request('GET', self.url, None,
                                    {'User-Agent': VERSION,
                                     'Accept-Encoding': 'gzip'})
            self.response = self.connection.getresponse()
            if self.response.status == 200:   # Success
                return
            if self.response.status in (301, 302):   # Redirect
                self._setconn(self.response.getheader('Location'))
                continue
        except HTTPException as e:
            raise IOError(('http error', str(e)))
        # Handle bencoded errors
        try:
            data = self._read()
            d = bdecode(data)
            if 'failure reason' in d:
                self.error_return = data
                return
        except (IOError, ValueError):
            pass
        # General HTTP error
        raise IOError(
            ('http error', self.response.status, self.response.reason))
    else:
        # for/else: loop exhausted without returning => too many redirects.
        raise IOError(('http error', 500,
                       "Internal Server Error: Redirect Recursion"))
def _open(self, url):
    """Fetch url, retrying through at most MAX_REDIRECTS redirects.

    Stores the reply in self.response and bencoded failure bodies in
    self.error_return; raises IOError tuples on HTTP errors or when the
    redirect budget is exhausted.
    """
    try:
        self._setconn(url)
    except HTTPException as err:
        raise IOError(('http error', str(err)))
    hops = 0
    while hops < MAX_REDIRECTS:
        hops += 1
        try:
            headers = {'User-Agent': VERSION, 'Accept-Encoding': 'gzip'}
            self.connection.request('GET', self.url, None, headers)
            self.response = self.connection.getresponse()
            status = self.response.status
            if status == 200:
                # Success
                return
            if status in (301, 302):
                # Redirect: point the connection at the new location.
                self._setconn(self.response.getheader('Location'))
                continue
        except HTTPException as err:
            raise IOError(('http error', str(err)))
        # The error body may be a bencoded tracker failure message.
        try:
            body = self._read()
            if 'failure reason' in bdecode(body):
                self.error_return = body
                return
        except (IOError, ValueError):
            pass
        raise IOError(('http error', self.response.status,
                       self.response.reason))
    # Every pass either returned, raised, or redirected: getting here
    # means the redirect budget ran out.
    raise IOError(('http error', 500,
                   "Internal Server Error: Redirect Recursion"))
def __init__(self, config, rawserver):
    """Initialize the tracker: caches, persisted state, logging, and
    periodic tasks on rawserver.

    Arguments:
        config     dict of tracker configuration values (see keys below)
        rawserver  event loop providing add_task(func, delay)
    """
    self.config = config
    self.response_size = config['response_size']
    self.dfile = config['dfile']
    self.natcheck = config['nat_check']
    favicon = config['favicon']
    self.parse_dir_interval = config['parse_dir_interval']
    self.favicon = None
    if favicon:
        try:
            with open(favicon, 'r') as h:
                self.favicon = h.read()
        except IOError:
            print "**warning** specified favicon file -- %s -- does not " \
                "exist." % favicon
    self.rawserver = rawserver
    self.cached = {}    # format: infohash: [[time1, l1, s1], ...]
    self.cached_t = {}  # format: infohash: [time, cache]
    self.times = {}
    self.state = {}
    self.seedcount = {}
    self.allowed_IPs = None
    self.banned_IPs = None
    if config['allowed_ips'] or config['banned_ips']:
        self.allowed_ip_mtime = 0
        self.banned_ip_mtime = 0
        self.read_ip_lists()
    self.only_local_override_ip = config['only_local_override_ip']
    if self.only_local_override_ip == 2:
        # 2 means "auto": override only when NAT checking is disabled.
        self.only_local_override_ip = not config['nat_check']
    if CHECK_PEER_ID_ENCRYPTED and not CRYPTO_OK:
        print '**warning** crypto library not installed, cannot ' \
            'completely verify encrypted peers'
    # Restore persisted peer state from the dfile, if present and sane.
    if os.path.exists(self.dfile):
        try:
            with open(self.dfile, 'rb') as h:
                ds = h.read()
            tempstate = bdecode(ds)
            if 'peers' not in tempstate:
                # Old-style statefile: the whole dict was the peer table.
                tempstate = {'peers': tempstate}
            statefiletemplate(tempstate)
            self.state = tempstate
        except (IOError, ValueError, TypeError):
            print '**warning** statefile ' + self.dfile + \
                ' corrupt; resetting'
    self.downloads = self.state.setdefault('peers', {})
    self.completed = self.state.setdefault('completed', {})
    self.becache = {}
    ''' format: infohash: [[l0, s0], [l1, s1], ...]
            l0,s0 = compact, not requirecrypto=1
            l1,s1 = compact, only supportcrypto=1
            l2,s2 = [compact, crypto_flag], all peers
        if --compact_reqd 0:
            l3,s3 = [ip,port,id]
            l4,l4 = [ip,port] nopeerid
    '''
    if config['compact_reqd']:
        x = 3
    else:
        x = 5
    self.cache_default = [({}, {}) for _ in xrange(x)]
    # Rebuild seed counts and NAT-check results from restored peer state.
    for infohash, ds in self.downloads.iteritems():
        self.seedcount[infohash] = 0
        # NOTE(review): ds is mutated (del ds[x]) while iterating
        # iteritems(); in Python 2 this can raise RuntimeError — confirm
        # (items() would snapshot the pairs first).
        for x, y in ds.iteritems():
            ip = y['ip']
            if self.allowed_IPs and ip not in self.allowed_IPs \
                    or self.banned_IPs and ip in self.banned_IPs:
                del ds[x]
                continue
            if not y['left']:
                self.seedcount[infohash] += 1
            if y.get('nat', -1):
                continue
            gip = y.get('given_ip')
            if gip and is_valid_ip(gip) and \
                    (not self.only_local_override_ip or ip in local_IPs):
                ip = gip
            self.natcheckOK(infohash, x, ip, y['port'], y)
    for x in self.downloads:
        self.times[x] = {}
        for y in self.downloads[x]:
            self.times[x][y] = 0
    self.trackerid = createPeerID('-T-')
    random.seed(self.trackerid)
    self.reannounce_interval = config['reannounce_interval']
    self.save_dfile_interval = config['save_dfile_interval']
    self.show_names = config['show_names']
    rawserver.add_task(self.save_state, self.save_dfile_interval)
    self.prevtime = clock()
    self.timeout_downloaders_interval = config[
        'timeout_downloaders_interval']
    rawserver.add_task(self.expire_downloaders,
                       self.timeout_downloaders_interval)
    self.logfile = None
    self.log = None
    # Optionally redirect stdout into a logfile ('-' means stdout itself).
    if config['logfile'] and config['logfile'] != '-':
        try:
            self.logfile = config['logfile']
            self.log = open(self.logfile, 'a')
            sys.stdout = self.log
            print "# Log Started: ", isotime()
        except IOError:
            # NOTE(review): sys.exc_info()[0] is a type, not a str;
            # this concatenation would raise TypeError — confirm.
            print "**warning** could not redirect stdout to log file: " + \
                sys.exc_info()[0]
    if config['hupmonitor']:
        # Reopen the logfile on SIGHUP (log-rotation support).
        def huphandler(signum, frame, self=self):
            try:
                self.log.close()
                self.log = open(self.logfile, 'a')
                sys.stdout = self.log
                print "# Log reopened: ", isotime()
            except IOError:
                print "**warning** could not reopen logfile"
        signal.signal(signal.SIGHUP, huphandler)
    self.allow_get = config['allow_get']
    self.t2tlist = T2TList(config['multitracker_enabled'], self.trackerid,
                           config['multitracker_reannounce_interval'],
                           config['multitracker_maxpeers'],
                           config['http_timeout'], self.rawserver)
    # Configure the torrent allow-list: explicit list file, a watched
    # directory, or no restriction at all.
    if config['allowed_list']:
        if config['allowed_dir']:
            print '**warning** allowed_dir and allowed_list options ' \
                'cannot be used together'
            print '**warning** disregarding allowed_dir'
            config['allowed_dir'] = ''
        self.allowed = self.state.setdefault('allowed_list', {})
        self.allowed_list_mtime = 0
        self.parse_allowed()
        self.remove_from_state('allowed', 'allowed_dir_files')
        if config['multitracker_allowed'] == 'autodetect':
            config['multitracker_allowed'] = 'none'
        config['allowed_controls'] = 0
    elif config['allowed_dir']:
        self.allowed = self.state.setdefault('allowed', {})
        self.allowed_dir_files = self.state.setdefault(
            'allowed_dir_files', {})
        self.allowed_dir_blocked = set()
        self.parse_allowed()
        self.remove_from_state('allowed_list')
    else:
        self.allowed = None
        self.remove_from_state('allowed', 'allowed_dir_files',
                               'allowed_list')
        if config['multitracker_allowed'] == 'autodetect':
            config['multitracker_allowed'] = 'none'
        config['allowed_controls'] = 0
    # Detect platforms where unquote('+') is not a space.
    self.uq_broken = urllib.unquote('+') != ' '
    self.keep_dead = config['keep_dead']
    self.Filter = Filter(rawserver.add_task)
    aggregator = config['aggregator']
    if aggregator == '0':
        self.is_aggregator = False
        self.aggregator_key = None
    else:
        self.is_aggregator = True
        if aggregator == '1':
            self.aggregator_key = None
        else:
            self.aggregator_key = aggregator
        # Aggregators never NAT-check peers themselves.
        self.natcheck = False
    send = config['aggregate_forward']
    if not send:
        self.aggregate_forward = None
    else:
        # Format: "url" or "url,password"
        sends = send.split(',')
        self.aggregate_forward = sends[0]
        self.aggregate_password = sends[1] if len(sends) > 1 else None
    self.dedicated_seed_id = config['dedicated_seed_id']
    self.is_seeded = {}
    self.cachetime = 0
    self.cachetimeupdate()
def announce(self, infohash, peer_id, event=0, downloaded=0, uploaded=0,
             left=0, num_want=-1, snoop=False):
    """Send an announce request to the tracker.

    Arguments:
        infohash    bytes[20]  SHA1 hash of bencoded Info dictionary
        peer_id     bytes      unique peer ID
        event       int        0 update / 1 started / 2 completed /
                               3 stopped
        downloaded  int        number of bytes downloaded
        uploaded    int        number of bytes uploaded
        left        int        number of bytes left to download
        num_want    int        number of peers to request (optional)
        snoop       bool       query tracker without affecting stats

    Returns a Response mapping with the tracker's reply — typically
    'interval', 'complete', 'incomplete', 'peers'/'peers6' (lists of
    {'ip', 'port'}), 'crypto_flags', 'min interval', 'tracker id', and
    'warning message' — or {'failure reason': str} when the tracker
    rejects the request.  With snoop, a 'stopped' event is sent and most
    trackers reply with an empty peer list.

    Raises IOError on a non-200 reply whose body is not bencoded.
    """
    if snoop:
        # Masquerade as a stopping peer so statistics are unaffected.
        options = [('info_hash', infohash), ('peer_id', peer_id),
                   ('event', 'stopped'), ('port', 0), ('compact', True),
                   ('uploaded', 0), ('downloaded', 0), ('left', 1),
                   ('tracker', True), ('numwant', num_want)]
    else:
        options = [('info_hash', infohash), ('peer_id', peer_id)]
        if event:
            options.append(('event', self.events[event]))
        options += self.client + self.peer_options
        options += [('uploaded', uploaded), ('downloaded', downloaded),
                    ('left', left)]
        options += self.crypto_options
        options.append(('key', base64.urlsafe_b64encode(self.key)))
        if self.trackerid:
            options.append(('trackerid', self.trackerid))
        if num_want >= 0:
            options.append(('numwant', num_want))
    # In Python 3.5, we can switch to the urlencode line. In the meantime,
    # keep using RequestURL
    query = str(RequestURL(options))
    # query = urllib.parse.urlencode(options, quote_via=urllib.parse.quote)
    response, raw = self.send_query(query)
    if response.status == 200:
        ret = Response(bdecode(raw))
        # Remember any tracker-assigned id for subsequent announces.
        if 'trackerid' in ret:
            self.trackerid = ret['trackerid']
        return ret
    try:
        return Response(bdecode(raw))
    except ValueError:
        raise IOError(('http error', response.status, response.reason))
def _rerequest_single(self, t, s, l, callback):
    """Query one tracker URL and schedule processing of its reply.

    Arguments:
        t         tracker announce URL (may already contain a query)
        s         query string to append
        l         lock token passed to self.lock.trip/unwait
        callback  invoked via postrequest once the reply is processed

    Error states are recorded in self.errorcodes; all scheduling goes
    through self.externalsched.
    """
    try:
        closer = [None]

        def timedout(self=self, l=l, closer=closer):
            # Record the timeout (if this attempt still owns the lock),
            # then force the connection shut so the read unblocks.
            if self.lock.trip(l):
                self.errorcodes['troublecode'] = 'Problem connecting to ' \
                    'tracker - timeout exceeded'
                self.lock.unwait(l)
            try:
                closer[0]()
            except Exception:
                pass

        self.externalsched(timedout, self.timeout)
        err = None
        try:
            url, q = t.split('?', 1)
            q += '&' + s
        except ValueError:
            # No '?' in the tracker URL: use the query string as-is.
            url = t
            q = s
        h = None
        try:
            h = urlopen(url + '?' + q)
            closer[0] = h.close
            data = h.read()
        except (IOError, socket.error) as e:
            err = 'Problem connecting to tracker - ' + str(e)
        except Exception:
            err = 'Problem connecting to tracker'
        # Bug fix: when urlopen itself failed, h was never bound and the
        # unconditional h.close() raised NameError, diverting the error
        # path into self.exception(); only close an opened handle.
        if h is not None:
            try:
                h.close()
            except (IOError, socket.error):
                pass
        if err:
            if self.lock.trip(l):
                self.errorcodes['troublecode'] = err
                self.lock.unwait(l)
            return
        if data == '':
            if self.lock.trip(l):
                self.errorcodes['troublecode'] = 'no data from tracker'
                self.lock.unwait(l)
            return
        try:
            r = bdecode(data, sloppy=1)
            check_peers(r)
        except ValueError as e:
            if self.lock.trip(l):
                self.errorcodes['bad_data'] = 'bad data from tracker - ' \
                    + str(e)
                self.lock.unwait(l)
            return
        if 'failure reason' in r:
            if self.lock.trip(l):
                self.errorcodes['rejected'] = self.rejectedmessage + \
                    r['failure reason']
                self.lock.unwait(l)
            return
        if self.lock.trip(l, True):
            # success!
            self.lock.unwait(l)
        else:
            # attempt timed out, don't do a callback
            callback = lambda: None

        # even if the attempt timed out, go ahead and process data
        def add(self=self, r=r, callback=callback):
            self.postrequest(r, callback)

        self.externalsched(add)
    except Exception:
        self.exception(callback)
def test_bdecode(self):
    """Test decoding of valid and erroneous sample strings"""
    # Malformed inputs: every one of these must raise ValueError.
    for bad in (b'0:0:', b'ie', b'i341foo382e', b'i-0e', b'i123', b'',
                b'i6easd', b'35208734823ljdahflajhdf', b'2:abfdjslhfld',
                b'02:xy', b'l', b'leanfdldjfh', b'relwjhrlewjh', b'd',
                b'defoobar', b'd3:fooe', b'di1e0:e', b'd1:b0:1:a0:e',
                b'd1:a0:1:a0:e', b'i03e', b'l01:ae', b'9999:x', b'l0:',
                b'd0:0:', b'd0:'):
        self.assertRaises(ValueError, bdecode, bad)
    # Well-formed inputs paired with their expected decoded values.
    cases = [
        (b'i4e', 4),
        (b'i0e', 0),
        (b'i123456789e', 123456789),
        (b'i-10e', -10),
        (b'0:', ''),
        (b'3:abc', 'abc'),
        (b'10:1234567890', '1234567890'),
        (b'le', []),
        (b'l0:0:0:e', ['', '', '']),
        (b'li1ei2ei3ee', [1, 2, 3]),
        (b'l3:asd2:xye', ['asd', 'xy']),
        (b'll5:Alice3:Bobeli2ei3eee', [['Alice', 'Bob'], [2, 3]]),
        (b'de', {}),
        (b'd3:agei25e4:eyes4:bluee', {'age': 25, 'eyes': 'blue'}),
        (b'd8:spam.mp3d6:author5:Alice6:lengthi100000eee',
         {'spam.mp3': {'author': 'Alice', 'length': 100000}}),
    ]
    for data, expected in cases:
        self.assertEqual(bdecode(data), expected)
def announce(self, infohash, peer_id, event=0, downloaded=0, uploaded=0,
             left=0, num_want=-1, snoop=False):
    """Announce to the tracker and return its decoded reply.

    Arguments:
        infohash    bytes[20]  SHA1 hash of bencoded Info dictionary
        peer_id     bytes      unique peer ID
        event       int        0 update / 1 started / 2 completed /
                               3 stopped
        downloaded  int        number of bytes downloaded
        uploaded    int        number of bytes uploaded
        left        int        number of bytes left to download
        num_want    int        number of peers to request (optional)
        snoop       bool       query tracker without affecting stats

    Returns a Response mapping ('interval', 'complete', 'incomplete',
    'peers'/'peers6' as lists of {'ip', 'port'}, 'crypto_flags',
    'min interval', 'tracker id', 'warning message'), or
    {'failure reason': str} when the tracker rejects the request.
    With snoop, a 'stopped' event is sent and most trackers reply with
    an empty peer list.

    Raises IOError on a non-200 reply whose body is not bencoded.
    """
    if snoop:
        # Pose as a stopping peer so the query leaves stats untouched.
        options = [('info_hash', infohash), ('peer_id', peer_id),
                   ('event', 'stopped'), ('port', 0), ('compact', True),
                   ('uploaded', 0), ('downloaded', 0), ('left', 1),
                   ('tracker', True), ('numwant', num_want)]
    else:
        maybe_event = [('event', self.events[event])] if event else []
        maybe_tid = ([('trackerid', self.trackerid)]
                     if self.trackerid else [])
        maybe_numwant = [('numwant', num_want)] if num_want >= 0 else []
        options = ([('info_hash', infohash), ('peer_id', peer_id)]
                   + maybe_event + self.client + self.peer_options
                   + [('uploaded', uploaded), ('downloaded', downloaded),
                      ('left', left)]
                   + self.crypto_options
                   + [('key', base64.urlsafe_b64encode(self.key))]
                   + maybe_tid + maybe_numwant)
    # In Python 3.5, we can switch to the urlencode line. In the meantime,
    # keep using RequestURL
    query = str(RequestURL(options))
    #query = urllib.parse.urlencode(options, quote_via=urllib.parse.quote)
    response, raw = self.send_query(query)
    if response.status == 200:
        ret = Response(bdecode(raw))
        # Adopt any tracker-assigned id for future announces.
        if 'trackerid' in ret:
            self.trackerid = ret['trackerid']
        return ret
    try:
        return Response(bdecode(raw))
    except ValueError:
        raise IOError(('http error', response.status, response.reason))
def test_bdecode(self):
    """Test decoding of valid and erroneous sample strings"""
    # Small helpers keep each case on one line; original order preserved.
    def fails(data):
        self.assertRaises(ValueError, bdecode, data)

    def decodes(data, expected):
        self.assertEqual(bdecode(data), expected)

    fails(b'0:0:')
    fails(b'ie')
    fails(b'i341foo382e')
    decodes(b'i4e', 4)
    decodes(b'i0e', 0)
    decodes(b'i123456789e', 123456789)
    decodes(b'i-10e', -10)
    fails(b'i-0e')
    fails(b'i123')
    fails(b'')
    fails(b'i6easd')
    fails(b'35208734823ljdahflajhdf')
    fails(b'2:abfdjslhfld')
    decodes(b'0:', '')
    decodes(b'3:abc', 'abc')
    decodes(b'10:1234567890', '1234567890')
    fails(b'02:xy')
    fails(b'l')
    decodes(b'le', [])
    fails(b'leanfdldjfh')
    decodes(b'l0:0:0:e', ['', '', ''])
    fails(b'relwjhrlewjh')
    decodes(b'li1ei2ei3ee', [1, 2, 3])
    decodes(b'l3:asd2:xye', ['asd', 'xy'])
    decodes(b'll5:Alice3:Bobeli2ei3eee', [['Alice', 'Bob'], [2, 3]])
    fails(b'd')
    fails(b'defoobar')
    decodes(b'de', {})
    decodes(b'd3:agei25e4:eyes4:bluee', {'age': 25, 'eyes': 'blue'})
    decodes(b'd8:spam.mp3d6:author5:Alice6:lengthi100000eee',
            {'spam.mp3': {'author': 'Alice', 'length': 100000}})
    fails(b'd3:fooe')
    fails(b'di1e0:e')
    fails(b'd1:b0:1:a0:e')
    fails(b'd1:a0:1:a0:e')
    fails(b'i03e')
    fails(b'l01:ae')
    fails(b'9999:x')
    fails(b'l0:')
    fails(b'd0:0:')
    fails(b'd0:')
def _rerequest_single(self, tracker, querystring, code, callback):
    """Query one tracker URL and schedule processing of its reply.

    Arguments:
        tracker      tracker announce URL (may already contain a query)
        querystring  query string to append
        code         lock token passed to self.lock.trip/unwait
        callback     invoked via postrequest once the reply is processed

    Error states are recorded in self.errorcodes; all scheduling goes
    through self.sched.
    """
    try:
        closer = [None]

        def timedout(self=self, code=code, closer=closer):
            # Record the timeout (if this attempt still owns the lock),
            # then force the connection shut so the read unblocks.
            if self.lock.trip(code):
                self.errorcodes['troublecode'] = 'Problem connecting to ' \
                    'tracker - timeout exceeded'
                self.lock.unwait(code)
            try:
                closer[0]()
            except Exception:
                pass

        self.sched(timedout, self.timeout)
        err = None
        if '?' in tracker:
            url, qstring = tracker.split('?', 1)
            querystring = qstring + '&' + querystring
        else:
            url = tracker
        try:
            with urlopen(url + '?' + querystring) as h:
                closer[0] = h.close
                data = h.read()
        except (IOError, socket.error) as e:
            err = 'Problem connecting to tracker - ' + str(e)
        except Exception:
            err = 'Problem connecting to tracker'
        if err:
            if self.lock.trip(code):
                self.errorcodes['troublecode'] = err
                self.lock.unwait(code)
            return
        if not data:
            # Bug fix: h.read() returns bytes, so the old "data == ''"
            # comparison was always False and empty replies slipped
            # through to bdecode; test truthiness instead.
            if self.lock.trip(code):
                self.errorcodes['troublecode'] = 'no data from tracker'
                self.lock.unwait(code)
            return
        try:
            response = Response(bdecode(data, sloppy=1))
            check_peers(response)
        except ValueError as e:
            if self.lock.trip(code):
                self.errorcodes['bad_data'] = 'bad data from tracker - ' \
                    + str(e)
                self.lock.unwait(code)
            return
        if 'failure reason' in response:
            if self.lock.trip(code):
                self.errorcodes['rejected'] = self.rejectedmessage + \
                    response['failure reason']
                self.lock.unwait(code)
            return
        if self.lock.trip(code, True):
            # success!
            self.lock.unwait(code)
        else:
            # attempt timed out, don't do a callback
            callback = lambda: None

        # even if the attempt timed out, go ahead and process data
        def add(self=self, response=response, callback=callback):
            self.postrequest(response, callback)

        self.sched(add)
    except Exception:
        self.exception(callback)