def make_meta_file_dht(path, nodes, piece_len_exp, flag=Event(), progress=dummy,
                       title=None, comment=None, safe=None, content_type=None,
                       target=None, data_dir=None):
    # if nodes is empty, then get them out of the routing table in data_dir
    # else, expect nodes to be a string of comma-separated <ip>:<port> pairs
    # this has a lot of duplicated code from make_meta_file
    piece_length = 2 ** piece_len_exp
    a, b = os.path.split(path)
    if not target:
        if b == '':
            f = a + '.torrent'
        else:
            f = os.path.join(a, b + '.torrent')
    else:
        f = target
    info = makeinfo(path, piece_length, flag, progress, content_type)
    if flag.isSet():
        return
    check_info(info)
    info_hash = sha1(bencode(info)).digest()
    if not nodes:
        x = open(os.path.join(data_dir, 'routing_table'), 'rb')
        d = bdecode(x.read())
        x.close()
        t = KTable(Node().initWithDict({'id': d['id'],
                                        'host': '127.0.0.1',
                                        'port': 0}))
        for n in d['rt']:
            t.insertNode(Node().initWithDict(n))
        nodes = [(node.host, node.port) for node in t.findNodes(info_hash)
                 if node.host != '127.0.0.1']
    else:
        nodes = [(a[0], int(a[1]))
                 for a in [node.strip().split(":") for node in nodes.split(",")]]
    # time(), not gmtime(): 'creation date' is a Unix timestamp, and
    # int(gmtime()) raises TypeError on the struct_time it returns
    data = {'nodes': nodes, 'creation date': int(time())}
    h = file(f, 'wb')
    data['info'] = info
    if title:
        data['title'] = title
    if comment:
        data['comment'] = comment
    if safe:
        data['safe'] = safe
    h.write(bencode(data))
    h.close()
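# Hypothetical call sketch (paths and addresses invented for illustration):
# bootstrap nodes may be passed explicitly as one comma-separated string, e.g.
#   make_meta_file_dht('/data/ubuntu.iso', '67.215.242.138:6881,192.0.2.7:6881',
#                      18, data_dir='/var/lib/bt')
# or left empty, in which case the nodes closest to the new infohash are
# pulled from the routing table saved under data_dir.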
def _got_peers(self, peers):
    if not self.howmany:
        return
    if not peers:
        self.peerid = self.wanted_peerid
        self._postrequest(bencode({'peers': ''}))
    else:
        self.peerid = None
        self._postrequest(bencode({'peers': peers[0]}))
def dumpd(params, methodname=None, methodresponse=None, encoding=None,
          allow_none=False, tid=None):
    assert tid is not None, "need a transaction identifier"
    if methodname:
        out = bencode({'y': 'q', 't': tid, 'q': methodname, 'a': params})
    elif isinstance(params, DFault):
        out = bencode({'y': 'e', 't': tid,
                       'c': params.faultCode, 's': params.faultString})
    elif methodresponse:
        out = bencode({'y': 'r', 't': tid, 'r': params})
    else:
        raise Error("")
    return out
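# Illustrative usage (not from the original source): KRPC messages are plain
# bencoded dicts keyed by 'y' ('q' = query, 'r' = response, 'e' = error).
# Assumes the BTL bencode module used throughout these snippets.
from BTL.bencode import bencode, bdecode

wire = dumpd({'id': 'A' * 20}, methodname='ping', tid='aa')
assert bdecode(wire) == {'y': 'q', 't': 'aa', 'q': 'ping', 'a': {'id': 'A' * 20}}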
def make_meta_file_dht(
    path,
    nodes,
    piece_len_exp,
    flag=Event(),
    progress=dummy,
    title=None,
    comment=None,
    safe=None,
    content_type=None,
    target=None,
    data_dir=None,
):
    # if nodes is empty, then get them out of the routing table in data_dir
    # else, expect nodes to be a string of comma-separated <ip>:<port> pairs
    # this has a lot of duplicated code from make_meta_file
    piece_length = 2 ** piece_len_exp
    a, b = os.path.split(path)
    if not target:
        if b == "":
            f = a + ".torrent"
        else:
            f = os.path.join(a, b + ".torrent")
    else:
        f = target
    info = makeinfo(path, piece_length, flag, progress, content_type)
    if flag.isSet():
        return
    check_info(info)
    info_hash = sha(bencode(info)).digest()
    if not nodes:
        x = open(os.path.join(data_dir, "routing_table"), "rb")
        d = bdecode(x.read())
        x.close()
        t = KTable(Node().initWithDict({"id": d["id"], "host": "127.0.0.1", "port": 0}))
        for n in d["rt"]:
            t.insertNode(Node().initWithDict(n))
        nodes = [(node.host, node.port) for node in t.findNodes(info_hash)
                 if node.host != "127.0.0.1"]
    else:
        nodes = [(a[0], int(a[1]))
                 for a in [node.strip().split(":") for node in nodes.split(",")]]
    data = {"nodes": nodes, "creation date": int(time())}
    h = file(f, "wb")
    data["info"] = info
    if title:
        data["title"] = title
    if comment:
        data["comment"] = comment
    if safe:
        data["safe"] = safe
    h.write(bencode(data))
    h.close()
def dumps(params, methodname=None, methodresponse=None, encoding=None,
          allow_none=False):
    if methodresponse and isinstance(params, TupleType):
        assert len(params) == 1, "response tuple must be a singleton"
    if methodname:
        out = bencode({'y': 'q', 'q': methodname, 'a': params})
    elif isinstance(params, Fault):
        out = bencode({'y': 'e', 'c': params.faultCode, 's': params.faultString})
    elif methodresponse:
        out = bencode({'y': 'r', 'r': params[0]})
    else:
        raise Error("")
    return out
def natcheckOK(self, infohash, peerid, ip, port, not_seed):
    # Three parallel caches per torrent, each a [leechers, seeds] pair of
    # dicts keyed by peer id: bc[0] holds full peer dicts, bc[1] dicts
    # without 'peer id' (for no_peer_id requests), bc[2] compact strings.
    bc = self.becache.setdefault(infohash, [[{}, {}], [{}, {}], [{}, {}]])
    bc[0][not not_seed][peerid] = Bencached(bencode({'ip': ip,
                                                     'port': port,
                                                     'peer id': peerid}))
    bc[1][not not_seed][peerid] = Bencached(bencode({'ip': ip,
                                                     'port': port}))
    bc[2][not not_seed][peerid] = compact_peer_info(ip, port)
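# For reference, an illustrative stand-in (not the original helper) for the
# compact peer encoding used in bc[2]: 4 network-order bytes of IPv4 address
# followed by 2 bytes of big-endian port (BEP 23).
import socket
import struct

def compact_peer_info_sketch(ip, port):
    return socket.inet_aton(ip) + struct.pack('>H', port)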
def make_meta_file(path, url, piece_len_exp, flag=Event(), progress=dummy,
                   title=None, comment=None, safe=None, content_type=None,
                   target=None, url_list=None, name=None):
    # time(), not gmtime(): 'creation date' is a Unix timestamp
    data = {'announce': url.strip(), 'creation date': int(time())}
    piece_length = 2 ** piece_len_exp
    a, b = os.path.split(path)
    if not target:
        if b == '':
            f = a + '.torrent'
        else:
            f = os.path.join(a, b + '.torrent')
    else:
        f = target
    info = makeinfo(path, piece_length, flag, progress, name, content_type)
    if flag.isSet():
        return
    check_info(info)
    h = file(f, 'wb')
    data['info'] = info
    lang = read_language_file() or 'en'
    if lang:
        data['locale'] = lang
    if title:
        data['title'] = title
    if comment:
        data['comment'] = comment
    if safe:
        data['safe'] = safe
    if url_list:
        data['url-list'] = url_list
    h.write(bencode(data))
    h.close()
def make_meta_file_dht(path, nodes, piece_len_exp, flag=Event(), progress=dummy,
                       title=None, comment=None, safe=None, content_type=None,
                       target=None, data_dir=None):
    # if nodes is empty, then get them out of the routing table in data_dir
    # else, expect nodes to be a string of comma-separated <ip>:<port> pairs
    # this has a lot of duplicated code from make_meta_file
    piece_length = 2 ** piece_len_exp
    a, b = os.path.split(path)
    if not target:
        if b == '':
            f = a + '.torrent'
        else:
            f = os.path.join(a, b + '.torrent')
    else:
        f = target
    info = makeinfo(path, piece_length, flag, progress, content_type)
    if flag.isSet():
        return
    check_info(info)
    info_hash = sha(bencode(info)).digest()
    if not nodes:
        x = open(os.path.join(data_dir, 'routing_table'), 'rb')
        d = bdecode(x.read())
        x.close()
        t = KTable(Node().initWithDict({'id': d['id'],
                                        'host': '127.0.0.1',
                                        'port': 0}))
        for n in d['rt']:
            t.insertNode(Node().initWithDict(n))
        nodes = [(node.host, node.port) for node in t.findNodes(info_hash)
                 if node.host != '127.0.0.1']
    else:
        nodes = [(a[0], int(a[1]))
                 for a in [node.strip().split(":") for node in nodes.split(",")]]
    # time(), not gmtime(): 'creation date' is a Unix timestamp
    data = {'nodes': nodes, 'creation date': int(time())}
    h = file(f, 'wb')
    data['info'] = info
    if title:
        data['title'] = title
    if comment:
        data['comment'] = comment
    if safe:
        data['safe'] = safe
    h.write(bencode(data))
    h.close()
def sendRequest(self, method, args):
    # make message
    msg = {TID: chr(self.mtid), TYP: REQ, REQ: method, ARG: args}
    self.mtid = (self.mtid + 1) % 256
    s = bencode(msg)
    d = Deferred()
    self.tids[msg[TID]] = d
    self.call_later(KRPC_TIMEOUT, self.timeOut, msg[TID])
    # send it
    self.call_later(0, self._send, s, d)
    return d
def get(self):
    info_hash = util.GetParam('info_hash')
    if len(info_hash) == 20:
        torrents = model.Torrent.gql("WHERE info_hash = :1",
                                     base64.b64encode(info_hash))
    else:
        torrents = model.Torrent.gql("ORDER BY downloaded")
    for torrent in torrents:
        self.AddResult(torrent)
    self.response.headers['Content-Type'] = 'text/plain'
    self.response.out.write(bencode.bencode(self._files))
def checkpoint(self, auto=0):
    d = {}
    d['id'] = self.node.id
    d['rt'] = self._dumpRoutingTable()
    try:
        f = open(os.path.join(self.ddir, "routing_table"), 'wb')
        f.write(bencode(d))
        f.close()
    except Exception, e:
        # XXX real error handling needed here
        print ">>> unable to dump routing table!", str(e)
def start_init(self):
    if self.dht:
        infohash = sha(bencode(self.metainfo['value'])).digest()
        # infohash = 0x0d0d7a9ef71434d31b893cec305264579b7cf262
        nodes = self.dht.table.findNodes(infohash)
        if len(nodes) < const.K:
            for node in self.metainfo['nodes']:
                host = node['host']
                port = node['port']
                self.dht.addContact(host, port)
        # self.rawserver.add_task(30, self.show_table)
        self.rawserver.add_task(10, self.dht.getPeersAndAnnounce,
                                infohash, self.metainfo['value'],
                                self.show_value)
def make_meta_file(path, url, piece_len_exp, flag=Event(), progress=dummy,
                   title=None, comment=None, safe=None, content_type=None,
                   target=None, url_list=None, name=None, micropayments=False):
    # time(), not gmtime(): 'creation date' is a Unix timestamp
    data = {'announce': url.strip(), 'creation date': int(time())}
    piece_length = 2 ** piece_len_exp
    a, b = os.path.split(path)
    if not target:
        if b == '':
            f = a + '.torrent'
        else:
            f = os.path.join(a, b + '.torrent')
    else:
        f = target
    info = makeinfo(path, piece_length, flag, progress, name, content_type)
    if flag.isSet():
        return
    check_info(info)
    h = file(f, 'wb')
    data['info'] = info
    lang = read_language_file() or 'en'
    if lang:
        data['locale'] = lang
    if title:
        data['title'] = title
    if comment:
        data['comment'] = comment
    if safe:
        data['safe'] = safe
    if url_list:
        data['url-list'] = url_list
    if micropayments:
        data['micropayments'] = micropayments
    h.write(bencode(data))
    h.close()
def make_meta_file(
    path,
    url,
    piece_len_exp,
    flag=Event(),
    progress=dummy,
    title=None,
    comment=None,
    safe=None,
    content_type=None,
    target=None,
    url_list=None,
    name=None,
):
    data = {"announce": url.strip(), "creation date": int(time())}
    piece_length = 2 ** piece_len_exp
    a, b = os.path.split(path)
    if not target:
        if b == "":
            f = a + ".torrent"
        else:
            f = os.path.join(a, b + ".torrent")
    else:
        f = target
    info = makeinfo(path, piece_length, flag, progress, name, content_type)
    if flag.isSet():
        return
    check_info(info)
    h = file(f, "wb")
    data["info"] = info
    lang = read_language_file() or "en"
    if lang:
        data["locale"] = lang
    if title:
        data["title"] = title
    if comment:
        data["comment"] = comment
    if safe:
        data["safe"] = safe
    if url_list:
        data["url-list"] = url_list
    h.write(bencode(data))
    h.close()
def get(self):
    info_hash = util.GetParam('info_hash')
    if len(info_hash) != 20:
        self.error("Invalid info_hash argument (%d != 20)" % (len(info_hash)))
        return
    peer_id = self.request.get('peer_id')
    if len(peer_id) != 20:
        self.error("Invalid peer_id argument")
        return
    ip = os.environ['REMOTE_ADDR']
    # TODO(aporter): Get the optional ip from the client (for proxies)
    try:
        port = int(self.request.get('port'))
    except ValueError:
        self.error("Invalid port argument")
        return
    try:
        uploaded = int(self.request.get('uploaded'))
    except ValueError:
        self.error("Invalid uploaded argument")
        return
    try:
        downloaded = int(self.request.get('downloaded'))
    except ValueError:
        self.error("Invalid downloaded argument")
        return
    try:
        left = int(self.request.get('left'))
    except ValueError:
        self.error("Invalid left argument")
        return
    try:
        compact = int(self.request.get('compact'))
    except ValueError:
        self.error("Invalid compact argument")
        return
    try:
        numwant = int(self.request.get('numwant'))
    except ValueError:
        self.error("Invalid numwant argument")
        return
    if compact != 0 and compact != 1:
        self.error("Invalid compact argument (must be 0 or 1)")
        return
    self._compact = (compact == 1)
    no_peer_id = not compact

    # TODO(aporter): Event handling
    event = self.request.get('event', None)

    # TODO(aporter): Used to prove identity to tracker if ip changes
    key = self.request.get('key')

    torrents = model.Torrent.gql("WHERE info_hash = :1",
                                 base64.b64encode(info_hash))
    if torrents.count() == 1:
        torrent = torrents[0]
    else:
        torrent = model.Torrent(info_hash=base64.b64encode(info_hash))
        torrent.put()

    peers = model.TorrentPeerEntry.gql(
        "WHERE torrent = :1 AND peer_id = :2 AND ip = :3 AND port = :4",
        torrent, peer_id, ip, port)
    if peers.count() == 1:
        peer = peers[0]
        # TODO(aporter): Should we reject peers with differing IP and port or
        # should they be allowed to update? This depends on if this peer id
        # can be guessed by others or not.
        peer.ip = ip
        peer.port = port
        peer.last_datetime = datetime.datetime.now()
    else:
        peer = model.TorrentPeerEntry(torrent=torrent,
                                      ip=ip,
                                      port=port,
                                      peer_id=peer_id,
                                      last_datetime=datetime.datetime.now())
    peer.downloaded = downloaded
    peer.uploaded = uploaded
    peer.put()

    self.BuildPeersResult(torrent, peer_id)
    self.CleanupOldPeers(torrent)

    # TODO(aporter): Respect maximum number of peers
    num_complete = 1
    num_incomplete = 0

    self.response.headers['Content-Type'] = 'text/plain'
    result = {
        "interval": 60,
        "complete": num_complete,
        "incomplete": num_incomplete,
        "peers": self._peers,
    }
    self.response.out.write(bencode.bencode(result))
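# For context (hypothetical request, not from the original source), a client
# announce served by the handler above looks like:
#   GET /announce?info_hash=%AA%BB...&peer_id=-XX0001-abcdefghijkl
#       &port=6881&uploaded=0&downloaded=0&left=123456&compact=1&numwant=50
# and the bencoded reply carries 'interval', 'complete', 'incomplete', 'peers'.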
if f and callable(f):
    try:
        ret = apply(f, (), msg[ARG])
    except KRPCFailSilently:
        pass
    except KRPCServerError, e:
        olen = self.sendErr(addr, msg[TID], 202,
                            "Server Error: %s" % e.args[0])
    except KRPCProtocolError, e:
        olen = self.sendErr(addr, msg[TID], 204,
                            "Protocol Error: %s" % e.args[0])
    except Exception, e:
        print_exc(20)
        olen = self.sendErr(addr, msg[TID], 202, "Server Error")
    else:
        if ret:
            # make response
            out = bencode({TID: msg[TID], TYP: RSP, RSP: ret})
        else:
            out = bencode({TID: msg[TID], TYP: RSP, RSP: {}})
        # send response
        olen = len(out)
        self.rltransport.sendto(out, 0, addr)
else:
    if self.noisy:
        # print "don't know about method %s" % msg[REQ]
        pass
    # unknown method
    olen = self.sendErr(addr, msg[TID], *KERR_METHOD_UNKNOWN)
if self.noisy:
    try:
        ndist = 10 * log10(2 ** 160 * 1.0 /
                           distance(self.factory.node.id, msg[ARG]['id']))
class Tracker(object):

    def __init__(self, config, rawserver):
        self.config = config
        self.response_size = config['response_size']
        self.max_give = config['max_give']
        self.dfile = efs2(config['dfile'])
        self.natcheck = config['nat_check']
        favicon = config['favicon']
        self.favicon = None
        if favicon:
            try:
                h = open(favicon, 'rb')
                self.favicon = h.read()
                h.close()
            except:
                errorfunc(logging.WARNING,
                          _("specified favicon file -- %s -- does not exist.")
                          % favicon)
        self.rawserver = rawserver
        self.cached = {}
        # format: infohash: [[time1, l1, s1], [time2, l2, s2], [time3, l3, s3]]
        self.cached_t = {}  # format: infohash: [time, cache]
        self.times = {}
        self.state = {}
        self.seedcount = {}

        self.save_pending = False
        self.parse_pending = False

        self.only_local_override_ip = config['only_local_override_ip']
        if self.only_local_override_ip == 2:
            self.only_local_override_ip = not config['nat_check']

        if os.path.exists(self.dfile):
            try:
                h = open(self.dfile, 'rb')
                ds = h.read()
                h.close()
                try:
                    tempstate = cPickle.loads(ds)
                except:
                    tempstate = bdecode(ds)  # backwards-compatibility.
                if not tempstate.has_key('peers'):
                    tempstate = {'peers': tempstate}
                statefiletemplate(tempstate)
                self.state = tempstate
            except:
                errorfunc(logging.WARNING,
                          _("statefile %s corrupt; resetting") % self.dfile)
        self.downloads = self.state.setdefault('peers', {})
        self.completed = self.state.setdefault('completed', {})

        self.becache = {}
        # format: infohash: [[l1, s1], [l2, s2], [l3, s3]]
        for infohash, ds in self.downloads.iteritems():
            self.seedcount[infohash] = 0
            for x, y in ds.iteritems():
                if not y.get('nat', -1):
                    ip = y.get('given_ip')
                    if not (ip and self.allow_local_override(y['ip'], ip)):
                        ip = y['ip']
                    self.natcheckOK(infohash, x, ip, y['port'], y['left'])
                if not y['left']:
                    self.seedcount[infohash] += 1

        for infohash in self.downloads:
            self.times[infohash] = {}
            for peerid in self.downloads[infohash]:
                self.times[infohash][peerid] = 0

        self.reannounce_interval = config['reannounce_interval']
        self.save_dfile_interval = config['save_dfile_interval']
        self.show_names = config['show_names']
        rawserver.add_task(self.save_dfile_interval, self.save_dfile)
        self.prevtime = time()
        self.timeout_downloaders_interval = config['timeout_downloaders_interval']
        rawserver.add_task(self.timeout_downloaders_interval,
                           self.expire_downloaders)
        self.logfile = None
        self.log = None
        if (config['logfile'] != '') and (config['logfile'] != '-'):
            try:
                self.logfile = config['logfile']
                self.log = open(self.logfile, 'a')
                sys.stdout = self.log
                print _("# Log Started: "), isotime()
            except:
                print _("**warning** could not redirect stdout to log file: "), sys.exc_info()[0]

        if config['hupmonitor']:
            def huphandler(signum, frame, self=self):
                try:
                    self.log.close()
                    self.log = open(self.logfile, 'a')
                    sys.stdout = self.log
                    print _("# Log reopened: "), isotime()
                except:
                    print _("***warning*** could not reopen logfile")
            signal.signal(signal.SIGHUP, huphandler)

        self.allow_get = config['allow_get']

        if config['allowed_dir'] != '':
            self.allowed_dir = config['allowed_dir']
            self.parse_dir_interval = config['parse_dir_interval']
            self.allowed = self.state.setdefault('allowed', {})
            self.allowed_dir_files = self.state.setdefault('allowed_dir_files', {})
            self.allowed_dir_blocked = {}
            self.parse_allowed()
        else:
            try:
                del self.state['allowed']
            except:
                pass
            try:
                del self.state['allowed_dir_files']
            except:
                pass
            self.allowed = None

        self.uq_broken = unquote('+') != ' '
        self.keep_dead = config['keep_dead']

    def allow_local_override(self, ip, given_ip):
        return is_valid_ipv4(given_ip) and (not self.only_local_override_ip
                                            or is_local_ip(ip))

    def get_infopage(self):
        try:
            if not self.config['show_infopage']:
                return (404, 'Not Found', default_headers, alas)
            red = self.config['infopage_redirect']
            if red != '':
                return (302, 'Found',
                        {'Content-Type': 'text/html', 'Location': red},
                        '<A HREF="' + red + '">Click Here</A>')

            s = StringIO()
            s.write('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" '
                    '"http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n'
                    '<html><head><title>BitTorrent download info</title>\n')
            if self.favicon is not None:
                s.write('<link rel="shortcut icon" href="/favicon.ico">\n')
            s.write('</head>\n<body>\n'
                    '<h3>BitTorrent download info</h3>\n'
                    '<ul>\n'
                    '<li><strong>tracker version:</strong> %s</li>\n'
                    '<li><strong>server time:</strong> %s</li>\n'
                    '</ul>\n' % (version, isotime()))
            if self.allowed is not None:
                if self.show_names:
                    names = [(value[1].name, infohash)
                             for infohash, value in self.allowed.iteritems()]
                else:
                    names = [(None, infohash) for infohash in self.allowed]
            else:
                names = [(None, infohash) for infohash in self.downloads]
            if not names:
                s.write('<p>not tracking any files yet...</p>\n')
            else:
                names.sort()
                tn = 0
                tc = 0
                td = 0
                tt = 0  # Total transferred
                ts = 0  # Total size
                nf = 0  # Number of files displayed
                if self.allowed is not None and self.show_names:
                    s.write('<table summary="files" border="1">\n'
                            '<tr><th>info hash</th><th>torrent name</th><th align="right">size</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th><th align="right">transferred</th></tr>\n')
                else:
                    s.write('<table summary="files">\n'
                            '<tr><th>info hash</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th></tr>\n')
                for name, infohash in names:
                    l = self.downloads[infohash]
                    n = self.completed.get(infohash, 0)
                    tn = tn + n
                    c = self.seedcount[infohash]
                    tc = tc + c
                    d = len(l) - c
                    td = td + d
                    nf = nf + 1
                    if self.allowed is not None and self.show_names:
                        if self.allowed.has_key(infohash):
                            sz = self.allowed[infohash][1].total_bytes  # size
                            ts = ts + sz
                            szt = sz * n  # Transferred for this torrent
                            tt = tt + szt
                            if self.allow_get == 1:
                                linkname = ('<a href="/file?info_hash='
                                            + quote(infohash) + '">' + name + '</a>')
                            else:
                                linkname = name
                            s.write('<tr><td><code>%s</code></td><td>%s</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n'
                                    % (b2a_hex(infohash), linkname, size_format(sz),
                                       c, d, n, size_format(szt)))
                    else:
                        s.write('<tr><td><code>%s</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td></tr>\n'
                                % (b2a_hex(infohash), c, d, n))
                ttn = 0
                for i in self.completed.itervalues():
                    ttn = ttn + i
                if self.allowed is not None and self.show_names:
                    s.write('<tr><td align="right" colspan="2">%i files</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td><td align="right">%s</td></tr>\n'
                            % (nf, size_format(ts), tc, td, tn, ttn, size_format(tt)))
                else:
                    s.write('<tr><td align="right">%i files</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td></tr>\n'
                            % (nf, tc, td, tn, ttn))
                s.write('</table>\n'
                        '<ul>\n'
                        '<li><em>info hash:</em> SHA1 hash of the "info" section of the metainfo (*.torrent)</li>\n'
                        '<li><em>complete:</em> number of connected clients with the complete file</li>\n'
                        '<li><em>downloading:</em> number of connected clients still downloading</li>\n'
                        '<li><em>downloaded:</em> reported complete downloads (total: current/all)</li>\n'
                        '<li><em>transferred:</em> torrent size * total downloaded (does not include partial transfers)</li>\n'
                        '</ul>\n')
            s.write('</body>\n'
                    '</html>\n')
            return (200, 'OK',
                    {'Content-Type': 'text/html; charset=iso-8859-1'},
                    s.getvalue())
        except:
            print_exc()
            return (500, 'Internal Server Error',
                    {'Content-Type': 'text/html; charset=iso-8859-1'},
                    'Server Error')

    def scrapedata(self, infohash, return_name=True):
        l = self.downloads[infohash]
        n = self.completed.get(infohash, 0)
        c = self.seedcount[infohash]
        d = len(l) - c
        f = {'complete': c, 'incomplete': d, 'downloaded': n}
        if return_name and self.show_names and self.allowed is not None:
            f['name'] = self.allowed[infohash]['name']
        return f

    def get_scrape(self, paramslist):
        fs = {}
        if paramslist.has_key('info_hash'):
            if self.config['scrape_allowed'] not in ['specific', 'full']:
                # note: the spec key is 'failure reason', with a space
                return (400, 'Not Authorized', default_headers,
                        bencode({'failure reason':
                                 "specific scrape function is not available with this tracker."}))
            for infohash in paramslist['info_hash']:
                if self.allowed is not None and infohash not in self.allowed:
                    continue
                if infohash in self.downloads:
                    fs[infohash] = self.scrapedata(infohash)
        else:
            if self.config['scrape_allowed'] != 'full':
                return (400, 'Not Authorized', default_headers,
                        bencode({'failure reason':
                                 "full scrape function is not available with this tracker."}))
            if self.allowed is not None:
                hashes = self.allowed
            else:
                hashes = self.downloads
            for infohash in hashes:
                fs[infohash] = self.scrapedata(infohash)
        return (200, 'OK', {'Content-Type': 'text/plain'},
                bencode({'files': fs}))

    def get_file(self, infohash):
        if not self.allow_get:
            return (400, 'Not Authorized', default_headers,
                    _("get function is not available with this tracker."))
        if not self.allowed.has_key(infohash):
            return (404, 'Not Found', default_headers, alas)
        fname = self.allowed[infohash]['file']
        fpath = self.allowed[infohash]['path']
        return (200, 'OK',
                {'Content-Type': 'application/x-bittorrent',
                 'Content-Disposition': 'attachment; filename=' + fname},
                open(fpath, 'rb').read())

    def check_allowed(self, infohash, paramslist):
        if self.allowed is not None:
            if not self.allowed.has_key(infohash):
                return (200, 'Not Authorized', default_headers,
                        bencode({'failure reason':
                                 "Requested download is not authorized for use with this tracker."}))
            if self.config['allowed_controls']:
                if self.allowed[infohash].has_key('failure reason'):
                    return (200, 'Not Authorized', default_headers,
                            bencode({'failure reason':
                                     self.allowed[infohash]['failure reason']}))
        return None

    def add_data(self, infohash, event, ip, paramslist):
        peers = self.downloads.setdefault(infohash, {})
        ts = self.times.setdefault(infohash, {})
        self.completed.setdefault(infohash, 0)
        self.seedcount.setdefault(infohash, 0)

        def params(key, default=None, l=paramslist):
            if l.has_key(key):
                return l[key][0]
            return default

        myid = params('peer_id', '')
        if len(myid) != 20:
            raise ValueError, 'id not of length 20'
        if event not in ['started', 'completed', 'stopped', 'snooped', None]:
            raise ValueError, 'invalid event'
        port = int(params('port', ''))
        if port < 0 or port > 65535:
            raise ValueError, 'invalid port'
        left = int(params('left', ''))
        if left < 0:
            raise ValueError, 'invalid amount left'

        peer = peers.get(myid)
        mykey = params('key')
        auth = not peer or peer.get('key', -1) == mykey or peer.get('ip') == ip

        gip = params('ip')
        local_override = gip and self.allow_local_override(ip, gip)
        if local_override:
            ip1 = gip
        else:
            ip1 = ip
        if not auth and local_override and self.only_local_override_ip:
            auth = True

        if params('numwant') is not None:
            rsize = min(int(params('numwant')), self.max_give)
        else:
            rsize = self.response_size

        if event == 'stopped':
            if peer and auth:
                self.delete_peer(infohash, myid)
        elif not peer:
            ts[myid] = time()
            peer = {'ip': ip, 'port': port, 'left': left}
            if mykey:
                peer['key'] = mykey
            if gip:
                peer['given ip'] = gip
            if port:
                if not self.natcheck or (local_override and
                                         self.only_local_override_ip):
                    peer['nat'] = 0
                    self.natcheckOK(infohash, myid, ip1, port, left)
                else:
                    NatCheck(self.connectback_result, infohash, myid, ip1, port,
                             self.rawserver)
            else:
                peer['nat'] = 2 ** 30
            if event == 'completed':
                self.completed[infohash] += 1
            if not left:
                self.seedcount[infohash] += 1
            peers[myid] = peer
        else:
            if not auth:
                return rsize  # return w/o changing stats
            ts[myid] = time()
            if not left and peer['left']:
                self.completed[infohash] += 1
                self.seedcount[infohash] += 1
                if not peer.get('nat', -1):
                    for bc in self.becache[infohash]:
                        bc[1][myid] = bc[0][myid]
                        del bc[0][myid]
            if peer['left']:
                peer['left'] = left
            recheck = False
            if ip != peer['ip']:
                peer['ip'] = ip
                recheck = True
            if gip != peer.get('given ip'):
                if gip:
                    peer['given ip'] = gip
                elif peer.has_key('given ip'):
                    del peer['given ip']
                if local_override:
                    if self.only_local_override_ip:
                        self.natcheckOK(infohash, myid, ip1, port, left)
                    else:
                        recheck = True
            if port and self.natcheck:
                if recheck:
                    if peer.has_key('nat'):
                        if not peer['nat']:
                            l = self.becache[infohash]
                            y = not peer['left']
                            for x in l:
                                del x[y][myid]
                        del peer['nat']  # restart NAT testing
                else:
                    natted = peer.get('nat', -1)
                    if natted and natted < self.natcheck:
                        recheck = True
                if recheck:
                    NatCheck(self.connectback_result, infohash, myid, ip1, port,
                             self.rawserver)
        return rsize

    def peerlist(self, infohash, stopped, is_seed, return_type, rsize):
        data = {}  # return data
        seeds = self.seedcount[infohash]
        data['complete'] = seeds
        data['incomplete'] = len(self.downloads[infohash]) - seeds

        if (self.allowed is not None and self.config['allowed_controls'] and
                self.allowed[infohash].has_key('warning message')):
            data['warning message'] = self.allowed[infohash]['warning message']

        data['interval'] = self.reannounce_interval
        if stopped or not rsize:  # save some bandwidth
            data['peers'] = []
            return data

        bc = self.becache.setdefault(infohash, [[{}, {}], [{}, {}], [{}, {}]])
        len_l = len(bc[0][0])
        len_s = len(bc[0][1])
        if not (len_l + len_s):  # caches are empty!
            data['peers'] = []
            return data
        l_get_size = int(float(rsize) * (len_l) / (len_l + len_s))
        cache = self.cached.setdefault(infohash, [None, None, None])[return_type]
        if cache:
            if cache[0] + self.config['min_time_between_cache_refreshes'] < time():
                cache = None
            else:
                if ((is_seed and len(cache[1]) < rsize)
                        or len(cache[1]) < l_get_size
                        or not cache[1]):
                    cache = None
        if not cache:
            vv = [[], [], []]
            cache = [time(),
                     bc[return_type][0].values() + vv[return_type],
                     bc[return_type][1].values()]
            shuffle(cache[1])
            shuffle(cache[2])
            self.cached[infohash][return_type] = cache
            for rr in xrange(len(self.cached[infohash])):
                if rr != return_type:
                    try:
                        self.cached[infohash][rr][1].extend(vv[rr])
                    except:
                        pass
        if len(cache[1]) < l_get_size:
            peerdata = cache[1]
            if not is_seed:
                peerdata.extend(cache[2])
            cache[1] = []
            cache[2] = []
        else:
            if not is_seed:
                peerdata = cache[2][l_get_size - rsize:]
                del cache[2][l_get_size - rsize:]
                rsize -= len(peerdata)
            else:
                peerdata = []
            if rsize:
                peerdata.extend(cache[1][-rsize:])
                del cache[1][-rsize:]
        if return_type == 2:
            peerdata = ''.join(peerdata)
        data['peers'] = peerdata
        return data

    def get(self, connection, path, headers):
        ip = connection.get_ip()
        nip = get_forwarded_ip(headers)
        if nip and not self.only_local_override_ip:
            ip = nip

        paramslist = {}

        def params(key, default=None, l=paramslist):
            if l.has_key(key):
                return l[key][0]
            return default

        try:
            (scheme, netloc, path, pars, query, fragment) = urlparse(path)
            if self.uq_broken == 1:
                path = path.replace('+', ' ')
                query = query.replace('+', ' ')
            path = unquote(path)[1:]
            for s in query.split('&'):
                if s != '':
                    i = s.index('=')
                    kw = unquote(s[:i])
                    paramslist.setdefault(kw, [])
                    paramslist[kw] += [unquote(s[i + 1:])]

            if path == '' or path == 'index.html':
                return self.get_infopage()
            if path == 'scrape':
                return self.get_scrape(paramslist)
            if path == 'file':
                return self.get_file(params('info_hash'))
            if path == 'favicon.ico' and self.favicon is not None:
                return (200, 'OK', {'Content-Type': 'image/x-icon'},
                        self.favicon)
            if path != 'announce':
                return (404, 'Not Found', default_headers, alas)

            # main tracker function
            infohash = params('info_hash')
            if not infohash:
                raise ValueError, 'no info hash'

            notallowed = self.check_allowed(infohash, paramslist)
            if notallowed:
                if NOISY:
                    self._print_event("get: NOT ALLOWED: info_hash=%s, %s"
                                      % (infohash.encode('hex'),
                                         str(notallowed)))
                return notallowed
            event = params('event')
            rsize = self.add_data(infohash, event, ip, paramslist)
        except ValueError, e:
            print e
            if NOISY:
                self._print_exc("get: ", e)
            return (400, 'Bad Request', {'Content-Type': 'text/plain'},
                    'you sent me garbage - ' + str_exc(e))

        if params('compact'):
            return_type = 2
        elif params('no_peer_id'):
            return_type = 1
        else:
            return_type = 0

        data = self.peerlist(infohash, event == 'stopped',
                             not params('left'), return_type, rsize)

        if paramslist.has_key('scrape'):
            data['scrape'] = self.scrapedata(infohash, False)

        return (200, 'OK', default_headers, bencode(data))
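# Shape of a successful scrape response from get_scrape above (hypothetical
# 20-byte infohash key, counts invented): a bencoded dict mapping each
# infohash to its per-torrent counters.
#   {'files': {'\xaa...\xaa': {'complete': 12, 'incomplete': 3, 'downloaded': 98}}}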
def sendErr(self, addr, tid, code, msg):
    # send error
    out = bencode({TID: tid, TYP: ERR, ERR: (code, msg)})
    olen = len(out)
    self.rltransport.sendto(out, 0, addr)
    return olen
def to_data(self):
    return bencode(self.metainfo)
def dump_fault(code, msg):
    return bencode({'y': 'e', 'c': code, 's': msg})
def __init__(self, metainfo):
    """metainfo is a dict.  When read from a metainfo (i.e., .torrent
       file), the file must first be bdecoded before being passed to
       ConvertedMetainfo."""
    self.bad_torrent_wrongfield = False
    self.bad_torrent_unsolvable = False
    self.bad_torrent_noncharacter = False
    self.bad_conversion = False
    self.bad_windows = False
    self.bad_path = False
    self.reported_errors = False

    # All of the following values should be considered READONLY.
    # Modifications to the metainfo that should be written should
    # occur to the underlying metainfo dict directly.
    self.is_batch = False
    self.orig_files = None
    self.files_fs = None
    self.total_bytes = 0
    self.sizes = []
    self.comment = None
    self.title = None  # descriptive title text for whole torrent
    self.creation_date = None
    self.metainfo = metainfo
    self.encoding = None
    self.caches = None

    btformats.check_message(metainfo, check_paths=False)
    info = metainfo['info']
    self.is_private = info.has_key("private") and info['private']

    if 'encoding' in metainfo:
        self.encoding = metainfo['encoding']
    elif 'codepage' in metainfo:
        self.encoding = 'cp%s' % metainfo['codepage']
    if self.encoding is not None:
        try:
            for s in (u'this is a test',
                      u'these should also work in any encoding: 0123456789\0'):
                assert s.encode(self.encoding).decode(self.encoding) == s
        except:
            self.encoding = 'iso-8859-1'
            self.bad_torrent_unsolvable = True

    if info.has_key('length'):
        self.total_bytes = info['length']
        self.sizes.append(self.total_bytes)
        if info.has_key('content_type'):
            self.content_type = info['content_type']
        else:
            self.content_type = None  # hasattr or None.  Which is better?
    else:
        self.is_batch = True
        r = []
        self.orig_files = []
        self.sizes = []
        self.content_types = []
        i = 0
        # info['files'] is a list of dicts containing keys:
        # 'length', 'path', and 'content_type'.  The 'content_type'
        # key is optional.
        for f in info['files']:
            l = f['length']
            self.total_bytes += l
            self.sizes.append(l)
            self.content_types.append(f.get('content_type'))
            path = self._get_attr(f, 'path')
            if len(path[-1]) == 0:
                if l > 0:
                    # the original raised with an undefined name 'x' here;
                    # report the offending (empty) component instead
                    raise BTFailure(_("Bad file path component: ") + path[-1])
                # BitComet makes .torrent files with directories
                # listed along with the files, which we don't support
                # yet, in part because some idiot interpreted this as
                # a bug in BitComet rather than a feature.
                path.pop(-1)
            for x in path:
                if not btformats.allowed_path_re.match(x):
                    raise BTFailure(_("Bad file path component: ") + x)
            self.orig_files.append('/'.join(path))
            k = []
            for u in path:
                tf2 = self._to_fs_2(u)
                k.append((tf2, u))
            r.append((k, i))
            i += 1

        # If two or more file/subdirectory names in the same directory
        # would map to the same name after encoding conversions + Windows
        # workarounds, change them.  Files are changed as
        # 'a.b.c'->'a.b.0.c', 'a.b.1.c' etc, directories or files without
        # '.' as 'a'->'a.0', 'a.1' etc.  If one of the multiple original
        # names was a "clean" conversion, that one is always unchanged
        # and the rest are adjusted.
        r.sort()
        self.files_fs = [None] * len(r)
        prev = [None]
        res = []
        stack = [{}]
        for x in r:
            j = 0
            x, i = x
            while x[j] == prev[j]:
                j += 1
            del res[j:]
            del stack[j + 1:]
            name = x[j][0][1]
            if name in stack[-1]:
                for name in generate_names(x[j][1], j != len(x) - 1):
                    name = self._to_fs(name)
                    if name not in stack[-1]:
                        break
            stack[-1][name] = None
            res.append(name)
            for j in xrange(j + 1, len(x)):
                name = x[j][0][1]
                stack.append({name: None})
                res.append(name)
            self.files_fs[i] = os.path.join(*res)
            prev = x

    self.name = self._get_attr(info, 'name')
    self.name_fs = self._to_fs(self.name)
    self.piece_length = info['piece length']

    self.announce = metainfo.get('announce')
    self.announce_list = metainfo.get('announce-list')
    if 'announce-list' not in metainfo and 'announce' not in metainfo:
        self.is_trackerless = True
    else:
        self.is_trackerless = False
    self.nodes = metainfo.get('nodes', [('router.bittorrent.com', 6881)])

    self.title = metainfo.get('title')
    self.comment = metainfo.get('comment')
    self.creation_date = metainfo.get('creation date')
    self.locale = metainfo.get('locale')
    self.safe = metainfo.get('safe')
    self.url_list = metainfo.get('url-list', [])
    if not isinstance(self.url_list, list):
        self.url_list = [self.url_list, ]

    self.caches = metainfo.get('caches')

    self.hashes = [info['pieces'][x:x + 20]
                   for x in xrange(0, len(info['pieces']), 20)]
    self.infohash = InfoHashType(sha1(bencode(info)).digest())
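# Small illustrative helper (not part of the original class): the infohash
# computed above is the SHA-1 of the bencoded 'info' dict alone, never of
# the whole metainfo. Assumes the BTL bencode module used throughout.
from hashlib import sha1
from BTL.bencode import bencode, bdecode

def infohash_of(torrent_bytes):
    return sha1(bencode(bdecode(torrent_bytes)['info'])).digest()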
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

# Written by Henry 'Pi' James and Bram Cohen

app_name = "BitTorrent"
from BitTorrent.translation import _

from os.path import basename
from sys import argv, exit

from BTL.bencode import bencode, bdecode

if len(argv) < 3:
    print _("Usage: %s TRACKER_URL [TORRENTFILE [TORRENTFILE ... ] ]") % basename(argv[0])
    print
    exit(2)  # common exit code for syntax error

for f in argv[2:]:
    h = open(f, 'rb')
    metainfo = bdecode(h.read())
    h.close()
    if metainfo['announce'] != argv[1]:
        print _("old announce for %s: %s") % (f, metainfo['announce'])
        metainfo['announce'] = argv[1]
        h = open(f, 'wb')
        h.write(bencode(metainfo))
        h.close()
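# Example invocation (hypothetical script name and file paths):
#   python changetracker.py http://tracker.example.com:6969/announce one.torrent two.torrent
# Any file whose 'announce' key already matches the new URL is left untouched.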
def __init__(self, metainfo):
    """metainfo is a dict.  When read from a metainfo (i.e., .torrent
       file), the file must first be bdecoded before being passed to
       ConvertedMetainfo."""
    self.bad_torrent_wrongfield = False
    self.bad_torrent_unsolvable = False
    self.bad_torrent_noncharacter = False
    self.bad_conversion = False
    self.bad_windows = False
    self.bad_path = False
    self.reported_errors = False

    # All of the following values should be considered READONLY.
    # Modifications to the metainfo that should be written should
    # occur to the underlying metainfo dict directly.
    self.is_batch = False
    self.orig_files = None
    self.files_fs = None
    self.total_bytes = 0
    self.sizes = []
    self.comment = None
    self.title = None  # descriptive title text for whole torrent
    self.creation_date = None
    self.metainfo = metainfo
    self.encoding = None
    self.caches = None
    # EZ: whether micropayments are used
    self.micropayments = False

    btformats.check_message(metainfo, check_paths=False)
    info = metainfo['info']
    self.is_private = info.has_key("private") and info['private']

    if 'encoding' in metainfo:
        self.encoding = metainfo['encoding']
    elif 'codepage' in metainfo:
        self.encoding = 'cp%s' % metainfo['codepage']
    if self.encoding is not None:
        try:
            for s in (u'this is a test',
                      u'these should also work in any encoding: 0123456789\0'):
                assert s.encode(self.encoding).decode(self.encoding) == s
        except:
            self.encoding = 'iso-8859-1'
            self.bad_torrent_unsolvable = True

    if info.has_key('length'):
        self.total_bytes = info['length']
        self.sizes.append(self.total_bytes)
        if info.has_key('content_type'):
            self.content_type = info['content_type']
        else:
            self.content_type = None  # hasattr or None.  Which is better?
    else:
        self.is_batch = True
        r = []
        self.orig_files = []
        self.sizes = []
        self.content_types = []
        i = 0
        # info['files'] is a list of dicts containing keys:
        # 'length', 'path', and 'content_type'.  The 'content_type'
        # key is optional.
        for f in info['files']:
            l = f['length']
            self.total_bytes += l
            self.sizes.append(l)
            self.content_types.append(f.get('content_type'))
            path = self._get_attr(f, 'path')
            if len(path[-1]) == 0:
                if l > 0:
                    # the original raised with an undefined name 'x' here;
                    # report the offending (empty) component instead
                    raise BTFailure(_("Bad file path component: ") + path[-1])
                # BitComet makes .torrent files with directories
                # listed along with the files, which we don't support
                # yet, in part because some idiot interpreted this as
                # a bug in BitComet rather than a feature.
                path.pop(-1)
            for x in path:
                if not btformats.allowed_path_re.match(x):
                    raise BTFailure(_("Bad file path component: ") + x)
            self.orig_files.append('/'.join(path))
            k = []
            for u in path:
                tf2 = self._to_fs_2(u)
                k.append((tf2, u))
            r.append((k, i))
            i += 1

        # If two or more file/subdirectory names in the same directory
        # would map to the same name after encoding conversions + Windows
        # workarounds, change them.  Files are changed as
        # 'a.b.c'->'a.b.0.c', 'a.b.1.c' etc, directories or files without
        # '.' as 'a'->'a.0', 'a.1' etc.  If one of the multiple original
        # names was a "clean" conversion, that one is always unchanged
        # and the rest are adjusted.
        r.sort()
        self.files_fs = [None] * len(r)
        prev = [None]
        res = []
        stack = [{}]
        for x in r:
            j = 0
            x, i = x
            while x[j] == prev[j]:
                j += 1
            del res[j:]
            del stack[j + 1:]
            name = x[j][0][1]
            if name in stack[-1]:
                for name in generate_names(x[j][1], j != len(x) - 1):
                    name = self._to_fs(name)
                    if name not in stack[-1]:
                        break
            stack[-1][name] = None
            res.append(name)
            for j in xrange(j + 1, len(x)):
                name = x[j][0][1]
                stack.append({name: None})
                res.append(name)
            self.files_fs[i] = os.path.join(*res)
            prev = x

    self.name = self._get_attr(info, 'name')
    self.name_fs = self._to_fs(self.name)
    self.piece_length = info['piece length']

    self.announce = metainfo.get('announce')
    self.announce_list = metainfo.get('announce-list')
    if 'announce-list' not in metainfo and 'announce' not in metainfo:
        self.is_trackerless = True
    else:
        self.is_trackerless = False

    # EZ
    if 'micropayments' in metainfo and metainfo['micropayments'] == True:
        print "found micropayments == true in metafile"
        self.micropayments = True

    self.nodes = metainfo.get('nodes', [('router.bittorrent.com', 6881)])
    self.title = metainfo.get('title')
    self.comment = metainfo.get('comment')
    self.creation_date = metainfo.get('creation date')
    self.locale = metainfo.get('locale')
    self.safe = metainfo.get('safe')
    self.url_list = metainfo.get('url-list', [])
    if not isinstance(self.url_list, list):
        self.url_list = [self.url_list, ]

    self.caches = metainfo.get('caches')

    self.hashes = [info['pieces'][x:x + 20]
                   for x in xrange(0, len(info['pieces']), 20)]
    self.infohash = InfoHashType(sha(bencode(info)).digest())
def error(self, msg):
    self.response.headers['Content-Type'] = 'text/plain'
    self.response.out.write(bencode.bencode({"failure reason": msg}))