def get_scrape(self, paramslist):
    """Handle a tracker /scrape request.

    paramslist maps query keys to lists of values (CGI style).  Returns an
    HTTP response tuple (code, message, headers, bencoded body).  With an
    'info_hash' parameter only those torrents are scraped (requires
    config['scrape_allowed'] in 'specific'/'full'); without one a full
    scrape of every known torrent is returned (requires 'full').
    """
    fs = {}
    if paramslist.has_key('info_hash'):
        # Specific scrape: one entry per requested hash.
        if self.config['scrape_allowed'] not in ['specific', 'full']:
            return (400, 'Not Authorized',
                    {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                    bencode({'failure reason':
                             'specific scrape function is not available with this tracker.'}))
        for infohash in paramslist['info_hash']:
            # Silently skip hashes this tracker does not allow.
            if self.allowed is not None and infohash not in self.allowed:
                continue
            if infohash in self.downloads:
                fs[infohash] = self.scrapedata(infohash)
    else:
        # Full scrape: every torrent the tracker knows about.
        if self.config['scrape_allowed'] != 'full':
            return (400, 'Not Authorized',
                    {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                    bencode({'failure reason':
                             'full scrape function is not available with this tracker.'}))
        if self.allowed is not None:
            hashes = self.allowed
        else:
            hashes = self.downloads
        for infohash in hashes:
            fs[infohash] = self.scrapedata(infohash)
    return (200, 'OK', {'Content-Type': 'text/plain'}, bencode({'files': fs}))
def check_allowed(self, infohash, paramslist):
    """Check whether *infohash* may use this tracker.

    Returns None when the torrent is allowed (or when no allow-list is
    configured); otherwise returns a complete (code, message, headers,
    bencoded body) denial response for the caller to send back.
    """
    if self.allowed is not None:
        if not self.allowed.has_key(infohash):
            return (200, 'Not Authorized',
                    {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                    bencode({'failure reason':
                             _("Requested download is not authorized for use with this tracker.")}))
        if self.config['allowed_controls']:
            # Per-torrent override: an allowed entry may carry its own denial text.
            if self.allowed[infohash].has_key('failure reason'):
                return (200, 'Not Authorized',
                        {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                        bencode({'failure reason':
                                 self.allowed[infohash]['failure reason']}))
    return None
def get_scrape(self, paramslist):
    """Serve a /scrape request and return (code, message, headers, body).

    A request carrying 'info_hash' keys is a specific scrape (allowed when
    config['scrape_allowed'] is 'specific' or 'full'); otherwise a full
    scrape of all known torrents is produced ('full' only).
    """
    nocache = {"Content-Type": "text/plain", "Pragma": "no-cache"}
    files = {}
    if paramslist.has_key("info_hash"):
        if self.config["scrape_allowed"] not in ("specific", "full"):
            reason = _("specific scrape function is not available with this tracker.")
            return (400, "Not Authorized", nocache, bencode({"failure reason": reason}))
        for infohash in paramslist["info_hash"]:
            # ignore hashes outside the allow-list
            if self.allowed is not None and infohash not in self.allowed:
                continue
            if infohash in self.downloads:
                files[infohash] = self.scrapedata(infohash)
    else:
        if self.config["scrape_allowed"] != "full":
            reason = _("full scrape function is not available with this tracker.")
            return (400, "Not Authorized", nocache, bencode({"failure reason": reason}))
        if self.allowed is None:
            hashes = self.downloads
        else:
            hashes = self.allowed
        for infohash in hashes:
            files[infohash] = self.scrapedata(infohash)
    return (200, "OK", {"Content-Type": "text/plain"}, bencode({"files": files}))
def init(self):
    """PyObjC designated initializer for the Generate (torrent-maker) window.

    Registers user defaults, restores the saved tracker list (bencoded in
    the defaults store), loads the Metainfo nib and configures the window.
    Returns self per Cocoa convention.
    """
    self = super(Generate, self).init()
    appDefaults = {ANNOUNCEKEY: bencode([]), COMPLETEDIRKEY: 0, GWINKEY: "", SWITCHKEY: 1}
    defaults.registerDefaults_(appDefaults)
    x = defaults.objectForKey_(ANNOUNCEKEY)
    try:
        self.trackers = bdecode(x)
    except ValueError:
        # Legacy format: the stored value was a plain tracker URL, not a
        # bencoded list.  Migrate it (or reset) and write the new format back.
        if x[:7] == "http://":
            self.trackers = [str(x.encode("utf-8"))]
        else:
            self.trackers = []
        defaults.setObject_forKey_(bencode(self.trackers), ANNOUNCEKEY)
    NSBundle.loadNibNamed_owner_("Metainfo", self)
    self.fname = None   # path dragged into the window, set later
    self.done = 0
    self.gWindow.registerForDraggedTypes_([NSFilenamesPboardType])
    self.gWindow.setFrameAutosaveName_(GWINKEY)
    self.gWindow.setFrameUsingName_(GWINKEY)
    try:
        self.announce.setStringValue_(self.trackers[0])
    except IndexError:
        # no saved trackers yet; leave the field empty
        pass
    self.subCheck.setState_(defaults.objectForKey_(COMPLETEDIRKEY))
    self.trackerPop.selectItemAtIndex_(defaults.objectForKey_(SWITCHKEY))
    self.popped_(self.trackerPop)
    return self
def test_parse(self):
    """Parse the same torrent via path, file object and raw bytes, and check
    that the computed info-hash and name agree for each entry point.

    Improvements over the original: the repeated assert triplet is factored
    into a local helper, and files are opened with context managers (the
    original leaked the handles from bare open(...).read()).
    """
    def check(data, expected_hash, expected_name):
        # One-line purpose: assert hash/name invariants for a parsed torrent.
        info = data["info"]
        self.assertEqual(hashlib.sha1(bencode.bencode(info)).hexdigest(),
                         expected_hash)
        self.assertEqual(Torrent.get_info_hash_hex(data), expected_hash)
        self.assertEqual(Torrent.get_name(data), expected_name)

    hash0 = "2034385a2621c53a490f34c5893a860664741da4"
    name0 = "Super Eurobeat Vol. 220 - Anniversary Hits"

    check(Torrent.parse_torrent("res/test0.torrent"), hash0, name0)
    with open("res/test0.torrent", "rb") as f:
        check(Torrent.parse_torrent(f), hash0, name0)
    with open("res/test0.torrent", "rb") as f:
        check(Torrent.parse_torrent_data(f.read()), hash0, name0)
    with open("res/test4.torrent", "rb") as f:
        check(Torrent.parse_torrent_data(f.read()),
              "d6fbf1d74ba8275a8dfd0c5d8b30fc635fa3e5fc",
              "【副音轨】魔法少女小圆_第12话【Len个人翻译】.mp4")
def _got_peers(self, peers):
    """Forward a DHT peer-lookup result to the waiting request.

    Does nothing once self.howmany is falsy.  An empty result posts an
    empty 'peers' value tagged with the peer id we were looking for;
    otherwise the first batch of peers is posted untagged.
    """
    if not self.howmany:
        return
    if peers:
        payload, peerid = {'peers': peers[0]}, None
    else:
        payload, peerid = {'peers': ''}, self.wanted_peerid
    self._postrequest(bencode(payload), peerid=peerid)
def _got_peers(self, peers):
    """Hand the DHT lookup outcome back via _postrequest.

    No-op when self.howmany is falsy; empty results are reported with the
    wanted peer id, non-empty ones deliver the first peer batch.
    """
    if not self.howmany:
        return
    if not peers:
        self._postrequest(bencode({'peers': ''}), peerid=self.wanted_peerid)
        return
    self._postrequest(bencode({'peers': peers[0]}), peerid=None)
def check_allowed(self, infohash, paramslist):
    """Return a ready-made denial response if *infohash* may not use this
    tracker, or None when it is allowed (or no allow-list is configured)."""
    if self.allowed is None:
        return None
    headers = {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}
    if not self.allowed.has_key(infohash):
        body = bencode({'failure reason':
                        'Requested download is not authorized for use with this tracker.'})
        return (200, 'Not Authorized', headers, body)
    # An allowed entry may carry a per-torrent denial when controls are on.
    if self.config['allowed_controls'] and self.allowed[infohash].has_key('failure reason'):
        body = bencode({'failure reason': self.allowed[infohash]['failure reason']})
        return (200, 'Not Authorized', headers, body)
    return None
def make_meta_file_dht(path, nodes, piece_len_exp, flag=Event(), progress=dummy,
                       comment=None, target=None, encoding='ascii', data_dir=None):
    """Create a trackerless (DHT) .torrent for *path*.

    if nodes is empty, then get them out of the routing table in data_dir;
    else, expect nodes to be a string of comma seperated <ip>:<port> pairs.
    This has a lot of duplicated code from make_meta_file.
    Writes the metainfo to *target*, or "<path>.torrent" when no target is
    given.  Aborts silently if *flag* is set during hashing.
    """
    piece_length = 2 ** piece_len_exp
    a, b = os.path.split(path)
    if not target:
        if b == '':
            f = a + '.torrent'
        else:
            f = os.path.join(a, b + '.torrent')
    else:
        f = target
    info = makeinfo(path, piece_length, flag, progress, encoding)
    if flag.isSet():
        # caller aborted while hashing
        return
    check_info(info)
    info_hash = sha(bencode(info)).digest()
    if not nodes:
        # Pull candidate nodes from the saved DHT routing table, excluding
        # ourselves (127.0.0.1).
        x = open(os.path.join(data_dir, 'routing_table'), 'rb')
        d = bdecode(x.read())
        x.close()
        t = KTable(Node().initWithDict({'id': d['id'], 'host': '127.0.0.1', 'port': 0}))
        for n in d['rt']:
            t.insertNode(Node().initWithDict(n))
        nodes = [(node.host, node.port) for node in t.findNodes(info_hash)
                 if node.host != '127.0.0.1']
    else:
        # Parse the "ip:port,ip:port" string supplied by the caller.
        nodes = [(a[0], int(a[1]))
                 for a in [node.strip().split(":") for node in nodes.split(",")]]
    data = {'nodes': nodes, 'creation date': int(time())}
    h = file(f, 'wb')
    data['info'] = info
    if comment:
        data['comment'] = comment
    h.write(bencode(data))
    h.close()
def check_allowed(self, infohash, paramslist):
    """Return a denial response tuple if *infohash* is not on the tracker's
    allow-list (or carries its own 'failure reason'), else None."""
    if self.allowed is not None:
        if not self.allowed.has_key(infohash):
            return (200, 'Not Authorized', default_headers,
                    bencode({'failure reason':
                             "Requested download is not authorized for use with this tracker."}))
                             #_("Requested download is not authorized for use with this tracker.")))
        if self.config['allowed_controls']:
            if self.allowed[infohash].has_key('failure reason'):
                # allow-list entry carries a per-torrent denial message
                return (200, 'Not Authorized', default_headers,
                        bencode({'failure reason':
                                 self.allowed[infohash]['failure reason']}))
    return None
def natcheckOK(self, infohash, peerid, ip, port, not_seed):
    """Record a peer that passed the NAT check in the per-torrent response
    caches: verbose dict, dict without peer id, and compact binary form.
    Each cache is split into [seed, leecher] sub-dicts keyed by peer id."""
    cache = self.becache.setdefault(infohash, [[{}, {}], [{}, {}], [{}, {}]])
    slot = not not_seed  # True -> leecher bucket, False -> seed bucket
    full_entry = {'ip': ip, 'port': port, 'peer id': peerid}
    short_entry = {'ip': ip, 'port': port}
    cache[0][slot][peerid] = Bencached(bencode(full_entry))
    cache[1][slot][peerid] = Bencached(bencode(short_entry))
    cache[2][slot][peerid] = compact_peer_info(ip, port)
def _add_seed(self, group):
    """Inject every known online source into *group*'s response caches.

    Mirrors natcheckOK's cache layout: index 0 holds full bencoded peer
    dicts, 1 dicts without peer id, 2 compact binary entries; each split
    into [seed, leecher] sub-dicts.
    """
    # bc = self.becache[group].setdefault(infohash,[[{}, {}], [{}, {}], [{}, {}]])
    for item in self.onlinesources.iteritems():
        infohash = item[0]
        ip = item[1]["ip"]
        port = item[1]["port"]
        peerid = item[1]["peer id"]
        not_seed = item[1]["not seed"]
        bc = self.becache[group].setdefault(infohash, [[{}, {}], [{}, {}], [{}, {}]])
        bc[0][not not_seed][peerid] = Bencached(bencode({"ip": ip, "port": port, "peer id": peerid}))
        bc[1][not not_seed][peerid] = Bencached(bencode({"ip": ip, "port": port}))
        bc[2][not not_seed][peerid] = compact_peer_info(ip, port)
def natcheckOK(self, infohash, peerid, ip, port, not_seed):
    """Record a NAT-checked peer.

    Seeding peers whose IP matches a configured source are tracked in
    self.onlinesources instead of the regular caches; everyone else is
    filed into the peer-group cache chosen by _get_peergroup.
    """
    print self.sourcelist  # debug output
    for source in self.sourcelist:
        # print 'source',source
        source = source.rstrip()
        if source == ip and not not_seed:
            # A designated source that is seeding: remember it separately
            # and skip the normal caches.
            self.onlinesources[infohash] = {"peer id": peerid, "ip": ip, "port": port, "not seed": 0}
            print self.onlinesources  # debug output
            return
    # bc = self.becache.setdefault(infohash,[[{}, {}], [{}, {}], [{}, {}]])
    bc, group = self._get_peergroup(ip, infohash)
    bc[0][not not_seed][peerid] = Bencached(bencode({"ip": ip, "port": port, "peer id": peerid}))
    bc[1][not not_seed][peerid] = Bencached(bencode({"ip": ip, "port": port}))
    bc[2][not not_seed][peerid] = compact_peer_info(ip, port)
def make_meta_file(path, url, piece_len_exp, flag=Event(), progress=dummy,
                   comment=None, target=None, encoding='ascii'):
    """Build and write a .torrent metainfo file for *path*.

    The announce URL is *url*; output goes to *target* or defaults to
    "<path>.torrent".  Returns early (writing nothing) if *flag* is set
    while hashing.
    """
    piece_length = 2 ** piece_len_exp
    head, tail = os.path.split(path)
    if target:
        out_name = target
    elif tail == '':
        out_name = head + '.torrent'
    else:
        out_name = os.path.join(head, tail + '.torrent')
    info = makeinfo(path, piece_length, flag, progress, encoding)
    if flag.isSet():  # aborted by the caller during hashing
        return
    check_info(info)
    out = file(out_name, 'wb')
    metadata = {'info': info, 'announce': url.strip(), 'creation date': int(time())}
    if comment:
        metadata['comment'] = comment
    out.write(bencode(metadata))
    out.close()
def sendErr(self, addr, tid, code, msg):
    """bencode a KRPC error reply (code, msg) for transaction *tid*, write
    it to *addr*, and return the number of bytes produced."""
    payload = bencode({TID: tid, TYP: ERR, ERR: (code, msg)})
    self.transport.write(payload, addr)
    return len(payload)
def make_meta_file(file, url, piece_len_exp, flag=Event(), progress=dummy,
                   progress_percent=1, comment=None, target=None):
    """Create a .torrent metainfo file for *file* announcing to *url*.

    piece_len_exp defaults to 18 (256 KiB pieces) when None is passed.
    Output goes to *target*, or "<file>.torrent" when target is falsy.
    Returns early without writing if *flag* is set during hashing.

    Fix: use the identity test `is None` for the sentinel check instead of
    the equality comparison `== None` (PEP 8 idiom; avoids surprises with
    objects overriding __eq__).  Note: the parameter name `file` shadows
    the builtin but is kept for caller compatibility.
    """
    if piece_len_exp is None:
        piece_len_exp = 18
    piece_length = 2 ** piece_len_exp
    a, b = split(file)
    if not target:
        if b == '':
            f = a + '.torrent'
        else:
            f = join(a, b + '.torrent')
    else:
        f = target
    info = makeinfo(file, piece_length, flag, progress, progress_percent)
    if flag.isSet():
        # caller aborted while hashing
        return
    check_info(info)
    h = open(f, 'wb')
    data = {'info': info, 'announce': strip(url), 'creation date': long(time())}
    if comment:
        data['comment'] = comment
    h.write(bencode(data))
    h.close()
def make_meta_file(path, url, piece_len_exp, flag=Event(), progress=dummy,
                   title=None, comment=None, target=None):
    """Write a .torrent for *path* with announce URL *url*.

    Optional 'title', 'comment' and a 'locale' (taken from the user's saved
    language preference) are embedded in the metainfo.  Output goes to
    *target* or defaults to "<path>.torrent"; aborts silently if *flag*
    is set during hashing.
    """
    data = {'announce': url.strip(), 'creation date': int(time())}
    piece_length = 2 ** piece_len_exp
    a, b = os.path.split(path)
    if not target:
        if b == '':
            f = a + '.torrent'
        else:
            f = os.path.join(a, b + '.torrent')
    else:
        f = target
    info = makeinfo(path, piece_length, flag, progress)
    if flag.isSet():
        # caller aborted while hashing
        return
    check_info(info)
    h = file(f, 'wb')
    data['info'] = info
    # TODO: encoding
    lang = read_language_file()
    if lang:
        data['locale'] = lang
    if title:
        data['title'] = title
    if comment:
        data['comment'] = comment
    h.write(bencode(data))
    h.close()
def sendErr(self, addr, tid, code, msg):
    """Send a KRPC error message for transaction *tid* to *addr*.

    Returns the length in bytes of the encoded packet.
    """
    out = bencode({TID: tid, TYP: ERR, ERR: (code, msg)})
    self.transport.write(out, addr)
    return len(out)
def torrent_hash(fname):
    """Return "<UPPERCASE-SHA1-INFOHASH> - <basename>" for the .torrent at
    *fname*.  The metainfo is validated with check_message before hashing."""
    stream = open(fname, 'rb')
    raw = stream.read()
    stream.close()
    meta = bdecode(raw)
    check_message(meta)
    digest = hashlib.sha1(bencode(meta['info'])).hexdigest().upper()
    base = os.path.basename(fname)
    return '%s - %s' % (digest, base)
def torrent_hash(fname):
    """Return "<UPPERCASE-SHA1-INFOHASH> - <basename>" for the .torrent file
    at *fname*.

    Fixes: the local `hash` shadowed the builtin (renamed to `digest`), and
    the file handle leaked if bdecode/read raised (now a context manager).
    """
    with open(fname, 'rb') as f:
        d = bdecode(f.read())
    check_message(d)  # raises on malformed metainfo
    digest = hashlib.sha1(bencode(d['info'])).hexdigest().upper()
    fn = os.path.basename(fname)
    return '%s - %s' % (digest, fn)
def check_allowed(self, infohash, paramslist):
    """Gatekeeper for announce requests: returns None when *infohash* is
    allowed, otherwise a full (code, message, headers, bencoded body)
    denial response."""
    if self.allowed is not None:
        if not self.allowed.has_key(infohash):
            return (
                200,
                "Not Authorized",
                {"Content-Type": "text/plain", "Pragma": "no-cache"},
                bencode({"failure reason": _("Requested download is not authorized for use with this tracker.")}),
            )
        if self.config["allowed_controls"]:
            # the allow-list entry itself may carry a denial message
            if self.allowed[infohash].has_key("failure reason"):
                return (
                    200,
                    "Not Authorized",
                    {"Content-Type": "text/plain", "Pragma": "no-cache"},
                    bencode({"failure reason": self.allowed[infohash]["failure reason"]}),
                )
    return None
def sendRequest(self, method, args):
    """Queue an outgoing KRPC request and return a Deferred that fires with
    the response (or is timed out after KRPC_TIMEOUT seconds).

    Transaction ids are a single byte cycling through 0..255.
    """
    tid = chr(self.mtid)
    self.mtid = (self.mtid + 1) % 256
    packet = bencode({TID: tid, TYP: REQ, REQ: method, ARG: args})
    deferred = Deferred()
    self.tids[tid] = deferred
    # schedule the timeout first, then the actual send on the next tick
    self.call_later(KRPC_TIMEOUT, self.timeOut, tid)
    self.call_later(0, self._send, packet, tid)
    return deferred
def sendRequest(self, method, args):
    """Build, register and asynchronously send a KRPC request.

    Returns a Deferred stored under the one-byte transaction id; the id
    space wraps at 256.
    """
    # make message
    # send it
    msg = {TID: chr(self.mtid), TYP: REQ, REQ: method, ARG: args}
    self.mtid = (self.mtid + 1) % 256
    s = bencode(msg)
    d = Deferred()
    self.tids[msg[TID]] = d
    # NOTE(review): this call_later variant appears to take
    # (callback, delay, args-tuple) — confirm against its definition; other
    # versions of this code use (delay, callback, *args).
    self.call_later(self.timeOut, KRPC_TIMEOUT, (msg[TID],))
    self.call_later(self._send, 0, (s, d))
    return d
def generate_(self, sender):
    """Action handler for the Generate button: validate the dialog inputs,
    persist the tracker list/preferences, then run the save panel (or go
    straight to generation for batch/subdirectory mode).

    Alerts and re-enables the button on invalid input (missing tracker URL,
    missing trackerless nodes, or no dragged file).
    """
    panel = NSSavePanel.savePanel()
    switch = self.trackerPop.selectedItem().tag()
    self.gButton.setEnabled_(0)  # disabled while generating; re-enabled on error paths
    if switch != TLAUTO and self.announce.stringValue() == "":
        # A tracker URL / node list is required unless in automatic trackerless mode.
        if switch == TRACKER:
            self.gButton.setEnabled_(1)
            NSRunAlertPanel(
                NSLocalizedString("Invalid Tracker URL", ""),
                NSLocalizedString(
                    "You must enter the tracker URL. Contact the tracker administrator for the URL.",
                    ""), None, None, None)
            return
        elif switch == TLNODES:
            self.gButton.setEnabled_(1)
            NSRunAlertPanel(
                NSLocalizedString("Invalid Trackerless Nodes", ""),
                NSLocalizedString(
                    "To use this option, you must enter the IP-address:Port of one or more stable nodes.",
                    ""), None, None, None)
            return
    elif self.fname == None:
        self.gButton.setEnabled_(1)
        NSRunAlertPanel(
            NSLocalizedString("Invalid File", "invalid file chose for generate"),
            NSLocalizedString(
                "You must drag a file or folder into the generate window first.",
                "empty file for generate"), None, None, None)
        return
    else:
        # Move the current announce URL to the front of the MRU tracker list.
        try:
            self.trackers.remove(
                str(self.announce.stringValue().encode("utf-8")))
        except ValueError:
            pass
        self.trackers.insert(0, str(self.announce.stringValue().encode("utf-8")))
        if len(self.trackers) > NUM_TRACKERS:
            self.trackers.pop()
        # Persist tracker list (bencoded) and dialog state to user defaults.
        defaults.setObject_forKey_(
            bencode(map(lambda a: str(a.encode("utf-8")), self.trackers)),
            ANNOUNCEKEY)
        defaults.setObject_forKey_(self.subCheck.state(), COMPLETEDIRKEY)
        defaults.setObject_forKey_(self.trackerPop.selectedItem().tag(), SWITCHKEY)
        path, file = os.path.split(self.fname)
        base, ext = os.path.splitext(file)
        if self.subCheck.isEnabled() and self.subCheck.state():
            # Batch mode: generate a torrent per item without asking for a name.
            self.prepareGenerateSaveFile_(self.fname)
        else:
            panel.beginSheetForDirectory_file_modalForWindow_modalDelegate_didEndSelector_contextInfo_(
                path, base + ".torrent", self.gWindow, self,
                self.savePanelDidEnd_returnCode_contextInfo_, 0)
def make_meta_file_dht(path, nodes, piece_len_exp, flag=Event(), progress=dummy,
                       title=None, comment=None, target=None, data_dir=None):
    """Create a trackerless (DHT) .torrent for *path*, optionally embedding
    a title and comment.

    if nodes is empty, then get them out of the routing table in data_dir;
    else, expect nodes to be a string of comma seperated <ip>:<port> pairs.
    This has a lot of duplicated code from make_meta_file.
    """
    piece_length = 2 ** piece_len_exp
    a, b = os.path.split(path)
    if not target:
        if b == '':
            f = a + '.torrent'
        else:
            f = os.path.join(a, b + '.torrent')
    else:
        f = target
    info = makeinfo(path, piece_length, flag, progress)
    if flag.isSet():
        # caller aborted while hashing
        return
    check_info(info)
    info_hash = sha(bencode(info)).digest()
    if not nodes:
        # Take the closest known nodes from the saved routing table,
        # excluding ourselves.
        x = open(os.path.join(data_dir, 'routing_table'), 'rb')
        d = bdecode(x.read())
        x.close()
        t = KTable(Node().initWithDict({'id': d['id'], 'host': '127.0.0.1', 'port': 0}))
        for n in d['rt']:
            t.insertNode(Node().initWithDict(n))
        nodes = [(node.host, node.port) for node in t.findNodes(info_hash)
                 if node.host != '127.0.0.1']
    else:
        # "ip:port,ip:port" -> [(ip, port), ...]
        nodes = [(a[0], int(a[1]))
                 for a in [node.strip().split(":") for node in nodes.split(",")]]
    data = {'nodes': nodes, 'creation date': int(time())}
    h = file(f, 'wb')
    data['info'] = info
    if title:
        data['title'] = title
    if comment:
        data['comment'] = comment
    h.write(bencode(data))
    h.close()
def get_scrape(self, paramslist):
    """Serve a /scrape request.

    With 'info_hash' parameters, returns per-torrent scrape data (requires
    scrape_allowed in 'specific'/'full'); without them, a full scrape of
    every known/allowed torrent (requires 'full').  Returns the usual
    (code, message, headers, bencoded body) response tuple.
    """
    fs = {}
    if paramslist.has_key('info_hash'):
        if self.config['scrape_allowed'] not in ['specific', 'full']:
            return (
                400, 'Not Authorized',
                {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                bencode({'failure reason':
                         _("specific scrape function is not available with this tracker.")}))
        for infohash in paramslist['info_hash']:
            # skip hashes not on the allow-list
            if self.allowed is not None and infohash not in self.allowed:
                continue
            if infohash in self.downloads:
                fs[infohash] = self.scrapedata(infohash)
    else:
        if self.config['scrape_allowed'] != 'full':
            return (
                400, 'Not Authorized',
                {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                bencode({'failure reason':
                         _("full scrape function is not available with this tracker.")}))
        if self.allowed is not None:
            hashes = self.allowed
        else:
            hashes = self.downloads
        for infohash in hashes:
            fs[infohash] = self.scrapedata(infohash)
    return (200, 'OK', {'Content-Type': 'text/plain'}, bencode({'files': fs}))
def checkpoint(self, auto=0):
    """Dump the DHT node id and routing table to <ddir>/routing_table.

    Failures are reported to stdout but never raised; the *auto* flag is
    unused here (other versions use it to reschedule).
    """
    d = {}
    d['id'] = self.node.id
    d['rt'] = self._dumpRoutingTable()
    try:
        f = open(os.path.join(self.ddir, "routing_table"), 'wb')
        f.write(bencode(d))
        f.close()
    except Exception, e:
        #XXX real error here
        print ">>> unable to dump routing table!", str(e)
        pass
def init(self):
    """Cocoa designated initializer for the Generate window controller.

    Restores user defaults (including the bencoded tracker MRU list,
    migrating the legacy plain-URL format), loads the Metainfo nib and
    wires up the window.  Returns self per PyObjC convention.
    """
    self = super(Generate, self).init()
    appDefaults = {
        ANNOUNCEKEY: bencode([]),
        COMPLETEDIRKEY: 0,
        GWINKEY: "",
        SWITCHKEY: 1
    }
    defaults.registerDefaults_(appDefaults)
    x = defaults.objectForKey_(ANNOUNCEKEY)
    try:
        self.trackers = bdecode(x)
    except ValueError:
        # Stored value was not bencoded: either a legacy bare URL or junk.
        if x[:7] == "http://":
            self.trackers = [str(x.encode("utf-8"))]
        else:
            self.trackers = []
        defaults.setObject_forKey_(bencode(self.trackers), ANNOUNCEKEY)
    NSBundle.loadNibNamed_owner_("Metainfo", self)
    self.fname = None  # path dropped onto the window, set by drag handling
    self.done = 0
    self.gWindow.registerForDraggedTypes_([NSFilenamesPboardType])
    self.gWindow.setFrameAutosaveName_(GWINKEY)
    self.gWindow.setFrameUsingName_(GWINKEY)
    try:
        self.announce.setStringValue_(self.trackers[0])
    except IndexError:
        # no trackers remembered yet
        pass
    self.subCheck.setState_(defaults.objectForKey_(COMPLETEDIRKEY))
    self.trackerPop.selectItemAtIndex_(defaults.objectForKey_(SWITCHKEY))
    self.popped_(self.trackerPop)
    return self
def updateStatus(self, timer):
    """Timer callback: refresh per-torrent displays, aggregate transfer
    rates, persist lifetime byte counters, and finish app termination once
    every torrent has stopped."""
    up = down = 0.0
    for c in self.torrents:
        c.display()
        up += c.uprate
        down += c.downrate
        a, b = c.getUpDownSinceLast()
        self.tup += a
        self.tdown += b
    try:
        # include DHT/UDP traffic when the DHT transport exists
        up += self.mt.dht.udp.rltransport.measure.get_rate()
    except AttributeError:
        pass
    self.upRateField.setStringValue_(utils.formRate(up))
    self.downRateField.setStringValue_(utils.formRate(down))
    # lifetime totals are stored bencoded in the defaults database
    self.defaults.setObject_forKey_(bencode(self.tup), ULBYTES)
    self.defaults.setObject_forKey_(bencode(self.tdown), DLBYTES)
    if self.terminated:
        # quit for real only after the last torrent has shut down
        if len(filter(lambda a: a.isRunning(), self.torrents)) == 0:
            self.defaults.synchronize()
            NSApp().terminate_(self)
def checkpoint(self, auto=0): d = {} d['id'] = self.node.id d['rt'] = self._dumpRoutingTable() try: f = open(os.path.join(self.ddir, "routing_table"), 'wb') f.write(bencode(d)) f.close() except: #XXX real error here print ">>> unable to dump routing table!", str(e) pass if auto: self.rawserver.add_task(self.checkpoint, randrange(int(const.CHECKPOINT_INTERVAL * .9), int(const.CHECKPOINT_INTERVAL * 1.1)), (1,))
def UploadTorrent(self, rsp, cls, uls):
    """Upload the torrent file *rsp* to the configured XML-RPC server.

    cls: category identifier passed through to the server.
    uls: upload mode — 0 skips the upload, 1 marks it public ('pub'),
         2 private ('prv').
    Results and errors are reported via the application log and a wx
    message dialog.
    """
    from BitTorrent.download import Download
    from BitTorrent.bencode import bencode, bdecode
    from sha import sha
    if uls == 0:
        return
    if uls == 1:
        flag = 'pub'
    elif uls == 2:
        flag = 'prv'
    # NOTE(review): uls values other than 0/1/2 leave `flag` unbound and
    # would raise UnboundLocalError below — confirm callers only pass 0-2.
    f = open(rsp, "rb")
    a = f.read()
    f.close()
    d = Download()
    rd = d.ParseResponseFile(rsp)
    # info-hash identifies the torrent on the server side
    infohash = sha(bencode(rd['info'])).hexdigest()
    p = policy.get_policy()
    try:
        cBTS = Server(p(policy.CBT_RPCURL))
        status = cBTS.TorrentUpload(
            p(policy.CBT_LOGIN), p(policy.CBT_PASSWORD),
            {'tdata': a, 'tname': rsp, 'type': flag, 'hash': infohash, 'cat': cls},
            self.userid)
        if status['status']:
            self.parent.parent.log.AddMsg('MakeTorrent', _('Torrent upload finished.'), 'info')
        else:
            self.parent.parent.log.AddMsg('MakeTorrent', _('Error') + ': ' + str(status['msg']), 'error')
        dlg = wx.MessageDialog(self.frame, message = status['msg'],
                               caption = 'Info',
                               style = wx.OK | wx.ICON_INFORMATION)
        dlg.ShowModal()
        dlg.Destroy()
    except Exception, e:
        dlg = wx.MessageDialog(self.frame, message = _('Error') + ' - ' + str(e),
                               caption = _('Error'),
                               style = wx.OK | wx.ICON_ERROR)
        dlg.ShowModal()
        dlg.Destroy()
def make_meta_file(file, url, piece_len_exp = 18, flag = Event(), progress = dummy,
                   progress_percent=1, comment = None, target = None):
    """Create a .torrent metainfo file for *file* announcing to *url*.

    Output goes to *target*, or "<file>.torrent" when no target is given.
    Returns early without writing if *flag* is set during hashing.

    Fix: the default-name branch tested `target == ''`, but the default
    value is None, so calling without a target fell through to
    `open(None, 'wb')` and crashed.  Testing falsiness covers both None
    and '' and is backward-compatible for callers that passed ''.
    """
    piece_length = 2 ** piece_len_exp
    a, b = split(file)
    if not target:
        if b == '':
            f = a + '.torrent'
        else:
            f = join(a, b + '.torrent')
    else:
        f = target
    info = makeinfo(file, piece_length, flag, progress, progress_percent)
    if flag.isSet():
        # caller aborted while hashing
        return
    check_info(info)
    h = open(f, 'wb')
    data = {'info': info, 'announce': strip(url), 'creation date': long(time())}
    if comment:
        data['comment'] = comment
    h.write(bencode(data))
    h.close()
def make_meta_file(path, url, piece_len_exp, flag=Event(), progress=dummy,
                   comment=None, target=None, encoding='ascii'):
    """Write a .torrent metainfo file for *path* announcing to *url*.

    Destination is *target* when given, otherwise "<path>.torrent".
    If *flag* becomes set during hashing, nothing is written.
    """
    def default_name():
        # derive "<path>.torrent" next to the source file/directory
        head, tail = os.path.split(path)
        if tail == '':
            return head + '.torrent'
        return os.path.join(head, tail + '.torrent')

    piece_length = 2 ** piece_len_exp
    dest = target or default_name()
    info = makeinfo(path, piece_length, flag, progress, encoding)
    if flag.isSet():
        return
    check_info(info)
    out = file(dest, 'wb')
    metainfo = {'info': info, 'announce': url.strip(), 'creation date': int(time())}
    if comment:
        metainfo['comment'] = comment
    out.write(bencode(metainfo))
    out.close()
def generate_(self, sender):
    """Generate-button action: validate inputs, persist preferences, then
    either start batch generation or present the save panel.

    Error paths re-enable the button and show an alert (missing tracker
    URL, missing trackerless nodes, or no file dragged in).
    """
    panel = NSSavePanel.savePanel()
    switch = self.trackerPop.selectedItem().tag()
    self.gButton.setEnabled_(0)  # re-enabled on every error path
    if switch != TLAUTO and self.announce.stringValue() == "":
        if switch == TRACKER:
            self.gButton.setEnabled_(1)
            NSRunAlertPanel(NSLocalizedString("Invalid Tracker URL", ""),
                            NSLocalizedString("You must enter the tracker URL. Contact the tracker administrator for the URL.", ""),
                            None, None, None)
            return
        elif switch == TLNODES:
            self.gButton.setEnabled_(1)
            NSRunAlertPanel(NSLocalizedString("Invalid Trackerless Nodes", ""),
                            NSLocalizedString("To use this option, you must enter the IP-address:Port of one or more stable nodes.", ""),
                            None, None, None)
            return
    elif self.fname == None:
        self.gButton.setEnabled_(1)
        NSRunAlertPanel(NSLocalizedString("Invalid File", "invalid file chose for generate"),
                        NSLocalizedString("You must drag a file or folder into the generate window first.", "empty file for generate"),
                        None, None, None)
        return
    else:
        # Promote the entered announce URL to the front of the MRU list.
        try:
            self.trackers.remove(str(self.announce.stringValue().encode("utf-8")))
        except ValueError:
            pass
        self.trackers.insert(0, str(self.announce.stringValue().encode("utf-8")))
        if len(self.trackers) > NUM_TRACKERS:
            self.trackers.pop()
        # Persist tracker list (bencoded) and the dialog's current state.
        defaults.setObject_forKey_(bencode(map(lambda a: str(a.encode("utf-8")), self.trackers)), ANNOUNCEKEY)
        defaults.setObject_forKey_(self.subCheck.state(), COMPLETEDIRKEY)
        defaults.setObject_forKey_(self.trackerPop.selectedItem().tag(), SWITCHKEY)
        path, file = os.path.split(self.fname)
        base, ext = os.path.splitext(file)
        if self.subCheck.isEnabled() and self.subCheck.state():
            # batch/subdirectory mode skips the save panel
            self.prepareGenerateSaveFile_(self.fname)
        else:
            panel.beginSheetForDirectory_file_modalForWindow_modalDelegate_didEndSelector_contextInfo_(path, base + ".torrent", self.gWindow, self, self.savePanelDidEnd_returnCode_contextInfo_, 0)
def sendErr(self, addr, tid, msg):
    """bencode an error reply carrying *msg* for transaction *tid*, send it
    to *addr* over the rate-limited transport, and return its byte length."""
    packet = bencode({TID: tid, TYP: ERR, ERR: msg})
    self.rltransport.sendto(packet, 0, addr)
    return len(packet)
except KRPCFailSilently: pass except KRPCServerError, e: olen = self.sendErr(addr, msg[TID], 202, "Server Error: %s" % e.args[0]) except KRPCProtocolError, e: olen = self.sendErr(addr, msg[TID], 204, "Protocol Error: %s" % e.args[0]) except Exception, e: print_exc(20) olen = self.sendErr(addr, msg[TID], 202, "Server Error") else: if ret: # make response out = bencode({TID: msg[TID], TYP: RSP, RSP: ret}) else: out = bencode({TID: msg[TID], TYP: RSP, RSP: {}}) # send response olen = len(out) #self.rltransport.sendto(out, 0, addr) self.transport.write(out, addr) else: if self.noisy: #print "don't know about method %s" % msg[REQ] pass # unknown method olen = self.sendErr(addr, msg[TID], *KERR_METHOD_UNKNOWN) if self.noisy: try:
def __init__(self, metainfo):
    """Convert a raw bdecoded metainfo dict into validated, filesystem-safe
    form.

    Computes total size, per-file sizes, UTF-8 original paths and
    filesystem-encoded paths (deduplicated after encoding conversions),
    piece hashes and the info-hash.  Raises BTFailure on bad path
    components of non-empty files.
    """
    # Diagnostic flags describing problems tolerated during conversion.
    self.bad_torrent_wrongfield = False
    self.bad_torrent_unsolvable = False
    self.bad_torrent_noncharacter = False
    self.bad_conversion = False
    self.bad_windows = False
    self.bad_path = False
    self.reported_errors = False
    self.is_batch = False
    self.orig_files = None
    self.files_fs = None
    self.total_bytes = 0
    self.sizes = []
    self.comment = None

    # Paths are checked manually below, so skip path checks here.
    btformats.check_message(metainfo, check_paths=False)
    info = metainfo['info']
    if info.has_key('length'):
        # single-file torrent
        self.total_bytes = info['length']
        self.sizes.append(self.total_bytes)
    else:
        # multi-file ("batch") torrent
        self.is_batch = True
        r = []
        self.orig_files = []
        self.sizes = []
        i = 0
        for f in info['files']:
            l = f['length']
            self.total_bytes += l
            self.sizes.append(l)
            path = self._get_attr_utf8(f, 'path')
            for x in path:
                if not btformats.allowed_path_re.match(x):
                    if l > 0:
                        raise BTFailure(_("Bad file path component: ") + x)
                    # BitComet makes bad .torrent files with empty
                    # filename part
                    self.bad_path = True
                    break
            else:
                # path passed validation: build (utf8, original) pairs
                p = []
                for x in path:
                    p.append((self._enforce_utf8(x), x))
                path = p
                self.orig_files.append('/'.join([x[0] for x in path]))
                k = []
                for u, o in path:
                    tf2 = self._to_fs_2(u)
                    k.append((tf2, u, o))
                r.append((k, i))
                i += 1
        # If two or more file/subdirectory names in the same directory
        # would map to the same name after encoding conversions + Windows
        # workarounds, change them. Files are changed as
        # 'a.b.c'->'a.b.0.c', 'a.b.1.c' etc, directories or files without
        # '.' as 'a'->'a.0', 'a.1' etc. If one of the multiple original
        # names was a "clean" conversion, that one is always unchanged
        # and the rest are adjusted.
        r.sort()
        self.files_fs = [None] * len(r)
        prev = [None]
        res = []
        stack = [{}]
        for x in r:
            j = 0
            x, i = x
            # find the first path component differing from the previous entry
            while x[j] == prev[j]:
                j += 1
            del res[j:]
            del stack[j + 1:]
            name = x[j][0][1]
            if name in stack[-1]:
                # collision: generate alternative names until one is free
                for name in generate_names(x[j][1], j != len(x) - 1):
                    name = self._to_fs(name)
                    if name not in stack[-1]:
                        break
            stack[-1][name] = None
            res.append(name)
            for j in range(j + 1, len(x)):
                name = x[j][0][1]
                stack.append({name: None})
                res.append(name)
            self.files_fs[i] = os.path.join(*res)
            prev = x

    self.name = self._get_field_utf8(info, 'name')
    self.name_fs = self._to_fs(self.name)
    self.piece_length = info['piece length']
    self.is_trackerless = False
    if metainfo.has_key('announce'):
        self.announce = metainfo['announce']
    elif metainfo.has_key('nodes'):
        # no tracker: bootstrap from the embedded DHT nodes instead
        self.is_trackerless = True
        self.nodes = metainfo['nodes']
    if metainfo.has_key('comment'):
        self.comment = metainfo['comment']
    # 'pieces' is a concatenation of 20-byte SHA1 digests
    self.hashes = [info['pieces'][x:x + 20]
                   for x in xrange(0, len(info['pieces']), 20)]
    self.infohash = sha(bencode(info)).digest()
def save_dfile(self):
    """Persist the tracker state to self.dfile and reschedule this method
    to run again after save_dfile_interval seconds."""
    self.rawserver.add_task(self.save_dfile, self.save_dfile_interval)
    encoded_state = bencode(self.state)
    out = open(self.dfile, 'wb')
    out.write(encoded_state)
    out.close()
if f and callable(f): try: ret = apply(f, (), msg[ARG]) except KRPCFailSilently: pass except KRPCServerError, e: olen = self.sendErr(addr, msg[TID], "Server Error: %s" % e.args[0]) except KRPCProtocolError, e: olen = self.sendErr(addr, msg[TID], "Protocol Error: %s" % e.args[0]) except Exception, e: print_exc(20) olen = self.sendErr(addr, msg[TID], "Server Error") else: if ret: # make response out = bencode({TID : msg[TID], TYP : RSP, RSP : ret}) else: out = bencode({TID : msg[TID], TYP : RSP, RSP : {}}) # send response olen = len(out) self.rltransport.sendto(out, 0, addr) else: if self.noisy: #print "don't know about method %s" % msg[REQ] pass # unknown method out = bencode({TID:msg[TID], TYP:ERR, ERR : KRPC_ERROR_METHOD_UNKNOWN}) olen = len(out) self.rltransport.sendto(out, 0, addr) if self.noisy:
def natcheckOK(self, infohash, peerid, ip, port, not_seed):
    """File a NAT-check-passing peer into the three per-torrent reply
    caches (full bencoded dict, dict without peer id, compact binary),
    each keyed by seed/leecher bucket then peer id."""
    entry = self.becache.setdefault(infohash, [[{}, {}], [{}, {}], [{}, {}]])
    bucket = not not_seed
    entry[0][bucket][peerid] = Bencached(
        bencode({'ip': ip, 'port': port, 'peer id': peerid}))
    entry[1][bucket][peerid] = Bencached(bencode({'ip': ip, 'port': port}))
    entry[2][bucket][peerid] = compact_peer_info(ip, port)
def __init__(self, metainfo):
    """Convert a raw bdecoded metainfo dict into validated, filesystem-safe
    form: sizes, UTF-8 original paths, encoding-converted paths with
    collision fixes, piece hashes and the info-hash."""
    # Flags recording problems tolerated during conversion.
    self.bad_torrent_wrongfield = False
    self.bad_torrent_unsolvable = False
    self.bad_conversion = False
    self.bad_windows = False
    self.reported_errors = False
    self.is_batch = False
    self.orig_files = None
    self.files_fs = None
    self.total_bytes = 0
    self.sizes = []
    btformats.check_message(metainfo)
    info = metainfo['info']
    if info.has_key('length'):
        # single-file torrent
        self.total_bytes = info['length']
        self.sizes.append(self.total_bytes)
    else:
        # multi-file ("batch") torrent
        self.is_batch = True
        r = []
        self.orig_files = []
        self.sizes = []
        for i, f in enumerate(info['files']):
            l = f['length']
            self.total_bytes += l
            self.sizes.append(l)
            path = self._get_attr_utf8(f, 'path')
            path = [(self._enforce_utf8(x), x) for x in path]
            self.orig_files.append('/'.join([x[0] for x in path]))
            r.append(([(self._to_fs_2(u), u, o) for u, o in path], i))
        # If two or more file/subdirectory names in the same directory
        # would map to the same name after encoding conversions + Windows
        # workarounds, change them. Files are changed as
        # 'a.b.c'->'a.b.0.c', 'a.b.1.c' etc, directories or files without
        # '.' as 'a'->'a.0', 'a.1' etc. If one of the multiple original
        # names was a "clean" conversion, that one is always unchanged
        # and the rest are adjusted.
        r.sort()
        self.files_fs = [None] * len(r)
        prev = [None]
        res = []
        stack = [{}]
        for x in r:
            j = 0
            x, i = x
            # locate first differing path component vs the previous entry
            while x[j] == prev[j]:
                j += 1
            del res[j:]
            del stack[j + 1:]
            name = x[j][0][1]
            if name in stack[-1]:
                # collision after conversion: pick an alternative name
                for name in generate_names(x[j][1], j != len(x) - 1):
                    name = self._to_fs(name)
                    if name not in stack[-1]:
                        break
            stack[-1][name] = None
            res.append(name)
            for j in range(j + 1, len(x)):
                name = x[j][0][1]
                stack.append({name: None})
                res.append(name)
            self.files_fs[i] = os.path.join(*res)
            prev = x
    self.name = self._get_field_utf8(info, 'name')
    self.name_fs = self._to_fs(self.name)
    self.piece_length = info['piece length']
    self.announce = metainfo['announce']
    # 'pieces' is a concatenation of 20-byte SHA1 digests
    self.hashes = [info['pieces'][x:x + 20]
                   for x in xrange(0, len(info['pieces']), 20)]
    self.infohash = sha(bencode(info)).digest()
# Version 1.0 (the License). You may not copy or use this file, in either # source code or executable form, except in compliance with the License. You # may obtain a copy of the License at http://www.bittorrent.com/license/. # # Software distributed under the License is distributed on an AS IS basis, # WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License # for the specific language governing rights and limitations under the # License. # Written by Henry 'Pi' James and Bram Cohen from sys import argv from BitTorrent.bencode import bencode, bdecode if len(argv) < 3: print '%s http://new.uri:port/announce file1.torrent file2.torrent' % argv[ 0] print exit(2) # common exit code for syntax error for f in argv[2:]: h = open(f, 'rb') metainfo = bdecode(h.read()) h.close() if metainfo['announce'] != argv[1]: print 'old announce for %s: %s' % (f, metainfo['announce']) metainfo['announce'] = argv[1] h = open(f, 'wb') h.write(bencode(metainfo)) h.close()
new_file = new_files[p] v = new_file[0] if new_file[1] in new_parsed: # duplicate if p not in blocked or files[p][0] != v: errfunc('**warning** ' + p + ' is a duplicate torrent for ' + new_parsed[new_file[1]]['path']) new_blocked[p] = None continue if NOISY: errfunc('adding ' + p) try: ff = open(p, 'rb') d = bdecode(ff.read()) check_message(d) h = sha(bencode(d['info'])).digest() new_file[1] = h if new_parsed.has_key(h): errfunc('**warning** ' + p + ' is a duplicate torrent for ' + new_parsed[h]['path']) new_blocked[p] = None continue a = {} a['path'] = p f = os.path.basename(p) a['file'] = f i = d['info'] l = 0 nf = 0 if i.has_key('length'):
new_file = new_files[p] v = new_file[0] if new_file[1] in new_parsed: # duplicate if p not in blocked or files[p][0] != v: errfunc(_("**warning** %s is a duplicate torrent for %s") % (p, new_parsed[new_file[1]]['path'])) new_blocked[p] = None continue if NOISY: errfunc('adding '+p) try: ff = open(p, 'rb') d = bdecode(ff.read()) check_message(d) h = sha(bencode(d['info'])).digest() new_file[1] = h if new_parsed.has_key(h): errfunc(_("**warning** %s is a duplicate torrent for %s") % (p, new_parsed[h]['path'])) new_blocked[p] = None continue a = {} a['path'] = p f = os.path.basename(p) a['file'] = f i = d['info'] l = 0 nf = 0 if i.has_key('length'):
def changeTracker(fdT, torrent, directory, trackerUrl):
    """Point a bdecoded torrent at a new tracker and write it back out.

    fdT        -- bdecoded metainfo dict; its 'announce' key is replaced
                  in place (the caller's dict is mutated).
    torrent    -- output file name.
    directory  -- directory the new torrent file is written into.
    trackerUrl -- the new announce URL.

    Returns nothing.
    """
    fdT['announce'] = trackerUrl
    # 'with' guarantees the file handle is closed even if bencode() or
    # write() raises; the previous open/write/close leaked on error.
    with open(os.path.join(directory, torrent), 'wb') as fd:
        fd.write(bencode(fdT))
data['comment'] = params['comment'] if params.has_key('real_announce_list'): # shortcut for progs calling in from outside data['announce-list'] = params['real_announce_list'] elif params.has_key('announce_list') and params['announce_list']: l = [] for tier in params['announce_list'].split('|'): l.append(tier.split(',')) data['announce-list'] = l if params.has_key('real_httpseeds'): # shortcut for progs calling in from outside data['httpseeds'] = params['real_httpseeds'] elif params.has_key('httpseeds') and params['httpseeds']: data['httpseeds'] = params['httpseeds'].split('|') h.write(bencode(data)) h.close() def calcsize(file): if not isdir(file): return getsize(file) total = 0L for s in subfiles(abspath(file)): total += getsize(s[1]) return total def uniconvertl(l, e): r = [] try: for s in l:
# may obtain a copy of the License at http://www.bittorrent.com/license/. # # Software distributed under the License is distributed on an AS IS basis, # WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License # for the specific language governing rights and limitations under the # License. # Written by Henry 'Pi' James and Bram Cohen from BitTorrent.translation import _ from os.path import basename from sys import argv, exit from BitTorrent.bencode import bencode, bdecode if len(argv) < 3: print _("Usage: %s TRACKER_URL [TORRENTFILE [TORRENTFILE ... ] ]") % basename(argv[0]) print exit(2) # common exit code for syntax error for f in argv[2:]: h = open(f, "rb") metainfo = bdecode(h.read()) h.close() if metainfo["announce"] != argv[1]: print _("old announce for %s: %s") % (f, metainfo["announce"]) metainfo["announce"] = argv[1] h = open(f, "wb") h.write(bencode(metainfo)) h.close()
class Tracker(object):
    """HTTP BitTorrent tracker: answers announce/scrape requests and
    serves an HTML info page.  Handlers return 4-tuples of
    (status code, status text, header dict, body) for the raw server.
    NOTE(review): several methods referenced here (natcheckOK, save_dfile,
    parse_allowed, delete_peer, expire_downloaders, connectback_result)
    are defined later in this class, outside this chunk.
    """

    def __init__(self, config, rawserver):
        # config: tracker option dict; rawserver: event loop used for
        # scheduling periodic tasks and NAT check connections.
        self.config = config
        self.response_size = config['response_size']
        self.max_give = config['max_give']
        self.dfile = config['dfile']          # path of the persisted state file
        self.natcheck = config['nat_check']
        favicon = config['favicon']
        self.favicon = None
        if favicon:
            try:
                h = open(favicon, 'r')
                self.favicon = h.read()
                h.close()
            except:
                print _("**warning** specified favicon file -- %s -- does not exist.") % favicon
        self.rawserver = rawserver
        self.cached = {}    # format: infohash: [[time1, l1, s1], [time2, l2, s2], [time3, l3, s3]]
        self.cached_t = {}  # format: infohash: [time, cache]
        self.times = {}     # infohash -> {peerid: last announce time}
        self.state = {}
        self.seedcount = {} # infohash -> number of complete peers

        self.only_local_override_ip = config['only_local_override_ip']
        if self.only_local_override_ip == 2:
            # 2 means "auto": allow override only when NAT checking is off.
            self.only_local_override_ip = not config['nat_check']

        # Reload persisted peer state; a corrupt state file resets to empty.
        if os.path.exists(self.dfile):
            try:
                h = open(self.dfile, 'rb')
                ds = h.read()
                h.close()
                tempstate = bdecode(ds)
                if not tempstate.has_key('peers'):
                    # old state files stored the peers dict directly
                    tempstate = {'peers': tempstate}
                statefiletemplate(tempstate)  # validates the structure, raises on mismatch
                self.state = tempstate
            except:
                print _("**warning** statefile %s corrupt; resetting") % \
                    self.dfile
        self.downloads = self.state.setdefault('peers', {})
        self.completed = self.state.setdefault('completed', {})

        self.becache = {}   # format: infohash: [[l1, s1], [l2, s2], [l3, s3]]
        # Rebuild seed counts and bencoded caches from the reloaded peers.
        for infohash, ds in self.downloads.items():
            self.seedcount[infohash] = 0
            for x, y in ds.items():
                if not y.get('nat', -1):
                    ip = y.get('given_ip')
                    if not (ip and self.allow_local_override(y['ip'], ip)):
                        ip = y['ip']
                    self.natcheckOK(infohash, x, ip, y['port'], y['left'])
                if not y['left']:
                    self.seedcount[infohash] += 1

        # Reset announce timestamps so everything looks stale at startup.
        for infohash in self.downloads:
            self.times[infohash] = {}
            for peerid in self.downloads[infohash]:
                self.times[infohash][peerid] = 0

        self.reannounce_interval = config['reannounce_interval']
        self.save_dfile_interval = config['save_dfile_interval']
        self.show_names = config['show_names']
        rawserver.add_task(self.save_dfile, self.save_dfile_interval)
        self.prevtime = time()
        self.timeout_downloaders_interval = config['timeout_downloaders_interval']
        rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval)

        # Optionally redirect stdout to a log file; '' or '-' means console.
        self.logfile = None
        self.log = None
        if (config['logfile'] != '') and (config['logfile'] != '-'):
            try:
                self.logfile = config['logfile']
                self.log = open(self.logfile, 'a')
                sys.stdout = self.log
                print _("# Log Started: "), isotime()
            except:
                print _("**warning** could not redirect stdout to log file: "), sys.exc_info()[0]

        if config['hupmonitor']:
            # Reopen the log on SIGHUP so external log rotation works.
            def huphandler(signum, frame, self=self):
                try:
                    self.log.close()
                    self.log = open(self.logfile, 'a')
                    sys.stdout = self.log
                    print _("# Log reopened: "), isotime()
                except:
                    print _("**warning** could not reopen logfile")
            signal.signal(signal.SIGHUP, huphandler)

        self.allow_get = config['allow_get']

        # With an allowed_dir, only torrents found there are tracked
        # (self.allowed is a dict); otherwise self.allowed is None and
        # anything may announce.
        if config['allowed_dir'] != '':
            self.allowed_dir = config['allowed_dir']
            self.parse_dir_interval = config['parse_dir_interval']
            self.allowed = self.state.setdefault('allowed', {})
            self.allowed_dir_files = self.state.setdefault('allowed_dir_files', {})
            self.allowed_dir_blocked = {}
            self.parse_allowed()
        else:
            try:
                del self.state['allowed']
            except:
                pass
            try:
                del self.state['allowed_dir_files']
            except:
                pass
            self.allowed = None

        # Detect urllib variants where unquote does not map '+' to space.
        self.uq_broken = unquote('+') != ' '
        self.keep_dead = config['keep_dead']

    def allow_local_override(self, ip, given_ip):
        """True if the client-supplied given_ip may replace the connection ip."""
        return is_valid_ipv4(given_ip) and (not self.only_local_override_ip or is_local_ip(ip))

    def get_infopage(self):
        """Render the HTML status page listing tracked torrents.

        Returns the standard (code, text, headers, body) response tuple;
        any rendering error is caught and reported as a 500.
        """
        try:
            if not self.config['show_infopage']:
                return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
            red = self.config['infopage_redirect']
            if red != '':
                return (302, 'Found', {'Content-Type': 'text/html', 'Location': red},
                        '<A HREF="' + red + '">Click Here</A>')

            s = StringIO()
            s.write('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n' \
                '<html><head><title>BitTorrent download info</title>\n')
            if self.favicon is not None:
                s.write('<link rel="shortcut icon" href="/favicon.ico">\n')
            s.write('</head>\n<body>\n' \
                '<h3>BitTorrent download info</h3>\n'\
                '<ul>\n'
                '<li><strong>tracker version:</strong> %s</li>\n' \
                '<li><strong>server time:</strong> %s</li>\n' \
                '</ul>\n' % (version, isotime()))
            # Build (name, infohash) pairs; name is None unless an
            # allowed_dir is configured and show_names is on.
            if self.allowed is not None:
                if self.show_names:
                    names = [(value['name'], infohash) for infohash, value in self.allowed.iteritems()]
                else:
                    names = [(None, infohash) for infohash in self.allowed]
            else:
                names = [(None, infohash) for infohash in self.downloads]
            if not names:
                s.write('<p>not tracking any files yet...</p>\n')
            else:
                names.sort()
                tn = 0   # total completed downloads (current state)
                tc = 0   # total seeds
                td = 0   # total downloaders
                tt = 0   # Total transferred
                ts = 0   # Total size
                nf = 0   # Number of files displayed
                if self.allowed is not None and self.show_names:
                    s.write('<table summary="files" border="1">\n' \
                        '<tr><th>info hash</th><th>torrent name</th><th align="right">size</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th><th align="right">transferred</th></tr>\n')
                else:
                    s.write('<table summary="files">\n' \
                        '<tr><th>info hash</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th></tr>\n')
                for name, infohash in names:
                    l = self.downloads[infohash]
                    n = self.completed.get(infohash, 0)
                    tn = tn + n
                    c = self.seedcount[infohash]
                    tc = tc + c
                    d = len(l) - c
                    td = td + d
                    nf = nf + 1
                    if self.allowed is not None and self.show_names:
                        if self.allowed.has_key(infohash):
                            sz = self.allowed[infohash]['length']  # size
                            ts = ts + sz
                            szt = sz * n   # Transferred for this torrent
                            tt = tt + szt
                            if self.allow_get == 1:
                                # link the name to the /file download endpoint
                                linkname = '<a href="/file?info_hash=' + quote(infohash) + '">' + name + '</a>'
                            else:
                                linkname = name
                            s.write('<tr><td><code>%s</code></td><td>%s</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n' \
                                % (b2a_hex(infohash), linkname, size_format(sz), c, d, n, size_format(szt)))
                    else:
                        s.write('<tr><td><code>%s</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td></tr>\n' \
                            % (b2a_hex(infohash), c, d, n))
                ttn = 0   # all-time completed downloads across torrents
                for i in self.completed.values():
                    ttn = ttn + i
                if self.allowed is not None and self.show_names:
                    s.write('<tr><td align="right" colspan="2">%i files</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td><td align="right">%s</td></tr>\n'
                        % (nf, size_format(ts), tc, td, tn, ttn, size_format(tt)))
                else:
                    s.write('<tr><td align="right">%i files</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td></tr>\n'
                        % (nf, tc, td, tn, ttn))
                s.write('</table>\n' \
                    '<ul>\n' \
                    '<li><em>info hash:</em> SHA1 hash of the "info" section of the metainfo (*.torrent)</li>\n' \
                    '<li><em>complete:</em> number of connected clients with the complete file</li>\n' \
                    '<li><em>downloading:</em> number of connected clients still downloading</li>\n' \
                    '<li><em>downloaded:</em> reported complete downloads (total: current/all)</li>\n' \
                    '<li><em>transferred:</em> torrent size * total downloaded (does not include partial transfers)</li>\n' \
                    '</ul>\n')
            s.write('</body>\n' \
                '</html>\n')
            return (200, 'OK', {'Content-Type': 'text/html; charset=iso-8859-1'}, s.getvalue())
        except:
            print_exc()
            return (500, 'Internal Server Error', {'Content-Type': 'text/html; charset=iso-8859-1'}, 'Server Error')

    def scrapedata(self, infohash, return_name=True):
        """Return the per-torrent scrape dict: complete/incomplete/downloaded
        counts, plus 'name' when names are shown for allowed torrents."""
        l = self.downloads[infohash]
        n = self.completed.get(infohash, 0)
        c = self.seedcount[infohash]
        d = len(l) - c
        f = {'complete': c, 'incomplete': d, 'downloaded': n}
        if return_name and self.show_names and self.allowed is not None:
            f['name'] = self.allowed[infohash]['name']
        return (f)

    def get_scrape(self, paramslist):
        """Handle /scrape: specific hashes when 'info_hash' params are given,
        otherwise a full scrape, both gated by config['scrape_allowed']."""
        fs = {}
        if paramslist.has_key('info_hash'):
            if self.config['scrape_allowed'] not in ['specific', 'full']:
                return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                        bencode({'failure reason': _("specific scrape function is not available with this tracker.")}))
            for infohash in paramslist['info_hash']:
                if self.allowed is not None and infohash not in self.allowed:
                    continue  # silently omit unauthorized hashes
                if infohash in self.downloads:
                    fs[infohash] = self.scrapedata(infohash)
        else:
            if self.config['scrape_allowed'] != 'full':
                return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                        bencode({'failure reason': _("full scrape function is not available with this tracker.")}))
            if self.allowed is not None:
                hashes = self.allowed
            else:
                hashes = self.downloads
            for infohash in hashes:
                fs[infohash] = self.scrapedata(infohash)
        return (200, 'OK', {'Content-Type': 'text/plain'}, bencode({'files': fs}))

    def get_file(self, infohash):
        """Handle /file: serve the .torrent for an allowed infohash as a
        download attachment; requires config['allow_get']."""
        if not self.allow_get:
            return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                    _("get function is not available with this tracker."))
        if not self.allowed.has_key(infohash):
            return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
        fname = self.allowed[infohash]['file']
        fpath = self.allowed[infohash]['path']
        return (200, 'OK', {'Content-Type': 'application/x-bittorrent',
                'Content-Disposition': 'attachment; filename=' + fname},
                open(fpath, 'rb').read())

    def check_allowed(self, infohash, paramslist):
        """Return a bencoded failure response if the infohash is not
        authorized on this tracker, or None if the announce may proceed."""
        if self.allowed is not None:
            if not self.allowed.has_key(infohash):
                return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                        bencode({'failure reason': _("Requested download is not authorized for use with this tracker.")}))
            if self.config['allowed_controls']:
                # Per-torrent control files may carry their own failure reason.
                if self.allowed[infohash].has_key('failure reason'):
                    return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                            bencode({'failure reason': self.allowed[infohash]['failure reason']}))
        return None

    def add_data(self, infohash, event, ip, paramslist):
        """Record an announce and update peer/seed bookkeeping.

        event is one of started/completed/stopped/snooped/None; ip is the
        (possibly forwarded) connection address.  Raises ValueError on
        malformed parameters.  Returns rsize, the number of peers the
        response should contain.
        """
        peers = self.downloads.setdefault(infohash, {})
        ts = self.times.setdefault(infohash, {})
        self.completed.setdefault(infohash, 0)
        self.seedcount.setdefault(infohash, 0)

        def params(key, default=None, l=paramslist):
            # paramslist values are lists; return the first value or default.
            if l.has_key(key):
                return l[key][0]
            return default

        myid = params('peer_id', '')
        if len(myid) != 20:
            raise ValueError, 'id not of length 20'
        if event not in ['started', 'completed', 'stopped', 'snooped', None]:
            raise ValueError, 'invalid event'
        port = int(params('port', ''))
        if port < 0 or port > 65535:
            raise ValueError, 'invalid port'
        left = int(params('left', ''))
        if left < 0:
            raise ValueError, 'invalid amount left'

        peer = peers.get(myid)
        mykey = params('key')
        # An update is authorized if it is a new peer, presents the same
        # key, or comes from the same address as the recorded peer.
        auth = not peer or peer.get('key', -1) == mykey or peer.get('ip') == ip

        gip = params('ip')  # client-supplied IP, honored only per policy
        local_override = gip and self.allow_local_override(ip, gip)
        if local_override:
            ip1 = gip
        else:
            ip1 = ip
        if not auth and local_override and self.only_local_override_ip:
            auth = True

        if params('numwant') is not None:
            rsize = min(int(params('numwant')), self.max_give)
        else:
            rsize = self.response_size

        if event == 'stopped':
            if peer and auth:
                self.delete_peer(infohash, myid)
        elif not peer:
            # First announce from this peer id: register it and schedule a
            # NAT connectback check unless checking is disabled/overridden.
            ts[myid] = time()
            peer = {'ip': ip, 'port': port, 'left': left}
            if mykey:
                peer['key'] = mykey
            if gip:
                peer['given ip'] = gip
            if port:
                if not self.natcheck or (local_override and self.only_local_override_ip):
                    peer['nat'] = 0
                    self.natcheckOK(infohash, myid, ip1, port, left)
                else:
                    NatCheck(self.connectback_result, infohash, myid, ip1, port, self.rawserver)
            else:
                peer['nat'] = 2**30  # no port: treat as permanently unreachable
            if event == 'completed':
                self.completed[infohash] += 1
            if not left:
                self.seedcount[infohash] += 1
            peers[myid] = peer
        else:
            if not auth:
                return rsize    # return w/o changing stats
            ts[myid] = time()
            if not left and peer['left']:
                # Downloader just became a seed: move its becache entries
                # from the leecher buckets to the seed buckets.
                self.completed[infohash] += 1
                self.seedcount[infohash] += 1
                if not peer.get('nat', -1):
                    for bc in self.becache[infohash]:
                        bc[1][myid] = bc[0][myid]
                        del bc[0][myid]
            if peer['left']:
                peer['left'] = left

            recheck = False
            if ip != peer['ip']:
                peer['ip'] = ip
                recheck = True
            if gip != peer.get('given ip'):
                if gip:
                    peer['given ip'] = gip
                elif peer.has_key('given ip'):
                    del peer['given ip']
            if local_override:
                if self.only_local_override_ip:
                    self.natcheckOK(infohash, myid, ip1, port, left)
                else:
                    recheck = True

            if port and self.natcheck:
                if recheck:
                    if peer.has_key('nat'):
                        if not peer['nat']:
                            # drop stale becache entries before re-testing
                            l = self.becache[infohash]
                            y = not peer['left']
                            for x in l:
                                del x[y][myid]
                        del peer['nat']  # restart NAT testing
                else:
                    natted = peer.get('nat', -1)
                    if natted and natted < self.natcheck:
                        recheck = True  # failed fewer than natcheck attempts

                if recheck:
                    NatCheck(self.connectback_result, infohash, myid, ip1, port, self.rawserver)

        return rsize

    def peerlist(self, infohash, stopped, is_seed, return_type, rsize):
        """Build the announce response dict with up to rsize peers.

        return_type: 0 = full dicts, 1 = no peer ids, 2 = compact binary
        strings.  Peers are drawn from shuffled caches (self.cached) that
        are refilled from self.becache when stale or too small.
        """
        data = {}    # return data
        seeds = self.seedcount[infohash]
        data['complete'] = seeds
        data['incomplete'] = len(self.downloads[infohash]) - seeds

        if (self.allowed is not None and self.config['allowed_controls'] and
                self.allowed[infohash].has_key('warning message')):
            data['warning message'] = self.allowed[infohash]['warning message']

        data['interval'] = self.reannounce_interval
        if stopped or not rsize:     # save some bandwidth
            data['peers'] = []
            return data

        bc = self.becache.setdefault(infohash, [[{}, {}], [{}, {}], [{}, {}]])
        len_l = len(bc[0][0])   # number of cached leechers
        len_s = len(bc[0][1])   # number of cached seeds
        if not (len_l + len_s):   # caches are empty!
            data['peers'] = []
            return data
        # proportion of the response drawn from leechers
        l_get_size = int(float(rsize) * (len_l) / (len_l + len_s))
        cache = self.cached.setdefault(infohash, [None, None, None])[return_type]
        if cache:
            # Invalidate a cache that is too old or too small to serve from.
            if cache[0] + self.config['min_time_between_cache_refreshes'] < time():
                cache = None
            else:
                if ((is_seed and len(cache[1]) < rsize) or
                        len(cache[1]) < l_get_size or not cache[1]):
                    cache = None
        if not cache:
            vv = [[], [], []]
            cache = [time(),
                     bc[return_type][0].values() + vv[return_type],
                     bc[return_type][1].values()]
            shuffle(cache[1])   # leechers
            shuffle(cache[2])   # seeds
            self.cached[infohash][return_type] = cache
            for rr in xrange(len(self.cached[infohash])):
                if rr != return_type:
                    try:
                        self.cached[infohash][rr][1].extend(vv[rr])
                    except:
                        pass
        if len(cache[1]) < l_get_size:
            # Not enough leechers cached: hand out everything remaining.
            peerdata = cache[1]
            if not is_seed:
                peerdata.extend(cache[2])
            cache[1] = []
            cache[2] = []
        else:
            if not is_seed:
                # Seeds first (never given to other seeds), then leechers.
                peerdata = cache[2][l_get_size - rsize:]
                del cache[2][l_get_size - rsize:]
                rsize -= len(peerdata)
            else:
                peerdata = []
            if rsize:
                peerdata.extend(cache[1][-rsize:])
                del cache[1][-rsize:]
        if return_type == 2:
            # compact format: concatenated 6-byte entries
            peerdata = ''.join(peerdata)
        data['peers'] = peerdata
        return data

    def get(self, connection, path, headers):
        """HTTP entry point: dispatch /, /scrape, /file, /favicon.ico and
        /announce requests and return a (code, text, headers, body) tuple."""
        ip = connection.get_ip()

        nip = get_forwarded_ip(headers)
        if nip and not self.only_local_override_ip:
            ip = nip  # trust the proxy-forwarded address

        paramslist = {}

        def params(key, default=None, l=paramslist):
            if l.has_key(key):
                return l[key][0]
            return default

        try:
            (scheme, netloc, path, pars, query, fragment) = urlparse(path)
            if self.uq_broken == 1:
                # compensate for unquote() not translating '+' to space
                path = path.replace('+', ' ')
                query = query.replace('+', ' ')
            path = unquote(path)[1:]
            for s in query.split('&'):
                if s != '':
                    i = s.index('=')
                    kw = unquote(s[:i])
                    paramslist.setdefault(kw, [])
                    paramslist[kw] += [unquote(s[i + 1:])]

            if path == '' or path == 'index.html':
                return self.get_infopage()
            if path == 'scrape':
                return self.get_scrape(paramslist)
            if (path == 'file'):
                return self.get_file(params('info_hash'))
            if path == 'favicon.ico' and self.favicon is not None:
                return (200, 'OK', {'Content-Type': 'image/x-icon'}, self.favicon)
            if path != 'announce':
                return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)

            # main tracker function
            infohash = params('info_hash')
            if not infohash:
                raise ValueError, 'no info hash'

            notallowed = self.check_allowed(infohash, paramslist)
            if notallowed:
                return notallowed

            event = params('event')

            rsize = self.add_data(infohash, event, ip, paramslist)

        except ValueError, e:
            return (400, 'Bad Request', {'Content-Type': 'text/plain'},
                    'you sent me garbage - ' + str(e))

        if params('compact'):
            return_type = 2
        elif params('no_peer_id'):
            return_type = 1
        else:
            return_type = 0

        data = self.peerlist(infohash, event == 'stopped', not params('left'), return_type, rsize)

        if paramslist.has_key('scrape'):
            data['scrape'] = self.scrapedata(infohash, False)

        return (200, 'OK', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, bencode(data))
class Updater(object):
    """Checks the version site for a newer release and fetches its signed
    installer torrent.  All UI interaction goes through the callback
    functions supplied at construction, marshalled via threadwrap.
    NOTE(review): can_install(), calc_installer_name() and
    calc_installer_dir(), called from get(), are defined later in this
    class, outside this chunk.
    """

    def __init__(self, threadwrap, newversionfunc, startfunc, installfunc,
                 errorfunc, test_new_version='', test_current_version=''):
        self.threadwrap = threadwrap          # for calling back to UI from thread
        self.newversionfunc = newversionfunc  # alert to new version UI function
        self.startfunc = startfunc            # start torrent UI function
        self.installfunc = installfunc        # install torrent UI function
        self.errorfunc = errorfunc            # report error UI function
        self.infohash = None
        self.version = currentversion
        self.currentversion = currentversion
        self.asked_for_install = False
        # Platform-specific subdirectory of the version site to poll.
        self.version_site = version_host
        if os.name == 'nt':
            self.version_site += 'win32/'
            if os_version not in ('XP', '2000', '2003'):
                self.version_site += 'legacy/'
        elif osx:
            self.version_site += 'osx/'
        self.debug_mode = DEBUG

        # Test hooks: force the "available" version and/or the version we
        # pretend to be running; either one enables debug output.
        if test_new_version:
            test_new_version = Version.from_str(test_new_version)
            self.debug_mode = True
            def _hack_get_available(url):
                return test_new_version
            self._get_available = _hack_get_available
        if test_current_version:
            self.debug_mode = True
            self.currentversion = Version.from_str(test_current_version)

    def debug(self, message):
        # Debug messages go through the UI error channel at WARNING level.
        if self.debug_mode:
            self.threadwrap(self.errorfunc, WARNING, message)

    def _get_available(self, url):
        """Fetch url and parse its body as a 5-character version string.

        Raises BTFailure if the fetch fails or the body does not parse.
        """
        self.debug('Updater.get_available() hitting url %s' % url)
        try:
            u = zurllib.urlopen(url)
            s = u.read()
            s = s.strip()
        except:
            raise BTFailure(_("Could not get latest version from %s") % url)
        try:
            assert len(s) == 5
            availableversion = Version.from_str(s)
        except:
            raise BTFailure(_("Could not parse new version string from %s") % url)
        return availableversion

    def get_available(self):
        """Determine the best available version and store it in self.version.

        A beta from a different line than the current one is ignored, and
        when running a beta the stable channel is also consulted.
        """
        url = self.version_site + self.currentversion.name()
        availableversion = self._get_available(url)
        if availableversion.is_beta():
            if availableversion[1] != self.currentversion[1]:
                availableversion = self.currentversion
        if self.currentversion.is_beta():
            stable_url = self.version_site + 'stable'
            available_stable_version = self._get_available(stable_url)
            if available_stable_version > availableversion:
                availableversion = available_stable_version
        self.version = availableversion
        self.debug('Updater.get_available() got %s' % str(self.version))
        return self.version

    def get(self):
        """Download the installer torrent for a newer version, if any.

        Sets self.torrentfile/self.infohash/self.total_size only after the
        torrent's SHA1 digest verifies against the detached signature
        fetched from <installer_url>.sign using the shipped public key.
        Returns early (doing nothing) when no update applies.
        """
        try:
            self.get_available()
        except BTFailure, e:
            self.threadwrap(self.errorfunc, WARNING, e)
            return

        if self.version <= self.currentversion:
            self.debug('Updater.get() not updating old version %s' % str(self.version))
            return

        if not self.can_install():
            self.debug('Updater.get() cannot install on this os')
            return

        self.installer_name = self.calc_installer_name()
        self.installer_url = self.version_site + self.installer_name + '.torrent'
        self.installer_dir = self.calc_installer_dir()

        self.torrentfile = None
        torrentfile, terrors = GetTorrent.get_url(self.installer_url)
        signature = None
        try:
            signfile = zurllib.urlopen(self.installer_url + '.sign')
        except:
            self.debug('Updater.get() failed to get signfile %s.sign' % self.installer_url)
        else:
            try:
                signature = pickle.load(signfile)
            except:
                self.debug('Updater.get() failed to load signfile %s' % signfile)

        if terrors:
            self.threadwrap(self.errorfunc, WARNING, '\n'.join(terrors))

        if torrentfile and signature:
            public_key_file = open(os.path.join(doc_root, 'public.key'), 'rb')
            public_key = pickle.load(public_key_file)
            h = sha(torrentfile).digest()
            if public_key.verify(h, signature):
                self.torrentfile = torrentfile
                b = bdecode(torrentfile)
                self.infohash = sha(bencode(b['info'])).digest()
                self.total_size = b['info']['length']
                self.debug('Updater.get() got torrent file and signature')
            else:
                self.debug('Updater.get() torrent file signature failed to verify.')
                pass
        else:
            self.debug('Updater.get() doesn\'t have torrentfile %s and signature %s' %
                       (str(type(torrentfile)), str(type(signature))))