def render(self, context):
    """Template tag: populate context['info'] with tracker scrape
    statistics (seeds / leechers / downloaded) for context['torrent'].

    Always returns '' so nothing is rendered inline; the data is only
    exposed through the template context.
    """
    context['info'] = {'leechers': 0, 'seeds': 0, 'downloaded': 0}
    if not context.has_key('torrent'):
        return ''
    # imports are local so the template tag loads even when BitTornado
    # is unavailable at module-import time
    from BitTornado.bencode import bdecode, bencode
    from sha import sha
    from urllib2 import URLError, urlopen
    fn = os.path.join(MEDIA_ROOT, 'torrents', context['torrent'])
    if not os.path.exists(fn):
        return ''
    try:
        f = open(fn, 'rb')
        # info-hash = SHA1 of the re-bencoded 'info' dict; this is the
        # key the scrape response's 'files' dict is indexed by
        meta = bdecode(f.read())['info']
        meta = sha(bencode(meta)).digest()
        f.close()
    except (IOError, ValueError, KeyError):
        return ''
    try:
        f = urlopen(SCRAPE_URL)
    except URLError:
        return ''
    m = bdecode(f.read())
    f.close()
    if not m.has_key('files'):
        return ''
    m = m['files']
    if m.has_key(meta):
        m = m[meta]
        # scrape convention: 'complete' = seeds, 'incomplete' = leechers
        context['info']['seeds'], context['info']['leechers'], context['info']['downloaded'] = m['complete'], m['incomplete'], m['downloaded']
    return ''
def getMetainfo(self, src, openoptions = 'rb', style = "file"):
    """Load and bdecode torrent metainfo from raw data, a URL or a file.

    style: "rawdata" -> src is already bencoded data,
           "url"     -> src is fetched with urlopen,
           otherwise -> src is treated as a readable local file path.
    Returns the decoded dict, or None on any failure (best effort).
    """
    if src is None:
        return None
    metainfo = None
    try:
        metainfo_file = None
        # Raw bencoded data: decode and return directly
        if style == "rawdata":
            return bdecode(src)
        # We're getting a url
        elif style == "url":
            metainfo_file = urlopen(src)
        # We're getting a file that exists
        elif os.access(src, os.R_OK):
            metainfo_file = open(src, openoptions)
        if metainfo_file is not None:
            metainfo = bdecode(metainfo_file.read())
            metainfo_file.close()
    except:
        # any failure (I/O, network, decode) yields None; make sure the
        # handle is closed before bailing out
        if metainfo_file is not None:
            try:
                metainfo_file.close()
            except:
                pass
        metainfo = None
    return metainfo
def fetch(self,referer=None):
    # If a copy of the torrent already exists on disk, sanity-check it
    # by bdecoding it; a decodable file means there is nothing to fetch.
    # NOTE(review): the function presumably continues with the actual
    # fetch below this validation step — not visible in this chunk.
    if self.exists():
        try:
            bdecode(open(self.filename(),'rb').read())
            return
        except Exception,why:
            # NOTE(review): 'why' is caught but not included in the log
            # message — confirm whether that is intentional
            self.log.warn('invalid existing torrent: %s\n' % self.filename())
            pass
def InitializeInfo(self, info = None):
    """Initialize (or re-initialize) this torrent's metadata.

    info is raw bencoded 'info' data.  When None, only the first-time
    defaults are applied; when given, the decoded info replaces the
    current one and dependent state (files, source file, columns) is
    refreshed.
    """
    if info is not None:
        # remember the name from the *old* info dict before replacing it,
        # so writeSrc() below can reference it
        torrentName = self.info['name']
        self.rawinfo = info
        self.info = self.metainfo['info'] = bdecode(info)
        # Meta data fixing: valid names, unicode names
        self.info['name'] = fixInvalidName(self.metainfo['info']['name'])
        if 'files' in self.info:
            # multi-file torrent: sanitize every path component
            for index in range(len(self.info['files'])):
                temppath = self.info['files'][index]['path']
                self.info['files'][index]['path'] = [fixInvalidName(part) for part in temppath]
        self.metainfo = self.makeunicode(dict(self.metainfo))
    # Torrent Parameters
    self.addedTime = 0
    self.completedTime = 0
    if info is None:
        # only apply once
        self.files = TorrentFiles(self)
        self.private = bool(self.info.get('private'))
        self.title = None
        self.prio = 2
        self.message = ""
        self.totalpeers = "?"
        self.totalseeds = "?"
    else:
        # metadata arrived for an existing torrent: rebuild dependent state
        self.files.__init__(self)
        self.private = bool(self.info.get('private'))
        self.writeSrc(torrentName)
        self.hasMetadata = True
        self.torrentconfig.writeSrc()
        self.updateColumns(force = True)
        self.files.updateRealSize()
def StringToValue(self, value, type):
    """Convert a stored string back into a typed value.

    Non-string inputs are assumed to already be in their final form
    (the case for some defaults) and are returned untouched.  Any
    conversion failure falls back to the default for that type.
    """
    if value is not None and not isinstance(value, str):
        return value
    try:
        if type == "boolean":
            value = (value == "1")
        elif type == "int":
            value = int(value)
        elif type == "float":
            value = float(value)
        elif type == "color":
            # colors are stored as three zero-padded 3-digit components
            value = wx.Colour(int(value[0:3]),
                              int(value[3:6]),
                              int(value[6:9]))
        elif type.startswith("bencode"):
            value = bdecode(value)
    except:
        value = None
    if value is None:
        value = self.defaultvalues[type]
    return value
def _rerequest_single(self, t, s, l, callback): try: closer = [None] def timedout(self=self, l=l, closer=closer): if self.lock.trip(l): self.errorcodes[ 'troublecode'] = 'Problem connecting to tracker - timeout exceeded' self.lock.unwait(l) try: closer[0]() except: pass self.externalsched(timedout, self.timeout) err = None try: url, q = t.split('?', 1) q += '&' + s except: url = t q = s try: h = urlopen(url + '?' + q) closer[0] = h.close data = h.read() except (IOError, error), e: err = 'Problem connecting to tracker - ' + str(e) except: err = 'Problem connecting to tracker' try: h.close() except: pass if err: if self.lock.trip(l): self.errorcodes['troublecode'] = err self.lock.unwait(l) return if data == '': if self.lock.trip(l): self.errorcodes['troublecode'] = 'no data from tracker' self.lock.unwait(l) return try: r = bdecode(data, sloppy=1) check_peers(r) except ValueError, e: if self.lock.trip(l): self.errorcodes[ 'bad_data'] = 'bad data from tracker - ' + str(e) self.lock.unwait(l) return
def _rerequest_single(self, t, s, l, callback): try: closer = [None] def timedout(self=self, l=l, closer=closer): if self.lock.trip(l): self.errorcodes["troublecode"] = "Problem connecting to tracker - timeout exceeded" self.lock.unwait(l) try: closer[0]() except: pass self.externalsched(timedout, self.timeout) err = None try: url, q = t.split("?", 1) q += "&" + s except: url = t q = s try: h = urlopen(url + "?" + q) closer[0] = h.close data = h.read() except (IOError, error), e: err = "Problem connecting to tracker - " + str(e) except: err = "Problem connecting to tracker" try: h.close() except: pass if err: if self.lock.trip(l): self.errorcodes["troublecode"] = err self.lock.unwait(l) return if data == "": if self.lock.trip(l): self.errorcodes["troublecode"] = "no data from tracker" self.lock.unwait(l) return try: r = bdecode(data, sloppy=1) check_peers(r) except ValueError, e: if self.lock.trip(l): self.errorcodes["bad_data"] = "bad data from tracker - " + str(e) self.lock.unwait(l) return
def get_metainfo(src, openoptions = 'rb', style = "file", cookies = None):
    """ Get the metainfo for a torrent """
    # style: "rawdata" -> src is bencoded data, "url" -> fetch via
    # urlopen (with optional cookies), otherwise a readable local file.
    # Returns the decoded dict or None on any failure.
    if src is None:
        return None
    metainfo = None
    try:
        metainfo_file = None
        if style == "rawdata":
            # Raw bencoded data
            return bdecode(src)
        if style == "url":
            # We're getting a url: re-quote the path component so both
            # already-quoted and unquoted URLs are handled uniformly
            url_splitted = urlsplit(src)
            try:
                url_to_open = urlunsplit([url_splitted[0], url_splitted[1],
                                          quote(unquote(url_splitted[2])),
                                          url_splitted[3], url_splitted[4]])
            except:
                # fall back to the raw url if re-quoting fails
                url_to_open = src
            metainfo_file = urlopen(url_to_open, encoding = None, cookies = cookies)
        elif os.access(src, os.R_OK):
            # We're getting a file that exists
            metainfo_file = open(src, openoptions)
        if metainfo_file is not None:
            metainfo = bdecode(metainfo_file.read(), sloppy = 1)
            metainfo_file.close()
    except:
        # best effort: close the handle (if any) and report None
        if metainfo_file is not None:
            try:
                metainfo_file.close()
            except:
                pass
        metainfo = None
    return metainfo
def _announcecopy(self, f):
    """Copy the announce URL (and announce-list, when present) from the
    .torrent file *f* into self.announce / self.announce_list.

    Failures are silently ignored, leaving the attributes untouched.
    """
    try:
        h = open(f, 'rb')
        try:
            data = h.read()
        finally:
            # fix: close the handle even if read/bdecode fails, instead
            # of leaking it on the silent error path
            h.close()
        metainfo = bdecode(data)
        self.announce = metainfo['announce']
        if metainfo.has_key('announce-list'):
            self.announce_list = metainfo['announce-list']
        else:
            self.announce_list = None
    except:
        return
def get_meta(self):
    """Read and bdecode self.file, ensuring the primary 'announce'
    tracker also appears in 'announce-list' (appended as its own tier
    when missing).  Returns the metainfo dict.
    """
    fd = open(self.file, "rb")
    try:
        meta = bdecode(fd.read())
    finally:
        # fix: don't leak the descriptor when bdecode raises
        fd.close()
    if meta.has_key("announce-list"):
        announce = meta["announce"]
        found = 0
        for tier in meta["announce-list"]:
            # fix: stop scanning as soon as the tracker is found
            if announce in tier:
                found = 1
                break
        if not found:
            meta["announce-list"].append([announce])
    return meta
def datagramReceived(self, data, addr):
    """Entry point for incoming DHT UDP packets: rate-limit (ban)
    misbehaving source IPs, then decode and dispatch KRPC requests."""
    # Handle node bans - algorithm taken from libtorrent
    match = None
    minimal = self.bans[0]
    now = clock()
    # find an existing ban slot for this source IP, tracking the least
    # used slot for reuse if none matches
    for i in self.bans:
        if i.src == addr[0]:
            match = i
            break
        if i.count < minimal.count:
            minimal = i
    if match is not None:
        match.count += 1
        if match.count >= 20:
            # 20+ packets inside the window: drop and extend the ban
            if now < match.limit:
                # if match.count == 20:
                #     print "Debug: DHT node banned for 5 minutes %s" % addr[0]
                match.limit = now + 300
                return
            match.count = 0
            match.limit = now + 5
    else:
        # recycle the least-used slot for this new source
        minimal.count = 1
        minimal.limit = now + 5
        minimal.src = addr[0]
    # Handle message
    try:
        msg = bdecode(data)
    except:
        if DEBUG:
            print_exc()
    else:
        if DEBUG:
            print "datagramReceived", msg
        if msg[TYP] == REQ:
            # dispatch to the factory's krpc_<name> handler, if any
            f = getattr(self.factory, "krpc_" + msg[REQ], None)
            msg[ARG]["_krpc_sender"] = addr
            # NOTE(review): 'host' and 'ret' are unused in the visible
            # span — presumably used further down (chunk may be truncated)
            host = addr[0]
            if f and hasattr(f, "__call__"):
                try:
                    ret = f(**msg[ARG])
                except KrpcBaseError, e:
                    # protocol-level error: report it back to the sender
                    if DEBUG2:
                        print e, addr, msg.get(VER, "unknown client")
                    self.sendMessage({TID: msg[TID], TYP: ERR, ERR: e.get()}, addr)
                except:
                    print msg.get(VER, "unknown client")
                    print_exc()
def _rerequest_single(self, t, s, l, callback): try: closer = [None] def timedout(self = self, l = l, closer = closer): if self.lock.trip(l): self.errorcodes['troublecode'] = 'Problem connecting to tracker - timeout exceeded' self.lock.unwait(l) try: closer[0]() except: pass self.externalsched(timedout, self.timeout) err = None try: h = urlopen(t+s) closer[0] = h.close data = h.read() except (IOError, error), e: err = 'Problem connecting to tracker - ' + str(e) except: err = 'Problem connecting to tracker' try: h.close() except: pass if err: if self.lock.trip(l): self.errorcodes['troublecode'] = err self.lock.unwait(l) return if data == '': if self.lock.trip(l): self.errorcodes['troublecode'] = 'no data from tracker' self.lock.unwait(l) return try: r = bdecode(data, sloppy=1) check_peers(r) except ValueError, e: if self.lock.trip(l): self.errorcodes['bad_data'] = 'bad data from tracker - ' + str(e) self.lock.unwait(l) return
def run(self):
    """Worker thread: announce ('plant') self.filename to the
    supertracker network.  Only the setup portion is visible here."""
    from sha import sha
    import os
    import time
    do_gui_operation(app.wTree.get_widget("label17").set_label , "<big><b>Planting Torrent " + os.path.basename(self.filename) +"</b></big>")
    # info-hash identifies the torrent to the supertrackers
    infohash = sha(bencode(self.bdata['info'])).digest()
    # fallback supertracker list, used unless a recent strackers.dat exists
    url = ["http://anatomic.berlios.de/network/node-b/cache.php",
           "http://anatomic.berlios.de/network/node-a/cache.php"]
    # check if file exists and is recent(ish) (< 30 days = 2592000 s)
    if os.path.isfile('strackers.dat') is True and (time.time() - os.path.getmtime('strackers.dat')) < 2592000:
        try:
            f = open('strackers.dat', "r")
        except IOError, e:
            # NOTE(review): if this open fails, 'f' is unbound and the
            # read below raises NameError — confirm intended flow
            do_gui_operation(app.status.set_label, '<span color="red">Warning: Cannot open strackers.dat</span>')
        file = f.read()
        f.close()
        try:
            b2data = bdecode(file)
            url = b2data
        except ValueError, e:
            do_gui_operation(app.status.set_label, '<span color="red">Warning: strackers.dat cannot be decoded</span>')
def DoBackgroundListenAndLoad(self):
    """ Open any files specified in the given command line argument
        passed in via shared memory
    """
    self._timer.Stop()
    self._sharedMemory.seek(0)
    marker = self._sharedMemory.read_byte()
    if marker == '+':
        # '+' flags fresh data in the segment: read the payload, then
        # acknowledge it by rewriting the marker byte as '*'
        payload = self._sharedMemory.read(1024 - 1)
        self._sharedMemory.seek(0)
        self._sharedMemory.write_byte("*")
        self._sharedMemory.flush()
        decoded = bdecode(payload, sloppy = True)
        if type(decoded) == list:
            for filename in decoded:
                self.MacOpenFile(filename)
            utility.frame.taskbarCallback()
    self._timer.Start(1000)  # 1 seconds interval
def startplant(self, widget, data=None): fatalerror = None # throws an error up if it is not present try: f = open(self.filename, "rb") file = f.read() f.close() data = bdecode(file) except (IOError, ValueError), e: if type(e) == IOError: fatalerror = '<span weight="bold" size="larger">' + self.filename + ' cannot be opened.</span>\r\rThe error returned was: ' + str(e) +'.' else: fatalerror = '<span weight="bold" size="larger">' + self.filename + ' cannot be decoded.</span>\r\rThe error returned was: ' + str(e) +'.' self.dialog = gtk.MessageDialog(None, 0, gtk.MESSAGE_ERROR) self.dialog.set_markup(fatalerror) self.dialog.set_position(gtk.WIN_POS_CENTER_ALWAYS) self.dialog.add_buttons(gtk.STOCK_OK,gtk.RESPONSE_OK) self.dialog.connect_object("delete_event", self.dialogdestroy, self.dialog) response = self.dialog.run() if response == gtk.RESPONSE_OK: # the response can't be much else self.dialog.destroy() self.status.set_label('<span color="red">Fatal Error: Select a new torrent or try again.</span>')
def post(self, *args):
    """Handle a multipart POST ('addfile'): extract an uploaded
    .torrent from the request body and queue it for download."""
    params = self.request.parameters()
    action = params.get('action')
    if action != "addfile":
        return
    environ = self.request.environ
    sock = environ['wsgi.input']
    length = int(environ["CONTENT_LENGTH"])
    data = StringIO()
    write = False       # True once we are past the part headers
    endLine = None      # the multipart boundary line, once seen
    contentType = None
    while length:
        # Read line
        line = sock.readline()
        length -= len(line)
        # Header
        if not write:
            if line.startswith("---"):
                # remember the boundary so we can detect the end of the part
                endLine = line
            elif line.startswith("Content-Type:"):
                contentType = line.split(" ")[1]
            else:
                # a blank line terminates the part headers
                write = line == '\r\n'
            continue
        # Data
        if contentType and not contentType.startswith("application/x-bittorrent"):
            break
        # NOTE(review): substring test against the *opening* boundary;
        # the closing boundary carries a trailing '--' so this looks
        # like it cannot match it — confirm against real uploads.  Also
        # raises TypeError if endLine is still None.
        if line in endLine:
            break
        data.write(line)
    torrent = None
    try:
        torrent = bdecode(data.getvalue(), sloppy = 1)
    finally:
        data.close()
    if torrent:
        utility.queue.addTorrentFromMetainfoCallback(torrent, caller = "web")
def announcecopy(self, x):
    # Let the user pick an existing .torrent and copy its tracker
    # settings into the announce controls.
    dl = wxFileDialog (self.frame, 'Choose .torrent file to use', '', '', '*.torrent', wxOPEN)
    if dl.ShowModal() != wxID_OK:
        return
    try:
        handle = open(dl.GetPath(), 'rb')
        metainfo = bdecode(handle.read())
        handle.close()
        self.annCtl.SetValue(metainfo['announce'])
        if not metainfo.has_key('announce-list'):
            self.annListCtl.SetValue('')
            return
        # display form: trackers comma-separated, one tier per line
        parts = []
        for tier in metainfo['announce-list']:
            for tracker in tier:
                parts.append(tracker)
                parts.append(', ')
            del parts[-1]
            parts.append('\n')
        self.annListCtl.SetValue(''.join(parts) + '\n\n')
    except:
        return
def _announcecopy(self, f, external = False):
    # Load tracker settings from the .torrent file f into the dialog
    # controls; when triggered externally, reset the choice dropdowns.
    try:
        handle = open(f, 'rb')
        metainfo = bdecode(handle.read())
        handle.close()
        self.annCtl.SetValue(metainfo['announce'])
        if metainfo.has_key('announce-list'):
            # display form: trackers comma-separated, one tier per line
            parts = []
            for tier in metainfo['announce-list']:
                for tracker in tier:
                    parts.append(tracker)
                    parts.append(', ')
                del parts[-1]
                parts.append('\n')
            self.annListCtl.SetValue(''.join(parts) + '\n\n')
        else:
            self.annListCtl.SetValue('')
        if external:
            self.choices.SetSelection(0)
            self.choices1.SetSelection(0)
    except:
        return
def _announcecopy(self, f, external=False):
    # Copy announce / announce-list out of torrent file f into the
    # dialog controls; silently abort on any failure.
    try:
        fh = open(f, 'rb')
        metainfo = bdecode(fh.read())
        fh.close()
        self.annCtl.SetValue(metainfo['announce'])
        if metainfo.has_key('announce-list'):
            # one tier per line, trackers within a tier comma-separated
            pieces = []
            for tier in metainfo['announce-list']:
                for tracker in tier:
                    pieces.append(tracker)
                    pieces.append(', ')
                del pieces[-1]
                pieces.append('\n')
            self.annListCtl.SetValue(''.join(pieces) + '\n\n')
        else:
            self.annListCtl.SetValue('')
        if external:
            # external trigger: reset the tracker-choice dropdowns
            self.choices.SetSelection(0)
            self.choices1.SetSelection(0)
    except:
        return
def announcecopy(self, x):
    # Ask the user for an existing .torrent, then mirror its announce
    # settings into this dialog's controls.
    dl = wxFileDialog(self.frame, 'Choose .torrent file to use', '', '', '*.torrent', wxOPEN)
    if dl.ShowModal() != wxID_OK:
        return
    try:
        fh = open(dl.GetPath(), 'rb')
        metainfo = bdecode(fh.read())
        fh.close()
        self.annCtl.SetValue(metainfo['announce'])
        if not metainfo.has_key('announce-list'):
            self.annListCtl.SetValue('')
            return
        # one tier per line, trackers within a tier comma-separated
        pieces = []
        for tier in metainfo['announce-list']:
            for tracker in tier:
                pieces.append(tracker)
                pieces.append(', ')
            del pieces[-1]
            pieces.append('\n')
        self.annListCtl.SetValue(''.join(pieces) + '\n\n')
    except:
        return
def _rerequest_single(self, t, s, l, callback):
    """Issue one announce to tracker url t with query string s.

    l is the lock/generation token guarding against timed-out attempts
    (self.lock.trip(l) is true only while this attempt is current);
    callback is invoked via postrequest once a response is processed.
    """
    try:
        closer = [None]  # holds the close method of the open connection

        def timedout(self=self, l=l, closer=closer):
            # watchdog scheduled below: report trouble and force-close
            # the connection if the attempt is still current
            if self.lock.trip(l):
                self.errorcodes['troublecode'] = 'Problem connecting to ' \
                    'tracker - timeout exceeded'
                self.lock.unwait(l)
            try:
                closer[0]()
            except:
                pass
        self.externalsched(timedout, self.timeout)
        err = None
        try:
            # append s to an existing query string, if the url has one
            url, q = t.split('?', 1)
            q += '&' + s
        except:
            url = t
            q = s
        try:
            h = urlopen(url + '?' + q)
            closer[0] = h.close
            data = h.read()
        except (IOError, error) as e:
            err = 'Problem connecting to tracker - ' + str(e)
        except:
            err = 'Problem connecting to tracker'
        try:
            h.close()
        except:
            pass
        if err:
            if self.lock.trip(l):
                self.errorcodes['troublecode'] = err
                self.lock.unwait(l)
            return
        if data == '':
            if self.lock.trip(l):
                self.errorcodes['troublecode'] = 'no data from tracker'
                self.lock.unwait(l)
            return
        try:
            r = bdecode(data, sloppy=1)
            check_peers(r)
        except ValueError as e:
            if self.lock.trip(l):
                self.errorcodes['bad_data'] = 'bad data from tracker - ' \
                    + str(e)
                self.lock.unwait(l)
            return
        if 'failure reason' in r:
            if self.lock.trip(l):
                self.errorcodes['rejected'] = self.rejectedmessage + \
                    r['failure reason']
                self.lock.unwait(l)
            return
        if self.lock.trip(l, True):
            # success!
            self.lock.unwait(l)
        else:
            # attempt timed out, don't do a callback
            callback = lambda: None

        # even if the attempt timed out, go ahead and process data
        def add(self=self, r=r, callback=callback):
            self.postrequest(r, callback)
        self.externalsched(add)
    except:
        self.exception(callback)
# Usage hint for running without a local supertracker cache.
print 'anaupdatesnodes.py -url "URL OF A SUPERTRACKER" (if strackers.dat'
print 'is not present locally)'
# Built-in fallback supertrackers, shuffled to spread load.
DEFAULTURL = ["http://anatomic.berlios.de/network/node-b/cache.php",
              "http://anatomic.berlios.de/network/node-a/cache.php"]
url = DEFAULTURL
# for now until it gets changed
import random
random.shuffle(url)
# check if file exists (and is younger than 30 days = 2592000 s)
if os.path.isfile('strackers.dat') is True and (time.time() - os.path.getmtime('strackers.dat')) < 2592000:
    try:
        f = open('strackers.dat', "r")
    except IOError, e:
        print "WARNING Cannot open file:", e.filename
    else:
        file = f.readline()
        f.close()
        try:
            bdata = bdecode(file)
        except ValueError, e:
            print "WARNING: local strackers.dat is not valid BEncoded data: Using defaults"
        else:
            random.shuffle(bdata)
            url = bdata
else:
    # no usable cache: optionally take a supertracker url from the
    # command line (--url), otherwise stay with the defaults
    if len(argv) == 3 and argv[1] == "--url":
        possurl = argv[2]
        if possurl[0:7] == "http://":
            url = [argv[2]]
        else:
            print "NOT A VALID URL (NO http://) - USING DEFAULT SUPERTRACKER LIST"
    else:
        print "WARNING: USING DEFAULT SUPERTRACKER LIST"
list+=['|'] del list[-1] liststring = '' for i in list: liststring+=i return liststring if len(argv) < 3: a,b = split(argv[0]) print 'Usage: ' + b + ' <source.torrent> <file1.torrent> [file2.torrent...]' print 'copies announce information from source to all specified torrents' exit(2) # common exit code for syntax error h = open(argv[1], 'rb') source_metainfo = bdecode(h.read()) h.close() print 'new announce: ' + source_metainfo['announce'] if source_metainfo.has_key('announce-list'): print 'new announce-list: ' + give_announce_list(source_metainfo['announce-list']) for f in argv[2:]: h = open(f, 'rb') metainfo = bdecode(h.read()) h.close() print 'old announce for %s: %s' % (f, metainfo['announce']) metainfo['announce'] = source_metainfo['announce'] if metainfo.has_key('announce-list'): print 'old announce-list for %s: %s' % (f, give_announce_list(metainfo['announce-list']))
# Optional form: --announce_list takes tiers separated by '|', with
# trackers within a tier separated by ','.
if argv[2] == '--announce_list':
    for tier in argv[3].split('|'):
        sublist = []
        for tracker in tier.split(','):
            sublist += [tracker]
        announce_list += [sublist]
    if len(argv) < 5:
        print ('error: no .torrent files given')
        print ('')
        exit(2)
    # shift past the option and its argument
    argv = argv[2:]
# Report the old announce data of every remaining .torrent, then point
# it at the new tracker.
# NOTE(review): 'announce' is defined earlier in this script — not
# visible in this chunk.
for f in argv[2:]:
    h = open(f, 'rb')
    metainfo = bdecode(h.read())
    h.close()
    print 'old announce for %s: %s' % (f, metainfo['announce'])
    metainfo['announce'] = announce
    if metainfo.has_key('announce-list'):
        # flatten the announce-list into 'a,b|c' display form
        list = []
        for tier in metainfo['announce-list']:
            for tracker in tier:
                list += [tracker, ',']
            del list[-1]
            list += ['|']
        del list[-1]
        liststring = ''
        for i in list:
            liststring += i
        print 'old announce-list for %s: %s' % (f, liststring)
def __init__(self, config, rawserver):
    """Tracker state: load the persisted statefile (dfile), rebuild the
    peer/seed caches, and schedule the periodic rawserver tasks."""
    self.config = config
    self.response_size = config['response_size']
    self.dfile = config['dfile']
    self.natcheck = config['nat_check']
    favicon = config['favicon']
    self.favicon = None
    if favicon:
        try:
            h = open(favicon,'r')
            self.favicon = h.read()
            h.close()
        except:
            print "**warning** specified favicon file -- %s -- does not exist." % favicon
    self.rawserver = rawserver
    self.cached = {}    # format: infohash: [[time1, l1, s1], [time2, l2, s2], [time3, l3, s3]]
    self.cached_t = {}  # format: infohash: [time, cache]
    self.times = {}
    self.state = {}
    self.seedcount = {}
    self.allowed_IPs = IP_List()
    if config['allowed_ips'] != '':
        self.allowed_IPs.read_fieldlist(config['allowed_ips'])
    self.only_local_override_ip = config['only_local_override_ip']
    if self.only_local_override_ip == 2:
        # 2 means "auto": honour overrides only when NAT checking is off
        self.only_local_override_ip = not config['nat_check']
    if exists(self.dfile):
        try:
            h = open(self.dfile, 'rb')
            ds = h.read()
            h.close()
            tempstate = bdecode(ds)
            if not tempstate.has_key('peers'):
                # old-format statefile: whole dict was the peers table
                tempstate = {'peers': tempstate}
            statefiletemplate(tempstate)
            self.state = tempstate
        except:
            print '**warning** statefile '+self.dfile+' corrupt; resetting'
    self.downloads = self.state.setdefault('peers', {})
    self.completed = self.state.setdefault('completed', {})
    self.becache = {}   # format: infohash: [[l1, s1], [l2, s2], [l3, s3]]
    # rebuild seed counts and re-run NAT bookkeeping for restored peers
    for infohash, ds in self.downloads.items():
        self.seedcount[infohash] = 0
        for x,y in ds.items():
            if not y.get('nat',-1):
                ip = y.get('given_ip')
                if not (ip and self.allow_local_override(y['ip'], ip)):
                    ip = y['ip']
                self.natcheckOK(infohash,x,ip,y['port'],y['left'])
            if not y['left']:
                self.seedcount[infohash] += 1
    for x in self.downloads.keys():
        self.times[x] = {}
        for y in self.downloads[x].keys():
            self.times[x][y] = 0
    self.trackerid = createPeerID('-T-')
    seed(self.trackerid)
    self.reannounce_interval = config['reannounce_interval']
    self.save_dfile_interval = config['save_dfile_interval']
    self.show_names = config['show_names']
    rawserver.add_task(self.save_dfile, self.save_dfile_interval)
    self.prevtime = clock()
    self.timeout_downloaders_interval = config['timeout_downloaders_interval']
    rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval)
    self.logfile = None
    self.log = None
    if (config['logfile'] != '') and (config['logfile'] != '-'):
        try:
            # redirect stdout into the logfile
            self.logfile = config['logfile']
            self.log = open(self.logfile,'a')
            sys.stdout = self.log
            print "# Log Started: ", isotime()
        except:
            print "**warning** could not redirect stdout to log file: ", sys.exc_info()[0]
    if config['hupmonitor']:
        def huphandler(signum, frame, self = self):
            # SIGHUP: reopen the logfile (logrotate support)
            try:
                self.log.close ()
                self.log = open(self.logfile,'a')
                sys.stdout = self.log
                print "# Log reopened: ", isotime()
            except:
                print "**warning** could not reopen logfile"
        signal.signal(signal.SIGHUP, huphandler)
    self.allow_get = config['allow_get']
    self.t2tlist = T2TList(config['multitracker_enabled'], self.trackerid,
                           config['multitracker_reannounce_interval'],
                           config['multitracker_maxpeers'],
                           config['http_timeout'], self.rawserver)
    if config['allowed_dir'] != '':
        self.allowed_dir = config['allowed_dir']
        self.parse_dir_interval = config['parse_dir_interval']
        self.allowed = self.state.setdefault('allowed',{})
        self.allowed_dir_files = self.state.setdefault('allowed_dir_files',{})
        self.allowed_dir_blocked = {}
        self.parse_allowed()
    else:
        # no allowed_dir: drop any stale allowed-state from the statefile
        try:
            del self.state['allowed']
        except:
            pass
        try:
            del self.state['allowed_dir_files']
        except:
            pass
        self.allowed = None
        if config['multitracker_allowed'] == 'autodetect':
            config['multitracker_allowed'] = 'none'
    self.uq_broken = unquote('+') != ' '
    self.keep_dead = config['keep_dead']
    aggregator = config['aggregator']
    if aggregator == '0':
        self.is_aggregator = False
        self.aggregator_key = None
    else:
        self.is_aggregator = True
        if aggregator == '1':
            self.aggregator_key = None
        else:
            self.aggregator_key = aggregator
        # aggregators never NAT-check
        self.natcheck = False
    send = config['aggregate_forward']
    if send == '':
        self.aggregate_forward = None
    else:
        try:
            # "url,password" form; password is optional
            self.aggregate_forward, self.aggregate_password = send.split(',')
        except:
            self.aggregate_forward = send
            self.aggregate_password = None
# Show progress in the GUI, then contact supertrackers one by one until
# one of them returns a usable node list.
do_gui_operation(app.wTree.get_widget("image2").set_property, "stock", gtk.STOCK_MEDIA_PLAY)
do_gui_operation(app.wTree.get_widget("label13").set_label, "<big>Communicating with supertrackers</big>")
for x in url:
    stracker = x
    x += urlend
    try:
        import urllib2
        f = urllib2.Request(x)
        f.add_header('User-agent', 'Anatomic P2P Planter Wizard GUI CVS Edition (S) +http://anatomic.berlios.de/' )
        opener = urllib2.build_opener()
        data = opener.open(f).read()
    except IOError:
        # unreachable supertracker: try the next one
        pass
    else:
        try:
            bdata3 = bdecode(data)
        except ValueError:
            # undecodable reply: try the next supertracker
            pass
        else:
            # a reply with at least 8 entries counts as a usable node list
            if len(bdata3) >= 8:
                status = 1  # i.e. successful first stage
                url.remove(stracker)  # url becomes a list of other supertrackers
                break
# if all of the strackers have been cycled through and nothing useful
# has been replied then die
if status == 0:
    do_gui_operation(app.fatal, "<big><b>Error: No supertrackers were reached</b></big>\r\rThis is likely to be caused by a problem with your internet connection. If you are sure your internet connection is alive please run anaupdatesnodes to discover more supertrackers.\r\rPressing forward will close this wizard.")
    sys.exit(2)
# or else
# bdata3 is a single tracker left behind. tracker is going to have the
# querystring concatenated on it
do_gui_operation(app.wTree.get_widget("image2").set_property, "stock", gtk.STOCK_APPLY)
do_gui_operation(app.progress.set_fraction, 0.3)
def __init__(self, config, rawserver):
    """Tracker state (modernised variant): load the persisted statefile
    (dfile), rebuild the peer/seed caches, apply IP allow/ban lists,
    and schedule the periodic rawserver tasks."""
    self.config = config
    self.response_size = config['response_size']
    self.dfile = config['dfile']
    self.natcheck = config['nat_check']
    favicon = config['favicon']
    self.parse_dir_interval = config['parse_dir_interval']
    self.favicon = None
    if favicon:
        try:
            with open(favicon, 'r') as h:
                self.favicon = h.read()
        except:
            print "**warning** specified favicon file -- %s -- does not " \
                "exist." % favicon
    self.rawserver = rawserver
    self.cached = {}    # format: infohash: [[time1, l1, s1], ...]
    self.cached_t = {}  # format: infohash: [time, cache]
    self.times = {}
    self.state = {}
    self.seedcount = {}
    self.allowed_IPs = None
    self.banned_IPs = None
    if config['allowed_ips'] or config['banned_ips']:
        self.allowed_ip_mtime = 0
        self.banned_ip_mtime = 0
        self.read_ip_lists()
    self.only_local_override_ip = config['only_local_override_ip']
    if self.only_local_override_ip == 2:
        # 2 means "auto": honour overrides only when NAT checking is off
        self.only_local_override_ip = not config['nat_check']
    if CHECK_PEER_ID_ENCRYPTED and not CRYPTO_OK:
        print '**warning** crypto library not installed, cannot ' \
            'completely verify encrypted peers'
    if os.path.exists(self.dfile):
        try:
            with open(self.dfile, 'rb') as h:
                ds = h.read()
            tempstate = bdecode(ds)
            if 'peers' not in tempstate:
                # old-format statefile: whole dict was the peers table
                tempstate = {'peers': tempstate}
            statefiletemplate(tempstate)
            self.state = tempstate
        except:
            print '**warning** statefile ' + self.dfile + \
                ' corrupt; resetting'
    self.downloads = self.state.setdefault('peers', {})
    self.completed = self.state.setdefault('completed', {})
    self.becache = {}
    ''' format: infohash: [[l0, s0], [l1, s1], ...]
        l0,s0 = compact, not requirecrypto=1
        l1,s1 = compact, only supportcrypto=1
        l2,s2 = [compact, crypto_flag], all peers
        if --compact_reqd 0:
            l3,s3 = [ip,port,id]
            l4,l4 = [ip,port] nopeerid
    '''
    if config['compact_reqd']:
        x = 3
    else:
        x = 5
    self.cache_default = [({}, {}) for i in xrange(x)]
    # rebuild seed counts, drop disallowed/banned peers, and re-run NAT
    # bookkeeping for restored peers
    for infohash, ds in self.downloads.iteritems():
        self.seedcount[infohash] = 0
        for x, y in ds.iteritems():
            ip = y['ip']
            if self.allowed_IPs and ip not in self.allowed_IPs \
                    or self.banned_IPs and ip in self.banned_IPs:
                del ds[x]
                continue
            if not y['left']:
                self.seedcount[infohash] += 1
            if y.get('nat', -1):
                continue
            gip = y.get('given_ip')
            if is_valid_ip(gip) and (not self.only_local_override_ip
                                     or ip in local_IPs):
                ip = gip
            self.natcheckOK(infohash, x, ip, y['port'], y)
    for x in self.downloads:
        self.times[x] = {}
        for y in self.downloads[x]:
            self.times[x][y] = 0
    self.trackerid = createPeerID('-T-')
    random.seed(self.trackerid)
    self.reannounce_interval = config['reannounce_interval']
    self.save_dfile_interval = config['save_dfile_interval']
    self.show_names = config['show_names']
    rawserver.add_task(self.save_state, self.save_dfile_interval)
    self.prevtime = clock()
    self.timeout_downloaders_interval = config[
        'timeout_downloaders_interval']
    rawserver.add_task(self.expire_downloaders,
                       self.timeout_downloaders_interval)
    self.logfile = None
    self.log = None
    if (config['logfile']) and (config['logfile'] != '-'):
        try:
            # redirect stdout into the logfile
            self.logfile = config['logfile']
            self.log = open(self.logfile, 'a')
            sys.stdout = self.log
            print "# Log Started: ", isotime()
        except:
            print "**warning** could not redirect stdout to log file: " + \
                sys.exc_info()[0]
    if config['hupmonitor']:
        def huphandler(signum, frame, self=self):
            # SIGHUP: reopen the logfile (logrotate support)
            try:
                self.log.close()
                self.log = open(self.logfile, 'a')
                sys.stdout = self.log
                print "# Log reopened: ", isotime()
            except:
                print "**warning** could not reopen logfile"
        signal.signal(signal.SIGHUP, huphandler)
    self.allow_get = config['allow_get']
    self.t2tlist = T2TList(config['multitracker_enabled'], self.trackerid,
                           config['multitracker_reannounce_interval'],
                           config['multitracker_maxpeers'],
                           config['http_timeout'], self.rawserver)
    # allowed_list and allowed_dir are mutually exclusive torrent
    # whitelisting mechanisms
    if config['allowed_list']:
        if config['allowed_dir']:
            print '**warning** allowed_dir and allowed_list options ' \
                'cannot be used together'
            print '**warning** disregarding allowed_dir'
            config['allowed_dir'] = ''
        self.allowed = self.state.setdefault('allowed_list', {})
        self.allowed_list_mtime = 0
        self.parse_allowed()
        self.remove_from_state('allowed', 'allowed_dir_files')
        if config['multitracker_allowed'] == 'autodetect':
            config['multitracker_allowed'] = 'none'
        config['allowed_controls'] = 0
    elif config['allowed_dir']:
        self.allowed = self.state.setdefault('allowed', {})
        self.allowed_dir_files = self.state.setdefault(
            'allowed_dir_files', {})
        self.allowed_dir_blocked = set()
        self.parse_allowed()
        self.remove_from_state('allowed_list')
    else:
        self.allowed = None
        self.remove_from_state('allowed', 'allowed_dir_files',
                               'allowed_list')
        if config['multitracker_allowed'] == 'autodetect':
            config['multitracker_allowed'] = 'none'
        config['allowed_controls'] = 0
    self.uq_broken = unquote('+') != ' '
    self.keep_dead = config['keep_dead']
    self.Filter = Filter(rawserver.add_task)
    aggregator = config['aggregator']
    if aggregator == '0':
        self.is_aggregator = False
        self.aggregator_key = None
    else:
        self.is_aggregator = True
        if aggregator == '1':
            self.aggregator_key = None
        else:
            self.aggregator_key = aggregator
        # aggregators never NAT-check
        self.natcheck = False
    send = config['aggregate_forward']
    if not send:
        self.aggregate_forward = None
    else:
        # "url,password" form; password is optional
        sends = send.split(',')
        self.aggregate_forward = sends[0]
        self.aggregate_password = sends[1] if len(sends) > 1 else None
    self.dedicated_seed_id = config['dedicated_seed_id']
    self.is_seeded = {}
    self.cachetime = 0
    self.cachetimeupdate()
announce_list = []
# Optional form: --announce_list takes tiers separated by '|', with
# trackers within a tier separated by ','.
if argv[2] == '--announce_list':
    for tier in argv[3].split('|'):
        sublist = []
        for tracker in tier.split(','):
            sublist += [tracker]
        announce_list += [sublist]
    if len(argv) < 5:
        print('error: no .torrent files given')
        print('')
        exit(2)
    # shift past the option and its argument
    argv = argv[2:]
# Report the old announce data of every remaining .torrent, then point
# it at the new tracker.
# NOTE(review): 'announce' is defined earlier in this script — not
# visible in this chunk.
for f in argv[2:]:
    h = open(f, 'rb')
    metainfo = bdecode(h.read())
    h.close()
    print 'old announce for %s: %s' % (f, metainfo['announce'])
    metainfo['announce'] = announce
    if metainfo.has_key('announce-list'):
        # flatten the announce-list into 'a,b|c' display form
        list = []
        for tier in metainfo['announce-list']:
            for tracker in tier:
                list += [tracker, ',']
            del list[-1]
            list += ['|']
        del list[-1]
        liststring = ''
        for i in list:
            liststring += i
        print 'old announce-list for %s: %s' % (f, liststring)