class FakeCallback():

    """Test double mimicking the Dispersy callback interface on top of a
    TimedTaskQueue.  Extra register() parameters (priority, id_, include_id)
    are accepted for signature compatibility but ignored."""

    def __init__(self):
        from Tribler.Utilities.TimedTaskQueue import TimedTaskQueue
        self.queue = TimedTaskQueue("FakeCallback")

    def register(self, call, args=(), kargs=None, delay=0.0, priority=0, id_=u"", callback=None, callback_args=(), callback_kargs=None, include_id=False):
        """Schedule CALL(*args, **kargs) after DELAY seconds; afterwards run
        CALLBACK(*callback_args, **callback_kargs) when one is given."""
        def run_scheduled():
            # A falsy kargs/callback_kargs (None or {}) means "no keywords".
            call(*args, **(kargs or {}))
            if callback:
                callback(*callback_args, **(callback_kargs or {}))
        self.queue.add_task(run_scheduled, t=delay)

    def shutdown(self, immediately=False):
        self.queue.shutdown(immediately)
class TestTimedTaskQueue(unittest.TestCase):

    """Exercise TimedTaskQueue scheduling: delayed tasks run in time order,
    and tasks with equal (zero) delay run in FIFO submission order.  The
    task* methods accumulate into self.count and assert the expected
    intermediate totals."""

    def setUp(self):
        self.queue = TimedTaskQueue()

    def tearDown(self):
        self.queue.shutdown()
        del self.queue

    def test_addTask(self):
        # Submit out of time order; the queue must still run them by delay.
        self.count = 0
        for task, delay in ((self.task3a, 3), (self.task0, 0), (self.task3b, 3), (self.task2, 1)):
            self.queue.add_task(task, delay)
        sleep(6)
        assert self.count == 11

    def task0(self):
        self.count += 1
        assert self.count == 1

    def task2(self):
        self.count += 2
        assert self.count == 3

    def task3a(self):
        # task3a/task3b share the same delay, so either may run first.
        self.count += 4
        assert self.count in (7, 11)

    def task3b(self):
        self.count += 4
        assert self.count in (7, 11)

    def test_addTask0FIFO(self):
        # Four zero-delay tasks must execute in submission order.
        self.count = 0
        for task in (self.task0a, self.task0b, self.task0c, self.task0d):
            self.queue.add_task(task, 0)
        sleep(6)
        assert self.count == 4

    def task0a(self):
        assert self.count == 0
        self.count = 1

    def task0b(self):
        assert self.count == 1
        self.count = 2

    def task0c(self):
        assert self.count == 2
        self.count = 3

    def task0d(self):
        assert self.count == 3
        self.count = 4
class RemoteTorrentHandler:

    """Singleton coordinating the download of remote .torrent files, torrent
    messages and thumbnails through prioritized requester pools: swift-based
    (trequesters), magnet/DHT-based (drequesters) and search-community
    message-based (mrequesters).  User callbacks are stored per request key
    and fired from the handler's TimedTaskQueue thread."""

    __single = None

    def __init__(self):
        # Classic Tribler singleton: constructing a second instance is an error.
        if RemoteTorrentHandler.__single:
            raise RuntimeError, "RemoteTorrentHandler is singleton"
        RemoteTorrentHandler.__single = self

        self.registered = False          # set True by register()
        self._searchcommunity = None     # lazily resolved, see searchcommunity property
        self.callbacks = {}              # request key -> set of user callbacks

        # Requester pools, keyed by priority level.
        self.trequesters = {}            # swift torrent requesters
        self.mrequesters = {}            # torrent-message requesters
        self.drequesters = {}            # magnet/DHT requesters
        self.tnrequester = None          # thumbnail requester

    def getInstance(*args, **kw):
        # Create the singleton lazily on first access.
        if RemoteTorrentHandler.__single is None:
            RemoteTorrentHandler(*args, **kw)
        return RemoteTorrentHandler.__single
    getInstance = staticmethod(getInstance)

    def register(self, dispersy, session, max_num_torrents):
        """Wire the handler to a running session/dispersy, create the task
        queue and requesters, and start the overflow-check worker."""
        self.session = session
        self.dispersy = dispersy
        self.max_num_torrents = max_num_torrents
        self.tor_col_dir = self.session.get_torrent_collecting_dir()

        from Tribler.Utilities.TimedTaskQueue import TimedTaskQueue
        self.tqueue = TimedTaskQueue("RemoteTorrentHandler")
        self.scheduletask = self.tqueue.add_task

        self.torrent_db = session.open_dbhandler('torrents')
        self.channel_db = self.session.open_dbhandler(NTFY_CHANNELCAST)

        # Two magnet requesters: priority 0 (urgent) and 1 (background).
        self.drequesters[0] = MagnetRequester(self, 0)
        self.drequesters[1] = MagnetRequester(self, 1)
        self.tnrequester = ThumbnailRequester(self, self.session)
        self.registered = True

        # Periodic pruning of the collected-torrent store (generator worker).
        startWorker(None, self.__check_overflow)

    def is_registered(self):
        return self.registered

    def shutdown(self):
        self.tqueue.shutdown()

    def __check_overflow(self):
        # Generator-style worker: each yielded value is the delay in seconds
        # before the worker resumes.
        while True:
            num_torrents = self.torrent_db.getNumberCollectedTorrents()
            if DEBUG:
                print >>sys.stderr,"rtorrent: check overflow: current", num_torrents, "max", self.max_num_torrents

            if num_torrents > self.max_num_torrents:
                # Prune down to 95% of the limit in steps of at least 25
                # torrents, pausing 5s between steps to spread the load.
                num_delete = int(num_torrents - self.max_num_torrents*0.95)
                num_per_step = max(25, num_delete / 180)
                print >> sys.stderr, "rtorrent: ** limit space::", num_torrents, self.max_num_torrents, num_delete
                while num_delete > 0:
                    to_remove = min(num_delete, num_per_step)
                    num_delete -= to_remove
                    self.torrent_db.freeSpace(to_remove)
                    yield 5.0
                LOW_PRIO_COLLECTING = 4
            elif num_torrents > (self.max_num_torrents * .75):
                LOW_PRIO_COLLECTING = 3
            else:
                LOW_PRIO_COLLECTING = 2

            # NOTE(review): LOW_PRIO_COLLECTING is only used in the debug
            # print below; it is not stored or returned anywhere visible here.
            if DEBUG:
                print >> sys.stderr, "rtorrent: setting low_prio_collection to one .torrent every %.1f seconds"%(LOW_PRIO_COLLECTING *.5)

            yield 30 * 60.0 #run every 30 minutes

    @property
    def searchcommunity(self):
        # Resolve and cache the SearchCommunity from dispersy on first use;
        # returns None (implicitly) before register() has been called.
        if self.registered:
            if not self._searchcommunity:
                from Tribler.community.search.community import SearchCommunity
                for community in self.dispersy.get_communities():
                    if isinstance(community, SearchCommunity):
                        self._searchcommunity = community
                        break
            return self._searchcommunity

    def has_thumbnail(self, infohash):
        # A thumbnail exists when the per-torrent thumbs directory is present
        # and non-empty (returns the truthy listing, not a strict bool).
        thumb_dir = os.path.join(self.tor_col_dir, 'thumbs-'+binascii.hexlify(infohash))
        return os.path.isdir(thumb_dir) and os.listdir(thumb_dir)

    def download_thumbnail(self, candidate, roothash, infohash, usercallback = None, timeout = None):
        # Defer the actual request to the handler's task-queue thread.
        if self.registered and not self.has_thumbnail(roothash):
            raw_lambda = lambda candidate=candidate, roothash=roothash, infohash=infohash, usercallback=usercallback, timeout = timeout: self._download_thumbnail(candidate, roothash, infohash, usercallback, timeout)
            self.scheduletask(raw_lambda)

    def _download_thumbnail(self, candidate, roothash, infohash, usercallback, timeout):
        if usercallback:
            # Thumbnail callbacks are keyed on the bare roothash.
            self.callbacks.setdefault(roothash, set()).add(usercallback)

        self.tnrequester.add_request((roothash, infohash), candidate, timeout)

        if DEBUG:
            print >> sys.stderr,'rtorrent: adding thumbnail request:', roothash or '', candidate

    def download_torrent(self, candidate, infohash = None, roothash = None, usercallback = None, prio = 1, timeout = None):
        # Defer to the task-queue thread; arguments are bound via defaults.
        if self.registered:
            raw_lambda = lambda candidate=candidate, infohash=infohash, roothash=roothash, usercallback=usercallback, prio=prio, timeout = timeout: self._download_torrent(candidate, infohash, roothash, usercallback, prio, timeout)
            self.scheduletask(raw_lambda)

    def _download_torrent(self, candidate, infohash, roothash, usercallback, prio, timeout):
        if self.registered:
            assert infohash or roothash, "We need either the info or roothash"

            # Prefer swift collection when we know both a candidate and a
            # roothash; otherwise fall back to magnet/DHT lookup by infohash.
            doSwiftCollect = candidate and roothash
            if doSwiftCollect:
                requesters = self.trequesters
                hash = (infohash, roothash)

            elif infohash:
                requesters = self.drequesters
                hash = infohash

                #fix prio levels to 1 and 0
                prio = min(prio, 1)
            else:
                return

            if usercallback:
                self.callbacks.setdefault(hash, set()).add(usercallback)

            #look for lowest prio requester, which already has this infohash scheduled
            # NOTE(review): range(prio, prio + 1) inspects only the requested
            # priority itself — confirm whether a wider range was intended.
            requester = None
            for i in range(prio, prio + 1):
                if i in requesters and requesters[i].is_being_requested(hash):
                    requester = requesters[i]
                    break

            #if not found, then used/create this requester
            if not requester:
                if prio not in requesters:
                    if doSwiftCollect:
                        # Swift requester falls back to the low-prio magnet requester.
                        requesters[prio] = TorrentRequester(self, self.drequesters[1], self.session, prio)
                    else:
                        requesters[prio] = MagnetRequester(self, prio)
                requester = requesters[prio]

            #make request
            requester.add_request(hash, candidate, timeout)

            if DEBUG:
                print >>sys.stderr,'rtorrent: adding torrent request:', bin2str(infohash or ''), bin2str(roothash or ''), candidate, prio

    def download_torrentmessages(self, candidate, infohashes, usercallback = None, prio = 1):
        # Defer to the task-queue thread.
        if self.registered:
            raw_lambda = lambda candidate=candidate, infohashes=infohashes, usercallback=usercallback, prio=prio: self._download_torrentmessages(candidate, infohashes, usercallback, prio)
            self.scheduletask(raw_lambda)

    def _download_torrentmessages(self, candidate, infohashes, usercallback, prio):
        assert all(isinstance(infohash, str) for infohash in infohashes), "INFOHASH has invalid type"
        assert all(len(infohash) == INFOHASH_LENGTH for infohash in infohashes), "INFOHASH has invalid length:"

        if self.registered:
            if usercallback:
                for infohash in infohashes:
                    # Message callbacks are keyed on (infohash, None); the
                    # default argument binds each infohash at definition time.
                    callback = lambda infohash=infohash: usercallback(infohash)
                    self.callbacks.setdefault((infohash,None), set()).add(callback)

            if prio not in self.mrequesters:
                self.mrequesters[prio] = TorrentMessageRequester(self, self.searchcommunity, prio)

            requester = self.mrequesters[prio]

            #make request
            requester.add_request(frozenset(infohashes), candidate)

            if DEBUG:
                print >>sys.stderr,'rtorrent: adding torrent messages request:', map(bin2str, infohashes), candidate, prio

    def has_torrent(self, infohash, callback):
        # Run the db/filesystem check on a worker thread.
        startWorker(None, self._has_torrent, wargs = (infohash, self.tor_col_dir, callback))

    def _has_torrent(self, infohash, tor_col_dir, callback):
        #save torrent
        # Result is False when not collected, otherwise the on-disk filename.
        result = False
        torrent = self.torrent_db.getTorrent(infohash, ['torrent_file_name', 'swift_torrent_hash'], include_mypref = False)
        if torrent:
            if torrent.get('torrent_file_name', False) and os.path.isfile(torrent['torrent_file_name']):
                result = torrent['torrent_file_name']
            elif torrent.get('swift_torrent_hash', False):
                sdef = SwiftDef(torrent['swift_torrent_hash'])
                torrent_filename = os.path.join(tor_col_dir, sdef.get_roothash_as_hex())

                if os.path.isfile(torrent_filename):
                    # Remember the discovered filename for the next lookup.
                    self.torrent_db.updateTorrent(infohash, notify=False, torrent_file_name=torrent_filename)
                    result = torrent_filename

        # Deliver the result via the handler's task queue.
        raw_lambda = lambda result=result: callback(result)
        self.scheduletask(raw_lambda)

    def save_torrent(self, tdef, callback = None):
        if self.registered:
            def do_schedule(filename):
                # Only write the torrent when it is not collected yet.
                if not filename:
                    self._save_torrent(tdef, callback)
                elif callback:
                    startWorker(None, callback)

            infohash = tdef.get_infohash()
            self.has_torrent(infohash, do_schedule)

    def _save_torrent(self, tdef, callback = None):
        # Write to a unique tmp_ file first, then move it into the collected
        # directory via _write_to_collected.
        tmp_filename = os.path.join(self.session.get_torrent_collecting_dir(), "tmp_"+get_collected_torrent_filename(tdef.get_infohash()))
        filename_index = 0
        while os.path.exists(tmp_filename):
            filename_index += 1
            tmp_filename = os.path.join(self.session.get_torrent_collecting_dir(), ("tmp_%d_"%filename_index)+get_collected_torrent_filename(tdef.get_infohash()))

        tdef.save(tmp_filename)
        sdef, swiftpath = self._write_to_collected(tmp_filename)

        try:
            os.remove(tmp_filename)
        except:
            # File may still be held open; retry the removal at process exit.
            atexit.register(lambda tmp_filename=tmp_filename: os.remove(tmp_filename))

        def do_db(callback):
            #add this new torrent to db
            infohash = tdef.get_infohash()
            if self.torrent_db.hasTorrent(infohash):
                self.torrent_db.updateTorrent(infohash, swift_torrent_hash = sdef.get_roothash(), torrent_file_name = swiftpath)
            else:
                self.torrent_db.addExternalTorrent(tdef, extra_info = {'filename': swiftpath, 'swift_torrent_hash':sdef.get_roothash(), 'status':'good'})

            #notify all
            self.notify_possible_torrent_infohash(infohash, True)
            if callback:
                callback()

        startWorker(None, do_db, wargs = (callback, ))

    def _write_to_collected(self, filename):
        """Compute the swift roothash for FILENAME and copy it (plus .mhash
        and .mbinmap sidecars) into the collected dir under that hex name.
        Returns (sdef, destination_path)."""
        #calculate root-hash
        sdef = SwiftDef()
        sdef.add_content(filename)
        sdef.finalize(self.session.get_swift_path(), destdir = self.session.get_torrent_collecting_dir())

        mfpath = os.path.join(self.session.get_torrent_collecting_dir(),sdef.get_roothash_as_hex())
        if not os.path.exists(mfpath):
            download = self.session.get_download(sdef.get_roothash())
            if download:
                # A swift download for this roothash is active; stop it before
                # writing, and give it a moment to release the files.
                self.session.remove_download(download, removestate = True)
                sleep(1)
            elif os.path.exists(mfpath + ".mhash"):
                #indicating failed swift download
                os.remove(mfpath + ".mhash")

            try:
                shutil.copy(filename, mfpath)
                shutil.move(filename+'.mhash', mfpath+'.mhash')
                shutil.move(filename+'.mbinmap', mfpath+'.mbinmap')
            except:
                print_exc()

        return sdef, mfpath

    def notify_possible_torrent_roothash(self, roothash):
        # Fire callbacks registered under (infohash, roothash) keys whose
        # roothash component matches.
        keys = self.callbacks.keys()
        for key in keys:
            if key[1] == roothash:
                handle_lambda = lambda key=key: self._handleCallback(key, True)
                self.scheduletask(handle_lambda)

        def do_db(tdef):
            # Record the collected swift file in the torrent database.
            if self.torrent_db.hasTorrent(tdef.get_infohash()):
                self.torrent_db.updateTorrent(tdef.get_infohash(), swift_torrent_hash = sdef.get_roothash(), torrent_file_name = swiftpath)
            else:
                self.torrent_db._addTorrentToDB(tdef, source = "SWIFT", extra_info = {'filename': swiftpath, 'swift_torrent_hash':roothash, 'status':'good'}, commit = True)

        sdef = SwiftDef(roothash)
        swiftpath = os.path.join(self.session.get_torrent_collecting_dir(),sdef.get_roothash_as_hex())
        if os.path.exists(swiftpath):
            try:
                tdef = TorrentDef.load(swiftpath)
                startWorker(None, do_db, wargs = (tdef, ))
            except:
                #ignore if tdef loading fails
                pass

    def notify_possible_thumbnail_roothash(self, roothash):
        # Thumbnail callbacks are keyed on the bare roothash.
        keys = self.callbacks.keys()
        for key in keys:
            if key == roothash:
                handle_lambda = lambda key=key: self._handleCallback(key, True)
                self.scheduletask(handle_lambda)
                print >>sys.stderr,'rtorrent: finished downloading thumbnail:', binascii.hexlify(roothash)

    def notify_possible_torrent_infohash(self, infohash, actualTorrent = False):
        # Match both tuple keys (infohash, roothash) and bare infohash keys.
        keys = self.callbacks.keys()
        for key in keys:
            if key[0] == infohash or key == infohash:
                handle_lambda = lambda key=key, actualTorrent=actualTorrent: self._handleCallback(key, actualTorrent)
                self.scheduletask(handle_lambda)

    def _handleCallback(self, key, torrent = True):
        """Fire and clear all user callbacks for KEY, then drop any pending
        requests for it from the relevant requester pools."""
        if DEBUG:
            print >>sys.stderr,'rtorrent: got torrent for:', key

        if key in self.callbacks:
            for usercallback in self.callbacks[key]:
                self.session.uch.perform_usercallback(usercallback)
            del self.callbacks[key]

        if torrent:
            for requester in self.trequesters.values():
                if requester.is_being_requested(key):
                    requester.remove_request(key)
            for requester in self.drequesters.values():
                if requester.is_being_requested(key):
                    requester.remove_request(key)
        else:
            for requester in self.mrequesters.values():
                if requester.is_being_requested(key):
                    requester.remove_request(key)

    def getQueueSize(self):
        """Return a human-readable summary of pending requests per queue."""
        def getQueueSize(qname, requesters):
            # Collect per-priority source counts, sorted by priority.
            qsize = {}
            for requester in requesters.itervalues():
                if len(requester.sources):
                    qsize[requester.prio] = len(requester.sources)
            items = qsize.items()
            if items:
                items.sort()
                return "%s: "%qname + ",".join(map(str, items))
            return ''
        return ", ".join([qstring for qstring in [getQueueSize("TQueue", self.trequesters), getQueueSize("DQueue", self.drequesters), getQueueSize("MQueue", self.mrequesters)] if qstring])

    def getQueueSuccess(self):
        """Return a "name: success/requests" summary string per queue."""
        def getQueueSuccess(qname, requesters):
            sum_requests = sum_success = 0
            print_value = False
            for requester in requesters.itervalues():
                # A negative requests_success marks a requester that should
                # be excluded from the statistics.
                if requester.requests_success >= 0:
                    print_value = True
                    sum_requests += requester.requests_made
                    sum_success += requester.requests_success
            if print_value:
                return "%s: %d/%d"%(qname, sum_success, sum_requests)
            return ''
        return ", ".join([qstring for qstring in [getQueueSuccess("TQueue", self.trequesters), getQueueSuccess("DQueue", self.drequesters), getQueueSuccess("MQueue", self.mrequesters)] if qstring])
class RemoteTorrentHandler:

    """Singleton coordinating the download of remote .torrent files, torrent
    messages and thumbnails through prioritized requester pools: swift-based
    (trequesters), magnet/DHT-based (drequesters) and search-community
    message-based (mrequesters).  Database work is delegated to the
    database_thead passed to register(); user callbacks are stored per
    request key and fired from the handler's TimedTaskQueue thread.

    Fix: remove_all_requests() now actually invokes each requester's
    remove_all_requests method — the original referenced the bound method
    without calling it, making the statement a no-op."""

    __single = None

    def __init__(self):
        RemoteTorrentHandler.__single = self

        self.registered = False          # set True by register()
        self._searchcommunity = None     # lazily resolved, see searchcommunity property
        self.callbacks = {}              # request key -> set of user callbacks

        # Requester pools, keyed by priority level.
        self.trequesters = {}            # swift torrent requesters
        self.mrequesters = {}            # torrent-message requesters
        self.drequesters = {}            # magnet/DHT requesters
        self.tnrequester = None          # thumbnail requester

        self.num_torrents = 0            # last observed collected-torrent count

    def getInstance(*args, **kw):
        # Create the singleton lazily on first access.
        if RemoteTorrentHandler.__single is None:
            RemoteTorrentHandler(*args, **kw)
        return RemoteTorrentHandler.__single
    getInstance = staticmethod(getInstance)

    def delInstance(*args, **kw):
        # Drop the singleton so a fresh instance can be created (tests).
        RemoteTorrentHandler.__single = None
    delInstance = staticmethod(delInstance)

    def register(self, dispersy, database_thead, session, max_num_torrents):
        """Wire the handler to the session.  Overflow checking starts only
        when the megacache is enabled; magnet requesters are created only
        when DHT torrent collecting is enabled."""
        self.session = session
        self.dispersy = dispersy
        self.database_thead = database_thead
        self.max_num_torrents = max_num_torrents
        self.tor_col_dir = self.session.get_torrent_collecting_dir()

        from Tribler.Utilities.TimedTaskQueue import TimedTaskQueue
        self.tqueue = TimedTaskQueue("RemoteTorrentHandler")
        self.scheduletask = self.tqueue.add_task

        self.torrent_db = None
        if self.session.get_megacache():
            self.torrent_db = session.open_dbhandler('torrents')
            # Periodic pruning of the collected-torrent store (generator task).
            self.database_thead.register(self.__check_overflow, delay=30.0)

        if session.get_dht_torrent_collecting():
            # Two magnet requesters: priority 0 (urgent) and 1 (background).
            self.drequesters[0] = MagnetRequester(self, 0)
            self.drequesters[1] = MagnetRequester(self, 1)
        self.tnrequester = ThumbnailRequester(self, self.session)
        self.registered = True

    def is_registered(self):
        return self.registered

    def shutdown(self):
        # Immediate shutdown: pending tasks are discarded.
        self.tqueue.shutdown(True)

    def set_max_num_torrents(self, max_num_torrents):
        self.max_num_torrents = max_num_torrents

    def __check_overflow(self):
        # Generator-style task: each yielded value is the delay in seconds
        # before the task resumes.
        while True:
            self.num_torrents = self.torrent_db.getNumberCollectedTorrents()
            if DEBUG:
                print >> sys.stderr, "rtorrent: check overflow: current", self.num_torrents, "max", self.max_num_torrents

            if self.num_torrents > self.max_num_torrents:
                # Prune down to 95% of the limit in steps of at least 25
                # torrents, pausing 5s between steps to spread the load.
                num_delete = int(self.num_torrents - self.max_num_torrents * 0.95)
                num_per_step = max(25, num_delete / 180)
                print >> sys.stderr, "rtorrent: ** limit space::", self.num_torrents, self.max_num_torrents, num_delete
                while num_delete > 0:
                    to_remove = min(num_delete, num_per_step)
                    num_delete -= to_remove
                    self.torrent_db.freeSpace(to_remove)
                    yield 5.0
                LOW_PRIO_COLLECTING = 4
            elif self.num_torrents > (self.max_num_torrents * .75):
                LOW_PRIO_COLLECTING = 3
            else:
                LOW_PRIO_COLLECTING = 2

            # NOTE(review): LOW_PRIO_COLLECTING is only used in the debug
            # print below; it is not stored or returned anywhere visible here.
            if DEBUG:
                print >> sys.stderr, "rtorrent: setting low_prio_collection to one .torrent every %.1f seconds" % (LOW_PRIO_COLLECTING * .5)

            yield 30 * 60.0  # run every 30 minutes

    @property
    def searchcommunity(self):
        # Resolve and cache the SearchCommunity from dispersy on first use;
        # returns None (implicitly) before register() has been called.
        if self.registered:
            if not self._searchcommunity:
                from Tribler.community.search.community import SearchCommunity
                for community in self.dispersy.get_communities():
                    if isinstance(community, SearchCommunity):
                        self._searchcommunity = community
                        break
            return self._searchcommunity

    def has_thumbnail(self, infohash):
        # A thumbnail exists when the per-torrent thumbs directory is present
        # and non-empty (returns the truthy listing, not a strict bool).
        thumb_dir = os.path.join(self.tor_col_dir, 'thumbs-' + binascii.hexlify(infohash))
        return os.path.isdir(thumb_dir) and os.listdir(thumb_dir)

    def download_thumbnail(self, candidate, roothash, infohash, usercallback=None, timeout=None):
        # Defer the actual request to the handler's task-queue thread.
        if self.registered and not self.has_thumbnail(roothash):
            raw_lambda = lambda candidate=candidate, roothash=roothash, infohash=infohash, usercallback=usercallback, timeout=timeout: self._download_thumbnail(candidate, roothash, infohash, usercallback, timeout)
            self.scheduletask(raw_lambda)

    def _download_thumbnail(self, candidate, roothash, infohash, usercallback, timeout):
        if usercallback:
            # Thumbnail callbacks are keyed on the bare roothash.
            self.callbacks.setdefault(roothash, set()).add(usercallback)

        self.tnrequester.add_request((roothash, infohash), candidate, timeout)

        if DEBUG:
            print >> sys.stderr, 'rtorrent: adding thumbnail request:', roothash or '', candidate

    def download_torrent(self, candidate, infohash=None, roothash=None, usercallback=None, prio=1, timeout=None):
        # Defer to the task-queue thread; arguments are bound via defaults.
        if self.registered:
            raw_lambda = lambda candidate=candidate, infohash=infohash, roothash=roothash, usercallback=usercallback, prio=prio, timeout=timeout: self._download_torrent(candidate, infohash, roothash, usercallback, prio, timeout)
            self.scheduletask(raw_lambda)

    def _download_torrent(self, candidate, infohash, roothash, usercallback, prio, timeout):
        if self.registered:
            assert infohash or roothash, "We need either the info or roothash"

            # Prefer swift collection when we know both a candidate and a
            # roothash; otherwise fall back to magnet/DHT lookup by infohash.
            doSwiftCollect = candidate and roothash
            if doSwiftCollect:
                requesters = self.trequesters
                hash = (infohash, roothash)

            elif infohash:
                requesters = self.drequesters
                hash = infohash

                # fix prio levels to 1 and 0
                prio = min(prio, 1)
            else:
                return

            if usercallback:
                self.callbacks.setdefault(hash, set()).add(usercallback)

            # look for lowest prio requester, which already has this infohash scheduled
            # NOTE(review): range(prio, prio + 1) inspects only the requested
            # priority itself — confirm whether a wider range was intended.
            requester = None
            for i in range(prio, prio + 1):
                if i in requesters and requesters[i].is_being_requested(hash):
                    requester = requesters[i]
                    break

            # if not found, then used/create this requester
            if not requester:
                if prio not in requesters:
                    if doSwiftCollect:
                        # Swift requester falls back to the low-prio magnet
                        # requester when present (may be None without DHT).
                        requesters[prio] = TorrentRequester(self, self.drequesters.get(1, None), self.session, prio)
                    elif self.session.get_dht_torrent_collecting():
                        requesters[prio] = MagnetRequester(self, prio)
                requester = requesters[prio]

            # make request; requester can be absent when DHT collecting is off
            if requester:
                requester.add_request(hash, candidate, timeout)

                if DEBUG:
                    print >> sys.stderr, 'rtorrent: adding torrent request:', bin2str(infohash or ''), bin2str(roothash or ''), candidate, prio

    def download_torrentmessages(self, candidate, infohashes, usercallback=None, prio=1):
        # Defer to the task-queue thread.
        if self.registered:
            raw_lambda = lambda candidate=candidate, infohashes=infohashes, usercallback=usercallback, prio=prio: self._download_torrentmessages(candidate, infohashes, usercallback, prio)
            self.scheduletask(raw_lambda)

    def _download_torrentmessages(self, candidate, infohashes, usercallback, prio):
        assert all(isinstance(infohash, str) for infohash in infohashes), "INFOHASH has invalid type"
        assert all(len(infohash) == INFOHASH_LENGTH for infohash in infohashes), "INFOHASH has invalid length:"

        if self.registered:
            if usercallback:
                for infohash in infohashes:
                    # Message callbacks are keyed on (infohash, None); the
                    # default argument binds each infohash at definition time.
                    callback = lambda infohash=infohash: usercallback(infohash)
                    self.callbacks.setdefault((infohash, None), set()).add(callback)

            if prio not in self.mrequesters:
                self.mrequesters[prio] = TorrentMessageRequester(self, self.searchcommunity, prio)

            requester = self.mrequesters[prio]

            # make request
            requester.add_request(frozenset(infohashes), candidate)

            if DEBUG:
                print >> sys.stderr, 'rtorrent: adding torrent messages request:', map(bin2str, infohashes), candidate, prio

    def has_torrent(self, infohash, callback):
        # Without a megacache there is no torrent db: report "not collected".
        if self.torrent_db:
            self.database_thead.register(self._has_torrent, args=(infohash, self.tor_col_dir, callback))
        else:
            callback(False)

    def _has_torrent(self, infohash, tor_col_dir, callback):
        # save torrent
        # Result is False when not collected, otherwise the on-disk filename.
        result = False
        torrent = self.torrent_db.getTorrent(infohash, ['torrent_file_name', 'swift_torrent_hash'], include_mypref=False)
        if torrent:
            if torrent.get('torrent_file_name', False) and os.path.isfile(torrent['torrent_file_name']):
                result = torrent['torrent_file_name']
            elif torrent.get('swift_torrent_hash', False):
                sdef = SwiftDef(torrent['swift_torrent_hash'])
                torrent_filename = os.path.join(tor_col_dir, sdef.get_roothash_as_hex())

                if os.path.isfile(torrent_filename):
                    # Remember the discovered filename for the next lookup.
                    self.torrent_db.updateTorrent(infohash, notify=False, torrent_file_name=torrent_filename)
                    result = torrent_filename

        # Deliver the result via the handler's task queue.
        raw_lambda = lambda result=result: callback(result)
        self.scheduletask(raw_lambda)

    def save_torrent(self, tdef, callback=None):
        if self.registered:
            def do_schedule(filename):
                # Only write the torrent when it is not collected yet.
                if not filename:
                    self._save_torrent(tdef, callback)
                elif callback:
                    self.database_thead.register(callback)

            infohash = tdef.get_infohash()
            self.has_torrent(infohash, do_schedule)

    def _save_torrent(self, tdef, callback=None):
        # Write to a unique tmp_ file first, then move it into the collected
        # directory via _write_to_collected.
        tmp_filename = os.path.join(self.session.get_torrent_collecting_dir(), "tmp_" + get_collected_torrent_filename(tdef.get_infohash()))
        filename_index = 0
        while os.path.exists(tmp_filename):
            filename_index += 1
            tmp_filename = os.path.join(self.session.get_torrent_collecting_dir(), ("tmp_%d_" % filename_index) + get_collected_torrent_filename(tdef.get_infohash()))

        tdef.save(tmp_filename)
        sdef, swiftpath = self._write_to_collected(tmp_filename)

        try:
            os.remove(tmp_filename)
        except:
            # File may still be held open; retry the removal at process exit.
            atexit.register(lambda tmp_filename=tmp_filename: os.remove(tmp_filename))

        def do_db(callback):
            # add this new torrent to db
            infohash = tdef.get_infohash()
            if self.torrent_db.hasTorrent(infohash):
                self.torrent_db.updateTorrent(infohash, swift_torrent_hash=sdef.get_roothash(), torrent_file_name=swiftpath)
            else:
                self.torrent_db.addExternalTorrent(tdef, extra_info={'filename': swiftpath, 'swift_torrent_hash': sdef.get_roothash(), 'status': 'good'})

            # notify all
            self.notify_possible_torrent_infohash(infohash, True)
            if callback:
                callback()

        if self.torrent_db:
            self.database_thead.register(do_db, args=(callback,))
        elif callback:
            callback()

    def _write_to_collected(self, filename):
        """Compute the swift roothash for FILENAME and copy it (plus .mhash
        and .mbinmap sidecars) into the collected dir under that hex name.
        Returns (sdef, destination_path)."""
        # calculate root-hash
        sdef = SwiftDef()
        sdef.add_content(filename)
        sdef.finalize(self.session.get_swift_path(), destdir=self.session.get_torrent_collecting_dir())

        mfpath = os.path.join(self.session.get_torrent_collecting_dir(), sdef.get_roothash_as_hex())
        if not os.path.exists(mfpath):
            download = self.session.get_download(sdef.get_roothash())
            if download:
                # A swift download for this roothash is active; stop it before
                # writing, and give it a moment to release the files.
                self.session.remove_download(download, removestate=True)
                sleep(1)
            elif os.path.exists(mfpath + ".mhash"):
                # indicating failed swift download
                os.remove(mfpath + ".mhash")

            try:
                shutil.copy(filename, mfpath)
                shutil.move(filename + '.mhash', mfpath + '.mhash')
                shutil.move(filename + '.mbinmap', mfpath + '.mbinmap')
            except:
                print_exc()

        return sdef, mfpath

    def notify_possible_torrent_roothash(self, roothash):
        # Fire callbacks registered under (infohash, roothash) keys whose
        # roothash component matches.
        keys = self.callbacks.keys()
        for key in keys:
            if key[1] == roothash:
                handle_lambda = lambda key=key: self._handleCallback(key, True)
                self.scheduletask(handle_lambda)

        def do_db(tdef):
            # Record the collected swift file in the torrent database.
            if self.torrent_db.hasTorrent(tdef.get_infohash()):
                self.torrent_db.updateTorrent(tdef.get_infohash(), swift_torrent_hash=sdef.get_roothash(), torrent_file_name=swiftpath)
            else:
                self.torrent_db._addTorrentToDB(tdef, source="SWIFT", extra_info={'filename': swiftpath, 'swift_torrent_hash': roothash, 'status': 'good'}, commit=True)

        sdef = SwiftDef(roothash)
        swiftpath = os.path.join(self.session.get_torrent_collecting_dir(), sdef.get_roothash_as_hex())
        if os.path.exists(swiftpath) and self.torrent_db:
            try:
                tdef = TorrentDef.load(swiftpath)
                self.database_thead.register(do_db, args=(tdef,))
            except:
                # ignore if tdef loading fails
                pass

    def notify_possible_thumbnail_roothash(self, roothash):
        # Thumbnail callbacks are keyed on the bare roothash.
        keys = self.callbacks.keys()
        for key in keys:
            if key == roothash:
                handle_lambda = lambda key=key: self._handleCallback(key, True)
                self.scheduletask(handle_lambda)
                print >> sys.stderr, 'rtorrent: finished downloading thumbnail:', binascii.hexlify(roothash)

    def notify_possible_torrent_infohash(self, infohash, actualTorrent=False):
        # Match both tuple keys (infohash, roothash) and bare infohash keys.
        keys = self.callbacks.keys()
        for key in keys:
            if key[0] == infohash or key == infohash:
                handle_lambda = lambda key=key, actualTorrent=actualTorrent: self._handleCallback(key, actualTorrent)
                self.scheduletask(handle_lambda)

    def _handleCallback(self, key, torrent=True):
        """Fire and clear all user callbacks for KEY, then drop any pending
        requests for it from the relevant requester pools."""
        if DEBUG:
            print >> sys.stderr, 'rtorrent: got torrent for:', key

        if key in self.callbacks:
            for usercallback in self.callbacks[key]:
                self.session.uch.perform_usercallback(usercallback)
            del self.callbacks[key]

        if torrent:
            for requester in self.trequesters.values():
                if requester.is_being_requested(key):
                    requester.remove_request(key)
            for requester in self.drequesters.values():
                if requester.is_being_requested(key):
                    requester.remove_request(key)
        else:
            for requester in self.mrequesters.values():
                if requester.is_being_requested(key):
                    requester.remove_request(key)

    def getQueueSize(self):
        """Return a human-readable summary of pending requests per queue."""
        def getQueueSize(qname, requesters):
            # Collect per-priority source counts, sorted by priority.
            qsize = {}
            for requester in requesters.itervalues():
                if len(requester.sources):
                    qsize[requester.prio] = len(requester.sources)
            items = qsize.items()
            if items:
                items.sort()
                return "%s: " % qname + ",".join(map(str, items))
            return ''
        return ", ".join([qstring for qstring in [getQueueSize("TQueue", self.trequesters), getQueueSize("DQueue", self.drequesters), getQueueSize("MQueue", self.mrequesters)] if qstring])

    def getQueueSuccess(self):
        """Return per-queue (summary, tooltip) success statistics for queues
        that have at least one counting requester."""
        def getQueueSuccess(qname, requesters):
            sum_requests = sum_success = sum_fail = sum_on_disk = 0
            print_value = False
            for requester in requesters.itervalues():
                # A negative requests_success marks a requester that should
                # be excluded from the statistics.
                if requester.requests_success >= 0:
                    print_value = True
                    sum_requests += requester.requests_made
                    sum_success += requester.requests_success
                    sum_fail += requester.requests_fail
                    sum_on_disk += requester.requests_on_disk
            if print_value:
                return "%s: %d/%d" % (qname, sum_success, sum_requests), "%s: success %d, pending %d, on disk %d, failed %d" % (qname, sum_success, sum_requests - sum_success - sum_fail - sum_on_disk, sum_on_disk, sum_fail)
            return '', ''
        return [(qstring, qtooltip) for qstring, qtooltip in [getQueueSuccess("TQueue", self.trequesters), getQueueSuccess("DQueue", self.drequesters), getQueueSuccess("MQueue", self.mrequesters)] if qstring]

    def remove_all_requests(self):
        """Drop every pending request from every requester pool (test hook)."""
        print >> sys.stderr, "ONLY USE FOR TESTING PURPOSES"
        for requester in self.trequesters.values() + self.mrequesters.values() + self.drequesters.values():
            # FIX: the original referenced the bound method without calling
            # it, so this loop was a no-op.
            requester.remove_all_requests()