class FakeCallback():
    """Stand-in for the session callback interface.

    Registered calls are executed on a private TimedTaskQueue rather
    than a real session thread, which keeps tests self-contained.
    """

    def __init__(self):
        from Tribler.Utilities.TimedTaskQueue import TimedTaskQueue
        self.queue = TimedTaskQueue("FakeCallback")

    def register(self, call, args=(), kargs=None, delay=0.0, priority=0, id_=u"", callback=None, callback_args=(), callback_kargs=None, include_id=False):
        """Schedule call(*args, **kargs) after `delay` seconds, then run the
        optional completion `callback`.  priority/id_/include_id are accepted
        for interface compatibility but not used here."""
        def do_task():
            # `kargs or {}` covers both the None default and an empty dict.
            call(*args, **(kargs or {}))
            if callback:
                callback(*callback_args, **(callback_kargs or {}))
        self.queue.add_task(do_task, t=delay)

    def shutdown(self, immediately=False):
        """Stop the underlying task queue."""
        self.queue.shutdown(immediately)
def setUp(self):
    """Build a DataHandler wired to a fake launch-many core and a real
    TimedTaskQueue acting as the overlay bridge."""
    fake_core = FakeLauchMany()
    self.overlay_bridge = TimedTaskQueue()
    self.data_handler = DataHandler(fake_core, self.overlay_bridge, max_num_peers=2500)
def setUp(self):
    """Register a BuddyCast instance against a fake core and a real
    TimedTaskQueue overlay bridge."""
    launchmany = FakeLauchMany()
    self.overlay_bridge = TimedTaskQueue()
    superpeer = False  # flip to True to exercise the superpeer path
    factory = BuddyCastFactory.getInstance(superpeer=superpeer)
    factory.register(self.overlay_bridge, launchmany, None, None, None, True)
    self.bc = factory
def __init__(self):
    """Singleton constructor: wires up empty handler slots and the single
    worker thread all bridged overlay work is funnelled through.

    Raises RuntimeError when a second instance is constructed; use
    getInstance() instead of calling this directly.
    """
    if OverlayThreadingBridge.__single:
        # Call form instead of py2-only "raise Exc, msg" — identical
        # behaviour, also valid under Python 3.
        raise RuntimeError("OverlayThreadingBridge is Singleton")
    OverlayThreadingBridge.__single = self

    self.secover = None
    self.olapps = None
    self.olappsmsghandler = None
    self.olappsconnhandler = None

    # Current impl of wrapper: single thread
    self.tqueue = TimedTaskQueue(nameprefix="Overlay")
def __init__(self):
    """Singleton constructor: sets up empty handler slots plus two worker
    threads — one for generic overlay traffic, one dedicated to GameCast.

    Raises RuntimeError when a second instance is constructed; use
    getInstance() instead of calling this directly.
    """
    if OverlayThreadingBridge.__single:
        # Call form instead of py2-only "raise Exc, msg" — identical
        # behaviour, also valid under Python 3.
        raise RuntimeError("OverlayThreadingBridge is Singleton")
    OverlayThreadingBridge.__single = self

    self.secover = None
    self.olapps = None
    self.olappsmsghandler = None
    self.olappsconnhandler = None

    # Current impl of wrapper: single thread
    self.tqueue = TimedTaskQueue(nameprefix="Overlay")
    self.gcqueue = TimedTaskQueue(nameprefix="GameCast")
class TestBuddyCastDataHandler(unittest.TestCase):
    """Smoke test: DataHandler.postInit() runs against a fake core."""

    def setUp(self):
        # prepare database
        fake_core = FakeLauchMany()
        self.overlay_bridge = TimedTaskQueue()
        self.data_handler = DataHandler(fake_core, self.overlay_bridge, max_num_peers=2500)

    def tearDown(self):
        # 'quit' is the TimedTaskQueue sentinel that stops its worker thread.
        self.overlay_bridge.add_task('quit')

    def test_postInit(self):
        self.data_handler.postInit(1, 50, 0, 50)
class TestBuddyCastDataHandler(unittest.TestCase):
    """Verifies DataHandler.postInit() completes on a real task queue."""

    def setUp(self):
        # prepare database
        core = FakeLauchMany()
        bridge = TimedTaskQueue()
        self.overlay_bridge = bridge
        self.data_handler = DataHandler(core, bridge, max_num_peers=2500)

    def tearDown(self):
        # Ask the queue's worker thread to terminate.
        self.overlay_bridge.add_task('quit')

    def test_postInit(self):
        self.data_handler.postInit(1, 50, 0, 50)
def register(self, dispersy, session, max_num_torrents):
    # Wire the handler into a live session.  Must run before any of the
    # download_* entry points (they all check self.registered).
    self.session = session
    self.dispersy = dispersy
    self.max_num_torrents = max_num_torrents
    self.tor_col_dir = self.session.get_torrent_collecting_dir()

    # Local import — presumably to avoid an import cycle; TODO confirm.
    from Tribler.Utilities.TimedTaskQueue import TimedTaskQueue
    self.tqueue = TimedTaskQueue("RemoteTorrentHandler")
    # All scheduled work goes through the queue's add_task.
    self.scheduletask = self.tqueue.add_task

    self.torrent_db = session.open_dbhandler('torrents')
    self.channel_db = self.session.open_dbhandler(NTFY_CHANNELCAST)

    # Two magnet requesters at priorities 0 and 1.
    self.drequesters[0] = MagnetRequester(self, 0)
    self.drequesters[1] = MagnetRequester(self, 1)
    self.tnrequester = ThumbnailRequester(self, self.session)
    self.registered = True

    # Kick off the collected-torrent overflow watchdog on a worker.
    startWorker(None, self.__check_overflow)
def test_addTask(self):
    """Tasks fire ordered by delay; the running count pins the order."""
    self.queue = TimedTaskQueue()
    self.count = 0
    # (task, delay) pairs queued deliberately out of delay order.
    for task, delay in ((self.task3a, 3), (self.task0, 0), (self.task3b, 3), (self.task2, 1)):
        self.queue.add_task(task, delay)
    sleep(6)
    assert self.count == 11
    del self.queue
def test_addTask0FIFO(self):
    """Tasks with the same (zero) delay must run in FIFO order."""
    self.queue = TimedTaskQueue()
    self.count = 0
    for task in (self.task0a, self.task0b, self.task0c, self.task0d):
        self.queue.add_task(task, 0)
    sleep(6)
    assert self.count == 4
    del self.queue
def setUp(self):
    """Wire a BuddyCast instance to a fake core over a real task queue."""
    fake_core = FakeLauchMany()
    self.overlay_bridge = TimedTaskQueue()
    # Pass superpeer=True here to exercise the superpeer code path.
    self.bc = BuddyCastFactory.getInstance(superpeer=False)
    self.bc.register(self.overlay_bridge, fake_core, None, None, None, True)
def register(self, dispersy, database_thead, session, max_num_torrents):
    # Wire the handler into a live session; must run before any
    # download_* entry point (those paths check self.registered).
    self.session = session
    self.dispersy = dispersy
    # NOTE: the 'thead' typo is part of the attribute/parameter name.
    self.database_thead = database_thead
    self.max_num_torrents = max_num_torrents
    self.tor_col_dir = self.session.get_torrent_collecting_dir()

    # Local import — presumably avoids an import cycle; TODO confirm.
    from Tribler.Utilities.TimedTaskQueue import TimedTaskQueue
    self.tqueue = TimedTaskQueue("RemoteTorrentHandler")
    self.scheduletask = self.tqueue.add_task

    self.torrent_db = None
    if self.session.get_megacache():
        self.torrent_db = session.open_dbhandler('torrents')
        # Overflow watchdog only makes sense with a torrent DB present.
        self.database_thead.register(self.__check_overflow, delay=30.0)

    if session.get_dht_torrent_collecting():
        self.drequesters[0] = MagnetRequester(self, 0)
        self.drequesters[1] = MagnetRequester(self, 1)

    # NOTE(review): collapsed source makes the block boundary ambiguous —
    # these two look unconditional (matching the other register variant);
    # confirm against upstream history.
    self.tnrequester = ThumbnailRequester(self, self.session)
    self.registered = True
def register(self, dispersy, session):
    """Attach the handler to a session and spin up its worker queue."""
    self.session = session
    self.dispersy = dispersy

    from Tribler.Utilities.TimedTaskQueue import TimedTaskQueue
    worker = TimedTaskQueue("RemoteTorrentHandler")
    self.scheduletask = worker.add_task

    self.torrent_db = session.open_dbhandler('torrents')
    # One magnet requester per priority level.
    for prio in (0, 1):
        self.drequesters[prio] = MagnetRequester(self, prio)
    self.registered = True
class TestGUITaskQueue(unittest.TestCase):
    """Checks that TimedTaskQueue executes tasks in delay order.

    Each task records its number in self.completed; tearDown verifies
    that exactly the numbers 0..ntasks-1 were recorded.
    """

    def setUp(self):
        self.ntasks = 0
        self.completed = []
        self.guiserver = TimedTaskQueue()

    def tearDown(self):
        # Give queued tasks time to finish before checking results.
        sleep(2)
        self.completed.sort()
        if self.completed != range(self.ntasks):
            print "test failed",self.completed
            self.assert_(False)

    def test_simple(self):
        self.ntasks = 1
        self.guiserver.add_task(lambda:self.task(0),0)

    def test_more(self):
        self.ntasks = 10
        for i in range(self.ntasks):
            # lambda functions are evil, this is not the same as lambda:task(i)
            self.guiserver.add_task(self.define_task(i),0)

    def test_delay(self):
        self.ntasks = 1
        self.guiserver.add_task(lambda:self.task(0),3)
        print "test: sleeping 5 secs so tasks gets executed"
        sleep(5)

    def test_delay2(self):
        # Queued out of order; the delays must restore the order.
        self.ntasks = 2
        self.guiserver.add_task(lambda:self.task(1),3)
        self.guiserver.add_task(lambda:self.task(0),1)
        print "test: sleeping 5 secs so tasks gets executed"
        sleep(5)

    def define_task(self,num):
        # Returns a closure with num bound now (avoids late binding).
        return lambda:self.task(num)

    def task(self,num):
        print "Running task",num
        self.completed.append(num)
class TestGUITaskQueue(unittest.TestCase): def setUp(self): self.ntasks = 0 self.completed = [] self.guiserver = TimedTaskQueue() def tearDown(self): sleep(2) self.completed.sort() if self.completed != range(self.ntasks): print "test failed", self.completed self.assert_(False) def test_simple(self): self.ntasks = 1 self.guiserver.add_task(lambda: self.task(0), 0) def test_more(self): self.ntasks = 10 for i in range(self.ntasks): # lambda functions are evil, this is not the same as lambda:task(i) self.guiserver.add_task(self.define_task(i), 0) def test_delay(self): self.ntasks = 1 self.guiserver.add_task(lambda: self.task(0), 3) print "test: sleeping 5 secs so tasks gets executed" sleep(5) def test_delay2(self): self.ntasks = 2 self.guiserver.add_task(lambda: self.task(1), 3) self.guiserver.add_task(lambda: self.task(0), 1) print "test: sleeping 5 secs so tasks gets executed" sleep(5) def define_task(self, num): return lambda: self.task(num) def task(self, num): print "Running task", num self.completed.append(num)
def __init__(self, redirectstderrout, appname, appversion, params, single_instance_checker, installdir, i2iport, sport, httpport):
    # Background-process app init: brings up the HTTP server, the search
    # machinery and its path mappers, then the per-Download bookkeeping.
    # Statement order matters: mappers must be added before
    # background_serve() is called.

    # Almost generic HTTP server
    self.videoHTTPServer = VideoHTTPServer(httpport)
    self.videoHTTPServer.register(self.videoservthread_error_callback, self.videoservthread_set_status_callback)

    BaseApp.__init__(self, redirectstderrout, appname, appversion, params, single_instance_checker, installdir, i2iport, sport)
    self.httpport = httpport

    # SEARCH:P2P
    # Maps a query ID to the original searchstr, timestamp and all hits (local + remote)
    self.id2hits = Query2HitsMap()

    # Maps a URL path received by HTTP server to the requested resource,
    # reading or generating it dynamically.
    #
    # For saving .torrents received in hits to P2P searches using
    # SIMPLE+METADATA queries
    self.tqueue = TimedTaskQueue(nameprefix="BGTaskQueue")
    self.searchmapper = SearchPathMapper(self.s, self.id2hits, self.tqueue)
    self.hits2anypathmapper = Hits2AnyPathMapper(self.s, self.id2hits)

    self.videoHTTPServer.add_path_mapper(self.searchmapper)
    self.videoHTTPServer.add_path_mapper(self.hits2anypathmapper)

    # WEB Interface
    # Maps a URL path received by HTTP server to the requested resource,
    # reading or generating it dynamically.
    self.webIFmapper = WebIFPathMapper(self, self.s)
    self.videoHTTPServer.add_path_mapper(self.webIFmapper)

    # Generic HTTP server start. Don't add mappers dynamically afterwards!
    self.videoHTTPServer.background_serve()

    # Maps Downloads to a using InstanceConnection and streaminfo when it
    # plays. So it contains the Downloads in VOD mode for which there is
    # active interest from a plugin.
    #
    # At the moment each Download is used/owned by a single IC and a new
    # request for the same torrent will stop playback to the original IC
    # and resume it to the new user.
    #
    self.dusers = {}
    self.approxplayerstate = MEDIASTATE_STOPPED

    self.counter = 0 # counter for the stats reported periodically
    self.interval = 120 # report interval
    self.iseedeadpeople = False

    if sys.platform == "win32":
        # If the BG Process is started by the plug-in notify it with an event
        try:
            startupEvent = win32event.CreateEvent(None, 0, 0, 'startupEvent')
            win32event.SetEvent(startupEvent)
            win32api.CloseHandle( startupEvent ) # TODO : is it possible to avoid importing win32api just to close an handler?
        except:
            # Best-effort notification only; failure to signal is ignored.
            pass
def setUp(self):
    # Fresh TimedTaskQueue instance for each test case.
    self.queue = TimedTaskQueue()
class TestTimedTaskQueue(unittest.TestCase):
    """Verifies delay ordering and zero-delay FIFO behaviour.

    The task methods accumulate into self.count and assert the running
    total, which pins down the exact execution order.
    """

    def setUp(self):
        self.queue = TimedTaskQueue()

    def tearDown(self):
        self.queue.shutdown()
        del self.queue

    def test_addTask(self):
        self.count = 0
        # (task, delay) pairs queued deliberately out of delay order.
        for task, delay in ((self.task3a, 3), (self.task0, 0), (self.task3b, 3), (self.task2, 1)):
            self.queue.add_task(task, delay)
        sleep(6)
        assert self.count == 11

    def task0(self):
        self.count += 1
        assert self.count == 1

    def task2(self):
        self.count += 2
        assert self.count == 3

    def task3a(self):
        # task3a/task3b share a delay, so either may run first.
        self.count += 4
        assert self.count in (7, 11)

    def task3b(self):
        self.count += 4
        assert self.count in (7, 11)

    def test_addTask0FIFO(self):
        self.count = 0
        for task in (self.task0a, self.task0b, self.task0c, self.task0d):
            self.queue.add_task(task, 0)
        sleep(6)
        assert self.count == 4

    def task0a(self):
        assert self.count == 0
        self.count = 1

    def task0b(self):
        assert self.count == 1
        self.count = 2

    def task0c(self):
        assert self.count == 2
        self.count = 3

    def task0d(self):
        assert self.count == 3
        self.count = 4
class RemoteTorrentHandler:
    """Singleton that fetches .torrent files, torrent messages and
    thumbnails from remote peers via priority-ordered requester queues,
    and dispatches user callbacks when content arrives."""

    __single = None

    def __init__(self):
        if RemoteTorrentHandler.__single:
            raise RuntimeError, "RemoteTorrentHandler is singleton"
        RemoteTorrentHandler.__single = self

        self.registered = False
        self._searchcommunity = None

        # key (infohash / roothash / tuple) -> set of user callbacks
        self.callbacks = {}
        # prio -> requester maps for torrents, messages and magnets
        self.trequesters = {}
        self.mrequesters = {}
        self.drequesters = {}
        self.tnrequester = None

    def getInstance(*args, **kw):
        # Singleton accessor (no locking).
        if RemoteTorrentHandler.__single is None:
            RemoteTorrentHandler(*args, **kw)
        return RemoteTorrentHandler.__single
    getInstance = staticmethod(getInstance)

    def register(self, dispersy, session, max_num_torrents):
        # Wire into a live session; must run before any download_* call.
        self.session = session
        self.dispersy = dispersy
        self.max_num_torrents = max_num_torrents
        self.tor_col_dir = self.session.get_torrent_collecting_dir()

        from Tribler.Utilities.TimedTaskQueue import TimedTaskQueue
        self.tqueue = TimedTaskQueue("RemoteTorrentHandler")
        self.scheduletask = self.tqueue.add_task

        self.torrent_db = session.open_dbhandler('torrents')
        self.channel_db = self.session.open_dbhandler(NTFY_CHANNELCAST)

        self.drequesters[0] = MagnetRequester(self, 0)
        self.drequesters[1] = MagnetRequester(self, 1)
        self.tnrequester = ThumbnailRequester(self, self.session)
        self.registered = True

        # Start the collected-torrent overflow watchdog generator.
        startWorker(None, self.__check_overflow)

    def is_registered(self):
        return self.registered

    def shutdown(self):
        self.tqueue.shutdown()

    def __check_overflow(self):
        # Generator run by startWorker; each yield is presumably a delay
        # in seconds before the next resume — TODO confirm runner semantics.
        while True:
            num_torrents = self.torrent_db.getNumberCollectedTorrents()
            if DEBUG:
                print >>sys.stderr,"rtorrent: check overflow: current", num_torrents, "max", self.max_num_torrents

            if num_torrents > self.max_num_torrents:
                # Delete down to 95% of the limit, in capped steps.
                num_delete = int(num_torrents - self.max_num_torrents*0.95)
                num_per_step = max(25, num_delete / 180)
                print >> sys.stderr, "rtorrent: ** limit space::", num_torrents, self.max_num_torrents, num_delete
                while num_delete > 0:
                    to_remove = min(num_delete, num_per_step)
                    num_delete -= to_remove
                    self.torrent_db.freeSpace(to_remove)
                    yield 5.0
                LOW_PRIO_COLLECTING = 4
            elif num_torrents > (self.max_num_torrents * .75):
                LOW_PRIO_COLLECTING = 3
            else:
                LOW_PRIO_COLLECTING = 2

            # NOTE(review): LOW_PRIO_COLLECTING is assigned as a local and
            # only read in the debug print below — looks like it should
            # update a module/global setting; confirm against upstream.
            if DEBUG:
                print >> sys.stderr, "rtorrent: setting low_prio_collection to one .torrent every %.1f seconds"%(LOW_PRIO_COLLECTING *.5)

            yield 30 * 60.0 #run every 30 minutes

    @property
    def searchcommunity(self):
        # Lazily locate the SearchCommunity among dispersy communities.
        # Returns None until registered.
        if self.registered:
            if not self._searchcommunity:
                from Tribler.community.search.community import SearchCommunity
                for community in self.dispersy.get_communities():
                    if isinstance(community, SearchCommunity):
                        self._searchcommunity = community
                        break
            return self._searchcommunity

    def has_thumbnail(self, infohash):
        # True when the thumbs dir exists and is non-empty.
        thumb_dir = os.path.join(self.tor_col_dir, 'thumbs-'+binascii.hexlify(infohash))
        return os.path.isdir(thumb_dir) and os.listdir(thumb_dir)

    def download_thumbnail(self, candidate, roothash, infohash, usercallback = None, timeout = None):
        # Public entry point; hops onto the handler's own task queue.
        if self.registered and not self.has_thumbnail(roothash):
            raw_lambda = lambda candidate=candidate, roothash=roothash, infohash=infohash, usercallback=usercallback, timeout = timeout: self._download_thumbnail(candidate, roothash, infohash, usercallback, timeout)
            self.scheduletask(raw_lambda)

    def _download_thumbnail(self, candidate, roothash, infohash, usercallback, timeout):
        if usercallback:
            self.callbacks.setdefault(roothash, set()).add(usercallback)

        self.tnrequester.add_request((roothash, infohash), candidate, timeout)

        if DEBUG:
            print >> sys.stderr,'rtorrent: adding thumbnail request:', roothash or '', candidate

    def download_torrent(self, candidate, infohash = None, roothash = None, usercallback = None, prio = 1, timeout = None):
        # Public entry point; hops onto the handler's own task queue.
        if self.registered:
            raw_lambda = lambda candidate=candidate, infohash=infohash, roothash=roothash, usercallback=usercallback, prio=prio, timeout = timeout: self._download_torrent(candidate, infohash, roothash, usercallback, prio, timeout)
            self.scheduletask(raw_lambda)

    def _download_torrent(self, candidate, infohash, roothash, usercallback, prio, timeout):
        if self.registered:
            assert infohash or roothash, "We need either the info or roothash"

            # With a candidate + roothash we can swift-collect, keyed by
            # (infohash, roothash); otherwise fall back to magnet by infohash.
            doSwiftCollect = candidate and roothash
            if doSwiftCollect:
                requesters = self.trequesters
                hash = (infohash, roothash)
            elif infohash:
                requesters = self.drequesters
                hash = infohash
                #fix prio levels to 1 and 0
                prio = min(prio, 1)
            else:
                return

            if usercallback:
                self.callbacks.setdefault(hash, set()).add(usercallback)

            #look for lowest prio requester, which already has this infohash scheduled
            requester = None
            for i in range(prio, prio + 1):
                if i in requesters and requesters[i].is_being_requested(hash):
                    requester = requesters[i]
                    break

            #if not found, then used/create this requester
            if not requester:
                if prio not in requesters:
                    if doSwiftCollect:
                        requesters[prio] = TorrentRequester(self, self.drequesters[1], self.session, prio)
                    else:
                        requesters[prio] = MagnetRequester(self, prio)
                requester = requesters[prio]

            #make request
            requester.add_request(hash, candidate, timeout)

            if DEBUG:
                print >>sys.stderr,'rtorrent: adding torrent request:', bin2str(infohash or ''), bin2str(roothash or ''), candidate, prio

    def download_torrentmessages(self, candidate, infohashes, usercallback = None, prio = 1):
        # Public entry point; hops onto the handler's own task queue.
        if self.registered:
            raw_lambda = lambda candidate=candidate, infohashes=infohashes, usercallback=usercallback, prio=prio: self._download_torrentmessages(candidate, infohashes, usercallback, prio)
            self.scheduletask(raw_lambda)

    def _download_torrentmessages(self, candidate, infohashes, usercallback, prio):
        assert all(isinstance(infohash, str) for infohash in infohashes), "INFOHASH has invalid type"
        assert all(len(infohash) == INFOHASH_LENGTH for infohash in infohashes), "INFOHASH has invalid length:"

        if self.registered:
            if usercallback:
                for infohash in infohashes:
                    # Bind infohash now; callbacks are keyed (infohash, None).
                    callback = lambda infohash=infohash: usercallback(infohash)
                    self.callbacks.setdefault((infohash,None), set()).add(callback)

            if prio not in self.mrequesters:
                self.mrequesters[prio] = TorrentMessageRequester(self, self.searchcommunity, prio)

            requester = self.mrequesters[prio]

            #make request
            requester.add_request(frozenset(infohashes), candidate)
            if DEBUG:
                print >>sys.stderr,'rtorrent: adding torrent messages request:', map(bin2str, infohashes), candidate, prio

    def has_torrent(self, infohash, callback):
        # Async check; callback(result) is scheduled on the task queue.
        startWorker(None, self._has_torrent, wargs = (infohash, self.tor_col_dir, callback))

    def _has_torrent(self, infohash, tor_col_dir, callback):
        # result is False, or the filename of the collected .torrent.
        result = False
        torrent = self.torrent_db.getTorrent(infohash, ['torrent_file_name', 'swift_torrent_hash'], include_mypref = False)
        if torrent:
            if torrent.get('torrent_file_name', False) and os.path.isfile(torrent['torrent_file_name']):
                result = torrent['torrent_file_name']
            elif torrent.get('swift_torrent_hash', False):
                sdef = SwiftDef(torrent['swift_torrent_hash'])
                torrent_filename = os.path.join(tor_col_dir, sdef.get_roothash_as_hex())
                if os.path.isfile(torrent_filename):
                    # Re-point the DB at the file found on disk.
                    self.torrent_db.updateTorrent(infohash, notify=False, torrent_file_name=torrent_filename)
                    result = torrent_filename
        raw_lambda = lambda result=result: callback(result)
        self.scheduletask(raw_lambda)

    def save_torrent(self, tdef, callback = None):
        # Persist tdef unless it is already collected.
        if self.registered:
            def do_schedule(filename):
                if not filename:
                    self._save_torrent(tdef, callback)
                elif callback:
                    startWorker(None, callback)

            infohash = tdef.get_infohash()
            self.has_torrent(infohash, do_schedule)

    def _save_torrent(self, tdef, callback = None):
        # Write to a unique tmp file, move into the collected dir, then
        # record it in the database on a worker thread.
        tmp_filename = os.path.join(self.session.get_torrent_collecting_dir(), "tmp_"+get_collected_torrent_filename(tdef.get_infohash()))
        filename_index = 0
        while os.path.exists(tmp_filename):
            filename_index += 1
            tmp_filename = os.path.join(self.session.get_torrent_collecting_dir(), ("tmp_%d_"%filename_index)+get_collected_torrent_filename(tdef.get_infohash()))

        tdef.save(tmp_filename)
        sdef, swiftpath = self._write_to_collected(tmp_filename)

        try:
            os.remove(tmp_filename)
        except:
            # Could not delete now (e.g. still open); retry at exit.
            atexit.register(lambda tmp_filename=tmp_filename: os.remove(tmp_filename))

        def do_db(callback):
            #add this new torrent to db
            infohash = tdef.get_infohash()
            if self.torrent_db.hasTorrent(infohash):
                self.torrent_db.updateTorrent(infohash, swift_torrent_hash = sdef.get_roothash(), torrent_file_name = swiftpath)
            else:
                self.torrent_db.addExternalTorrent(tdef, extra_info = {'filename': swiftpath, 'swift_torrent_hash':sdef.get_roothash(), 'status':'good'})

            #notify all
            self.notify_possible_torrent_infohash(infohash, True)
            if callback:
                callback()

        startWorker(None, do_db, wargs = (callback, ))

    def _write_to_collected(self, filename):
        #calculate root-hash
        sdef = SwiftDef()
        sdef.add_content(filename)
        sdef.finalize(self.session.get_swift_path(), destdir = self.session.get_torrent_collecting_dir())

        # Copy into the collected dir under the roothash name, along with
        # the swift .mhash/.mbinmap metadata files.
        mfpath = os.path.join(self.session.get_torrent_collecting_dir(),sdef.get_roothash_as_hex())
        if not os.path.exists(mfpath):
            download = self.session.get_download(sdef.get_roothash())
            if download:
                self.session.remove_download(download, removestate = True)
                sleep(1)
            elif os.path.exists(mfpath + ".mhash"): #indicating failed swift download
                os.remove(mfpath + ".mhash")

            try:
                shutil.copy(filename, mfpath)
                shutil.move(filename+'.mhash', mfpath+'.mhash')
                shutil.move(filename+'.mbinmap', mfpath+'.mbinmap')
            except:
                print_exc()

        return sdef, mfpath

    def notify_possible_torrent_roothash(self, roothash):
        # Fire callbacks registered under (infohash, roothash) keys and
        # record the collected torrent in the DB if it loads.
        keys = self.callbacks.keys()
        for key in keys:
            if key[1] == roothash:
                handle_lambda = lambda key=key: self._handleCallback(key, True)
                self.scheduletask(handle_lambda)

        def do_db(tdef):
            if self.torrent_db.hasTorrent(tdef.get_infohash()):
                self.torrent_db.updateTorrent(tdef.get_infohash(), swift_torrent_hash = sdef.get_roothash(), torrent_file_name = swiftpath)
            else:
                self.torrent_db._addTorrentToDB(tdef, source = "SWIFT", extra_info = {'filename': swiftpath, 'swift_torrent_hash':roothash, 'status':'good'}, commit = True)

        sdef = SwiftDef(roothash)
        swiftpath = os.path.join(self.session.get_torrent_collecting_dir(),sdef.get_roothash_as_hex())
        if os.path.exists(swiftpath):
            try:
                tdef = TorrentDef.load(swiftpath)
                startWorker(None, do_db, wargs = (tdef, ))
            except:
                #ignore if tdef loading fails
                pass

    def notify_possible_thumbnail_roothash(self, roothash):
        # Thumbnail callbacks are keyed by the bare roothash.
        keys = self.callbacks.keys()
        for key in keys:
            if key == roothash:
                handle_lambda = lambda key=key: self._handleCallback(key, True)
                self.scheduletask(handle_lambda)
                print >>sys.stderr,'rtorrent: finished downloading thumbnail:', binascii.hexlify(roothash)

    def notify_possible_torrent_infohash(self, infohash, actualTorrent = False):
        # Matches both tuple keys (key[0]) and bare infohash keys.
        keys = self.callbacks.keys()
        for key in keys:
            if key[0] == infohash or key == infohash:
                handle_lambda = lambda key=key, actualTorrent=actualTorrent: self._handleCallback(key, actualTorrent)
                self.scheduletask(handle_lambda)

    def _handleCallback(self, key, torrent = True):
        # Run all user callbacks for key, then drop the pending request
        # from every requester that still holds it.
        if DEBUG:
            print >>sys.stderr,'rtorrent: got torrent for:', key

        if key in self.callbacks:
            for usercallback in self.callbacks[key]:
                self.session.uch.perform_usercallback(usercallback)

            del self.callbacks[key]

            if torrent:
                for requester in self.trequesters.values():
                    if requester.is_being_requested(key):
                        requester.remove_request(key)

                for requester in self.drequesters.values():
                    if requester.is_being_requested(key):
                        requester.remove_request(key)
            else:
                for requester in self.mrequesters.values():
                    if requester.is_being_requested(key):
                        requester.remove_request(key)

    def getQueueSize(self):
        # Human-readable per-queue pending counts, e.g. "TQueue: (1, 5)".
        def getQueueSize(qname, requesters):
            qsize = {}
            for requester in requesters.itervalues():
                if len(requester.sources):
                    qsize[requester.prio] = len(requester.sources)
            items = qsize.items()
            if items:
                items.sort()
                return "%s: "%qname + ",".join(map(str, items))
            return ''
        return ", ".join([qstring for qstring in [getQueueSize("TQueue", self.trequesters), getQueueSize("DQueue", self.drequesters), getQueueSize("MQueue", self.mrequesters)] if qstring])

    def getQueueSuccess(self):
        # Human-readable success/made request ratios per queue.
        def getQueueSuccess(qname, requesters):
            sum_requests = sum_success = 0
            print_value = False
            for requester in requesters.itervalues():
                if requester.requests_success >= 0:
                    print_value = True
                    sum_requests += requester.requests_made
                    sum_success += requester.requests_success
            if print_value:
                return "%s: %d/%d"%(qname, sum_success, sum_requests)
            return ''
        return ", ".join([qstring for qstring in [getQueueSuccess("TQueue", self.trequesters), getQueueSuccess("DQueue", self.drequesters), getQueueSuccess("MQueue", self.mrequesters)] if qstring])
class TestBuddyCast(unittest.TestCase):
    """Replays recorded superpeer BuddyCast messages against a live
    BuddyCast instance and reports per-message processing cost."""

    def setUp(self):
        # prepare database
        launchmany = FakeLauchMany()
        self.overlay_bridge = TimedTaskQueue()
        #self.overlay_bridge = FakeOverlayBridge()
        superpeer=False # enable it to test superpeer
        self.bc = BuddyCastFactory.getInstance(superpeer=superpeer)
        self.bc.register(self.overlay_bridge, launchmany, None, None, None, True)

    def tearDown(self):
        # 'quit' is the TimedTaskQueue sentinel that stops its worker thread.
        self.overlay_bridge.add_task('quit')
        print "Before join"

    def remove_t_index(self):
        # Drop torrent-table indices to measure their performance impact.
        indices = [
            'Torrent_length_idx',
            'Torrent_creation_date_idx',
            'Torrent_relevance_idx',
            'Torrent_num_seeders_idx',
            'Torrent_num_leechers_idx',
            #'Torrent_name_idx',
        ]
        for index in indices:
            sql = 'drop index ' + index
            self.data_handler.torrent_db._db.execute_write(sql)

    def remove_p_index(self):
        # Drop peer-table indices to measure their performance impact.
        indices = [
            'Peer_name_idx',
            'Peer_ip_idx',
            'Peer_similarity_idx',
            'Peer_last_seen_idx',
            'Peer_last_connected_idx',
            'Peer_num_peers_idx',
            'Peer_num_torrents_idx'
        ]
        for index in indices:
            sql = 'drop index ' + index
            self.data_handler.peer_db._db.execute_write(sql)

    def local_test(self):
        # Replay a recorded superpeer log, timing gotBuddyCastMessage()
        # per message and printing count/last/min/avg/max cost.
        self.remove_t_index()
        self.remove_p_index()
        from Tribler.Test.log_parser import get_buddycast_data
        #start_time = time()
        #print >> sys.stderr, "buddycast: ******************* start local test"
        costs = []
        self.data_handler.postInit(updatesim=False)
        for permid, selversion, msg in get_buddycast_data(os.path.join(FILES_DIR,'superpeer120070902sp7001.log')):
            message = bencode(msg)
            #print 'got msg:', permid, selversion, message
            try:
                s = time()
                self.bc.gotBuddyCastMessage(message, permid, selversion)
                cost = time()-s
                costs.append(cost)
            except:
                print_exc()
                break
            print 'got msg: %d %.2f %.2f %.2f %.2f' %(len(costs), cost, min(costs), sum(costs)/len(costs), max(costs))
        # with all indices, min/avg/max: 0.00 1.78 4.57 seconds
        # without index, min/avg/max: 0.00 1.38 3.43 seconds (58)
        print "Done"

    def test_start(self):
        # Any exception in the replay fails the test via assert_(False).
        try:
            self.bc.olthread_register(start=False)
            self.data_handler = self.bc.data_handler
            self.local_test()
            print "Sleeping for 10 secs"
            sleep(10)
            print "Done2"
        except:
            print_exc()
            self.assert_(False)
class OverlayThreadingBridge:
    """Bridges NetworkThread events onto dedicated worker threads: one
    TimedTaskQueue for generic overlay traffic and one for GameCast
    messages, so overlay apps never run on the network thread."""

    __single = None
    lock = threading.Lock()

    def __init__(self):
        if OverlayThreadingBridge.__single:
            raise RuntimeError, "OverlayThreadingBridge is Singleton"
        OverlayThreadingBridge.__single = self

        self.secover = None
        self.olapps = None
        self.olappsmsghandler = None
        self.olappsconnhandler = None

        # Current impl of wrapper: single thread
        self.tqueue = TimedTaskQueue(nameprefix="Overlay")
        self.gcqueue = TimedTaskQueue(nameprefix="GameCast")

    def getInstance(*args, **kw):
        # Singleton pattern with double-checking
        if OverlayThreadingBridge.__single is None:
            OverlayThreadingBridge.lock.acquire()
            try:
                if OverlayThreadingBridge.__single is None:
                    OverlayThreadingBridge(*args, **kw)
            finally:
                OverlayThreadingBridge.lock.release()
        return OverlayThreadingBridge.__single
    getInstance = staticmethod(getInstance)

    def resetSingleton(self):
        """ For testing purposes """
        OverlayThreadingBridge.__single = None

    def register_bridge(self,secover,olapps):
        """ Called by MainThread """
        self.secover = secover
        self.olapps = olapps

        secover.register_recv_callback(self.handleMessage)
        secover.register_conns_callback(self.handleConnection)

    #
    # SecOverlay interface
    #
    def register(self,launchmanycore,max_len):
        """ Called by MainThread """
        self.secover.register(launchmanycore,max_len)

        # FOR TESTING ONLY
        self.iplport2oc = self.secover.iplport2oc

    def get_handler(self):
        return self.secover

    def start_listening(self):
        """ Called by MainThread """
        self.secover.start_listening()

    def register_recv_callback(self,callback):
        """ Called by MainThread """
        self.olappsmsghandler = callback

    def register_conns_callback(self,callback):
        """ Called by MainThread """
        self.olappsconnhandler = callback

    def handleConnection(self,exc,permid,selversion,locally_initiated,hisdns):
        """ Called by NetworkThread """
        # called by SecureOverlay.got_auth_connection() or cleanup_admin_and_callbacks()
        if DEBUG:
            print >>sys.stderr,"olbridge: handleConnection",exc,show_permid_short(permid),selversion,locally_initiated,hisdns,currentThread().getName()

        def olbridge_handle_conn_func():
            # Called by OverlayThread
            if DEBUG:
                print >>sys.stderr,"olbridge: handle_conn_func",exc,show_permid_short(permid),selversion,locally_initiated,hisdns,currentThread().getName()

            try:
                if hisdns:
                    self.secover.add_peer_to_db(permid,hisdns,selversion)

                # self.olappsconnhandler = OverlayApps.handleConnection
                if self.olappsconnhandler is not None:
                    self.olappsconnhandler(exc,permid,selversion,locally_initiated)
            except:
                print_exc()

            if isinstance(exc,CloseException):
                self.secover.update_peer_status(permid,exc.was_auth_done())

        self.tqueue.add_task(olbridge_handle_conn_func,0)

    def handleMessage(self,permid,selversion,message):
        """ Called by NetworkThread """
        #ProxyService_
        #
        # DEBUG
        #print "### olbridge: handleMessage", show_permid_short(permid), selversion, getMessageName(message[0]), currentThread().getName()
        #
        #_ProxyService

        if DEBUG:
            print >>sys.stderr,"olbridge: handleMessage",show_permid_short(permid),selversion,getMessageName(message[0]),currentThread().getName()

        def olbridge_handle_msg_func():
            # Called by OverlayThread
            if DEBUG:
                print >>sys.stderr,"olbridge: handle_msg_func",show_permid_short(permid),selversion,getMessageName(message[0]),currentThread().getName()
            try:
                if self.olappsmsghandler is None:
                    ret = True
                else:
                    ret = self.olappsmsghandler(permid,selversion,message)
            except:
                print_exc()
                ret = False
            # A False return from the handler closes the connection.
            if ret == False:
                if DEBUG:
                    print >>sys.stderr,"olbridge: olbridge_handle_msg_func closing!",show_permid_short(permid),selversion,getMessageName(message[0]),currentThread().getName()
                self.close(permid)

        # GameCast messages get their own worker thread.
        if message[0] in GameCastMessages:
            self.gcqueue.add_task(olbridge_handle_msg_func,0)
        else:
            self.tqueue.add_task(olbridge_handle_msg_func,0)
        return True

    def connect_dns(self,dns,callback):
        """ Called by OverlayThread/NetworkThread """
        if DEBUG:
            print >>sys.stderr,"olbridge: connect_dns",dns

        def olbridge_connect_dns_callback(cexc,cdns,cpermid,cselver):
            # Called by network thread
            if DEBUG:
                print >>sys.stderr,"olbridge: connect_dns_callback",cexc,cdns,show_permid_short(cpermid),cselver

            olbridge_connect_dns_callback_lambda = lambda:callback(cexc,cdns,cpermid,cselver)
            self.add_task(olbridge_connect_dns_callback_lambda,0)

        self.secover.connect_dns(dns,olbridge_connect_dns_callback)

    def connect(self,permid,callback,gamecast = False):
        """ Called by OverlayThread """
        if DEBUG:
            print >>sys.stderr,"olbridge: connect",show_permid_short(permid), currentThread().getName()

        def olbridge_connect_callback(cexc,cdns,cpermid,cselver):
            # Called by network thread
            if DEBUG:
                print >>sys.stderr,"olbridge: connect_callback",cexc,cdns,show_permid_short(cpermid),cselver, callback, currentThread().getName()

            olbridge_connect_callback_lambda = lambda:callback(cexc,cdns,cpermid,cselver)
            # Jie: postpone to call this callback to schedule it after the peer has been added to buddycast connection list
            # Arno, 2008-09-15: No-no-no
            if gamecast:
                self.gcqueue.add_task(olbridge_connect_callback_lambda,0)
            else:
                self.add_task(olbridge_connect_callback_lambda,0)

        self.secover.connect(permid,olbridge_connect_callback)

    def send(self,permid,msg,callback,gamecast = False):
        """ Called by OverlayThread """
        if DEBUG:
            print >>sys.stderr,"olbridge: send",show_permid_short(permid),len(msg)

        def olbridge_send_callback(cexc,cpermid):
            # Called by network thread
            if DEBUG:
                print >>sys.stderr,"olbridge: send_callback",cexc,show_permid_short(cpermid)

            olbridge_send_callback_lambda = lambda:callback(cexc,cpermid)
            if gamecast:
                self.gcqueue.add_task(olbridge_send_callback_lambda,0)
            else:
                self.add_task(olbridge_send_callback_lambda,0)

        self.secover.send(permid,msg,olbridge_send_callback)

    def close(self,permid):
        """ Called by OverlayThread """
        self.secover.close(permid)

    def add_task(self,task,t=0,ident=None, gamecast = False):
        """ Called by OverlayThread """
        if gamecast:
            self.gcqueue.add_task(task,t,ident)
        else:
            self.tqueue.add_task(task,t,ident)
class OverlayThreadingBridge:
    """Bridges NetworkThread events onto a single dedicated worker thread
    (a TimedTaskQueue), so overlay apps never run on the network thread."""

    __single = None
    lock = threading.Lock()

    def __init__(self):
        if OverlayThreadingBridge.__single:
            raise RuntimeError, "OverlayThreadingBridge is Singleton"
        OverlayThreadingBridge.__single = self

        self.secover = None
        self.olapps = None
        self.olappsmsghandler = None
        self.olappsconnhandler = None

        # Current impl of wrapper: single thread
        self.tqueue = TimedTaskQueue(nameprefix="Overlay")

    def getInstance(*args, **kw):
        # Singleton pattern with double-checking
        if OverlayThreadingBridge.__single is None:
            OverlayThreadingBridge.lock.acquire()
            try:
                if OverlayThreadingBridge.__single is None:
                    OverlayThreadingBridge(*args, **kw)
            finally:
                OverlayThreadingBridge.lock.release()
        return OverlayThreadingBridge.__single
    getInstance = staticmethod(getInstance)

    def resetSingleton(self):
        """ For testing purposes """
        OverlayThreadingBridge.__single = None

    def register_bridge(self, secover, olapps):
        """ Called by MainThread """
        self.secover = secover
        self.olapps = olapps

        secover.register_recv_callback(self.handleMessage)
        secover.register_conns_callback(self.handleConnection)

    #
    # SecOverlay interface
    #
    def register(self, launchmanycore, max_len):
        """ Called by MainThread """
        self.secover.register(launchmanycore, max_len)

        # FOR TESTING ONLY
        self.iplport2oc = self.secover.iplport2oc

    def get_handler(self):
        return self.secover

    def start_listening(self):
        """ Called by MainThread """
        self.secover.start_listening()

    def register_recv_callback(self, callback):
        """ Called by MainThread """
        self.olappsmsghandler = callback

    def register_conns_callback(self, callback):
        """ Called by MainThread """
        self.olappsconnhandler = callback

    def handleConnection(self, exc, permid, selversion, locally_initiated, hisdns):
        """ Called by NetworkThread """
        # called by SecureOverlay.got_auth_connection() or cleanup_admin_and_callbacks()
        if DEBUG:
            print >> sys.stderr, "olbridge: handleConnection", exc, show_permid_short(
                permid), selversion, locally_initiated, hisdns, currentThread(
                ).getName()

        def olbridge_handle_conn_func():
            # Called by OverlayThread
            if DEBUG:
                print >> sys.stderr, "olbridge: handle_conn_func", exc, show_permid_short(
                    permid
                ), selversion, locally_initiated, hisdns, currentThread(
                ).getName()

            try:
                if hisdns:
                    self.secover.add_peer_to_db(permid, hisdns, selversion)

                # self.olappsconnhandler = OverlayApps.handleConnection
                if self.olappsconnhandler is not None:
                    self.olappsconnhandler(exc, permid, selversion, locally_initiated)
            except:
                print_exc()

            if isinstance(exc, CloseException):
                self.secover.update_peer_status(permid, exc.was_auth_done())

        self.tqueue.add_task(olbridge_handle_conn_func, 0)

    def handleMessage(self, permid, selversion, message):
        """ Called by NetworkThread """
        #ProxyService_
        #
        # DEBUG
        #print "### olbridge: handleMessage", show_permid_short(permid), selversion, getMessageName(message[0]), currentThread().getName()
        #
        #_ProxyService

        if DEBUG:
            print >> sys.stderr, "olbridge: handleMessage", show_permid_short(
                permid), selversion, getMessageName(
                    message[0]), currentThread().getName()

        def olbridge_handle_msg_func():
            # Called by OverlayThread
            if DEBUG:
                print >> sys.stderr, "olbridge: handle_msg_func", show_permid_short(
                    permid), selversion, getMessageName(
                        message[0]), currentThread().getName()
            try:
                if self.olappsmsghandler is None:
                    ret = True
                else:
                    ret = self.olappsmsghandler(permid, selversion, message)
            except:
                print_exc()
                ret = False
            # A False return from the handler closes the connection.
            if ret == False:
                if DEBUG:
                    print >> sys.stderr, "olbridge: olbridge_handle_msg_func closing!", show_permid_short(
                        permid), selversion, getMessageName(
                            message[0]), currentThread().getName()
                self.close(permid)

        self.tqueue.add_task(olbridge_handle_msg_func, 0)
        return True

    def connect_dns(self, dns, callback):
        """ Called by OverlayThread/NetworkThread """
        if DEBUG:
            print >> sys.stderr, "olbridge: connect_dns", dns

        def olbridge_connect_dns_callback(cexc, cdns, cpermid, cselver):
            # Called by network thread
            if DEBUG:
                print >> sys.stderr, "olbridge: connect_dns_callback", cexc, cdns, show_permid_short(
                    cpermid), cselver

            olbridge_connect_dns_callback_lambda = lambda: callback(
                cexc, cdns, cpermid, cselver)
            self.add_task(olbridge_connect_dns_callback_lambda, 0)

        self.secover.connect_dns(dns, olbridge_connect_dns_callback)

    def connect(self, permid, callback):
        """ Called by OverlayThread """
        if DEBUG:
            print >> sys.stderr, "olbridge: connect", show_permid_short(
                permid), currentThread().getName()

        def olbridge_connect_callback(cexc, cdns, cpermid, cselver):
            # Called by network thread
            if DEBUG:
                print >> sys.stderr, "olbridge: connect_callback", cexc, cdns, show_permid_short(
                    cpermid), cselver, callback, currentThread().getName()

            olbridge_connect_callback_lambda = lambda: callback(
                cexc, cdns, cpermid, cselver)
            # Jie: postpone to call this callback to schedule it after the peer has been added to buddycast connection list
            # Arno, 2008-09-15: No-no-no
            self.add_task(olbridge_connect_callback_lambda, 0)

        self.secover.connect(permid, olbridge_connect_callback)

    def send(self, permid, msg, callback):
        """ Called by OverlayThread """
        if DEBUG:
            print >> sys.stderr, "olbridge: send", show_permid_short(
                permid), len(msg)

        def olbridge_send_callback(cexc, cpermid):
            # Called by network thread
            if DEBUG:
                print >> sys.stderr, "olbridge: send_callback", cexc, show_permid_short(
                    cpermid)

            olbridge_send_callback_lambda = lambda: callback(cexc, cpermid)
            self.add_task(olbridge_send_callback_lambda, 0)

        self.secover.send(permid, msg, olbridge_send_callback)

    def close(self, permid):
        """ Called by OverlayThread """
        self.secover.close(permid)

    def add_task(self, task, t=0, ident=None):
        """ Called by OverlayThread """
        self.tqueue.add_task(task, t, ident)
class TestBuddyCast(unittest.TestCase):
    """Integration test replaying logged BuddyCast traffic against a live
    BuddyCastFactory instance backed by a real TimedTaskQueue."""

    def setUp(self):
        # prepare database
        launchmany = FakeLauchMany()
        self.overlay_bridge = TimedTaskQueue()
        #self.overlay_bridge = FakeOverlayBridge()
        superpeer = False  # enable it to test superpeer
        self.bc = BuddyCastFactory.getInstance(superpeer=superpeer)
        self.bc.register(self.overlay_bridge, launchmany, None, None, None, True)

    def tearDown(self):
        # 'quit' is the TimedTaskQueue sentinel that stops the worker thread.
        self.overlay_bridge.add_task('quit')
        print "Before join"

    def remove_t_index(self):
        # Drop the Torrent table indices to benchmark without them.
        # NOTE(review): uses self.data_handler, which is only assigned in
        # test_start() — this helper must be called after olthread_register.
        indices = [
            'Torrent_length_idx',
            'Torrent_creation_date_idx',
            'Torrent_relevance_idx',
            'Torrent_num_seeders_idx',
            'Torrent_num_leechers_idx',
            #'Torrent_name_idx',
        ]
        for index in indices:
            sql = 'drop index ' + index
            self.data_handler.torrent_db._db.execute_write(sql)

    def remove_p_index(self):
        # Drop the Peer table indices to benchmark without them.
        indices = [
            'Peer_name_idx',
            'Peer_ip_idx',
            'Peer_similarity_idx',
            'Peer_last_seen_idx',
            'Peer_last_connected_idx',
            'Peer_num_peers_idx',
            'Peer_num_torrents_idx'
        ]
        for index in indices:
            sql = 'drop index ' + index
            self.data_handler.peer_db._db.execute_write(sql)

    def local_test(self):
        # Replay a recorded superpeer log through gotBuddyCastMessage and
        # time each message; runs until the log ends or a message fails.
        self.remove_t_index()
        self.remove_p_index()

        from Tribler.Test.log_parser import get_buddycast_data

        #start_time = time()
        #print >> sys.stderr, "buddycast: ******************* start local test"
        costs = []
        self.data_handler.postInit(updatesim=False)
        for permid, selversion, msg in get_buddycast_data(os.path.join(FILES_DIR, 'superpeer120070902sp7001.log')):
            message = bencode(msg)
            #print 'got msg:', permid, selversion, message
            try:
                s = time()
                self.bc.gotBuddyCastMessage(message, permid, selversion)
                cost = time() - s
                costs.append(cost)
            except:
                print_exc()
                break
            # Per-message progress: count, last, min, avg, max (seconds).
            print 'got msg: %d %.2f %.2f %.2f %.2f' % (len(costs), cost, min(costs), sum(costs) / len(costs), max(costs))
        # with all indices, min/avg/max: 0.00 1.78 4.57 seconds
        # without index, min/avg/max: 0.00 1.38 3.43 seconds (58)
        print "Done"

    def test_start(self):
        try:
            # start=False: register the overlay thread without kicking off
            # the periodic BuddyCast rounds; we drive messages manually.
            self.bc.olthread_register(start=False)
            self.data_handler = self.bc.data_handler
            self.local_test()
            print "Sleeping for 10 secs"
            sleep(10)
            print "Done2"
        except:
            # Any exception is a test failure; keep the traceback visible.
            print_exc()
            self.assert_(False)
def setUp(self): self.ntasks = 0 self.completed = [] self.guiserver = TimedTaskQueue()
def __init__(self): from Tribler.Utilities.TimedTaskQueue import TimedTaskQueue self.queue = TimedTaskQueue("FakeCallback")
class RemoteTorrentHandler:
    """Handles the case where the user did a remote query and now selected
    one of the returned torrents for download: schedules .torrent /
    torrent-message / thumbnail downloads via prioritized requester queues
    (swift, magnet/DHT, search community) and fires user callbacks when the
    data arrives.  Singleton.

    Fixes vs. previous revision:
      * remove_all_requests() now actually calls requester.remove_all_requests()
        (the call parentheses were missing, making the loop a no-op).
      * _download_torrent() no longer raises KeyError when no requester exists
        for the requested priority (swift collect off and DHT collecting
        disabled); it now skips the request instead.
    """

    __single = None

    def __init__(self):
        RemoteTorrentHandler.__single = self

        self.registered = False
        self._searchcommunity = None

        # key -> set of user callbacks; key is an infohash, a roothash or an
        # (infohash, roothash) tuple depending on the request type.
        self.callbacks = {}

        # Requester pools, keyed by priority.
        self.trequesters = {}   # swift torrent requesters
        self.mrequesters = {}   # torrent-message requesters
        self.drequesters = {}   # magnet/DHT requesters
        self.tnrequester = None # thumbnail requester

        self.num_torrents = 0

    def getInstance(*args, **kw):
        if RemoteTorrentHandler.__single is None:
            RemoteTorrentHandler(*args, **kw)
        return RemoteTorrentHandler.__single
    getInstance = staticmethod(getInstance)

    def delInstance(*args, **kw):
        RemoteTorrentHandler.__single = None
    delInstance = staticmethod(delInstance)

    def register(self, dispersy, database_thead, session, max_num_torrents):
        """Wire in session/dispersy, start the task queue and requesters.
        Must be called before any download_* method."""
        self.session = session
        self.dispersy = dispersy
        self.database_thead = database_thead
        self.max_num_torrents = max_num_torrents
        self.tor_col_dir = self.session.get_torrent_collecting_dir()

        from Tribler.Utilities.TimedTaskQueue import TimedTaskQueue
        self.tqueue = TimedTaskQueue("RemoteTorrentHandler")
        self.scheduletask = self.tqueue.add_task

        self.torrent_db = None
        if self.session.get_megacache():
            self.torrent_db = session.open_dbhandler('torrents')
            # Periodic collected-torrent-count housekeeping (generator task).
            self.database_thead.register(self.__check_overflow, delay=30.0)

        if session.get_dht_torrent_collecting():
            self.drequesters[0] = MagnetRequester(self, 0)
            self.drequesters[1] = MagnetRequester(self, 1)

        self.tnrequester = ThumbnailRequester(self, self.session)
        self.registered = True

    def is_registered(self):
        return self.registered

    def shutdown(self):
        self.tqueue.shutdown(True)

    def set_max_num_torrents(self, max_num_torrents):
        self.max_num_torrents = max_num_torrents

    def __check_overflow(self):
        """Generator task: every 30 minutes prune collected torrents above
        max_num_torrents in small batches, yielding the re-schedule delay."""
        while True:
            self.num_torrents = self.torrent_db.getNumberCollectedTorrents()
            if DEBUG:
                print >> sys.stderr, "rtorrent: check overflow: current", self.num_torrents, "max", self.max_num_torrents

            if self.num_torrents > self.max_num_torrents:
                # Delete down to 95% of the limit, spread over ~15 minutes.
                num_delete = int(self.num_torrents - self.max_num_torrents * 0.95)
                num_per_step = max(25, num_delete / 180)
                print >> sys.stderr, "rtorrent: ** limit space::", self.num_torrents, self.max_num_torrents, num_delete
                while num_delete > 0:
                    to_remove = min(num_delete, num_per_step)
                    num_delete -= to_remove
                    self.torrent_db.freeSpace(to_remove)
                    yield 5.0
                LOW_PRIO_COLLECTING = 4
            elif self.num_torrents > (self.max_num_torrents * .75):
                LOW_PRIO_COLLECTING = 3
            else:
                LOW_PRIO_COLLECTING = 2

            if DEBUG:
                print >> sys.stderr, "rtorrent: setting low_prio_collection to one .torrent every %.1f seconds" % (LOW_PRIO_COLLECTING * .5)
            yield 30 * 60.0  # run every 30 minutes

    @property
    def searchcommunity(self):
        # Lazily locate the SearchCommunity among dispersy communities;
        # returns None until registered.
        if self.registered:
            if not self._searchcommunity:
                from Tribler.community.search.community import SearchCommunity
                for community in self.dispersy.get_communities():
                    if isinstance(community, SearchCommunity):
                        self._searchcommunity = community
                        break
            return self._searchcommunity

    def has_thumbnail(self, infohash):
        # A thumbnail exists if its directory exists and is non-empty.
        thumb_dir = os.path.join(self.tor_col_dir, 'thumbs-' + binascii.hexlify(infohash))
        return os.path.isdir(thumb_dir) and os.listdir(thumb_dir)

    def download_thumbnail(self, candidate, roothash, infohash, usercallback=None, timeout=None):
        """Schedule a thumbnail download from candidate (no-op if present)."""
        if self.registered and not self.has_thumbnail(roothash):
            raw_lambda = lambda candidate = candidate, roothash = roothash, infohash = infohash, usercallback = usercallback, timeout = timeout: self._download_thumbnail(candidate, roothash, infohash, usercallback, timeout)
            self.scheduletask(raw_lambda)

    def _download_thumbnail(self, candidate, roothash, infohash, usercallback, timeout):
        if usercallback:
            self.callbacks.setdefault(roothash, set()).add(usercallback)

        self.tnrequester.add_request((roothash, infohash), candidate, timeout)
        if DEBUG:
            print >> sys.stderr, 'rtorrent: adding thumbnail request:', roothash or '', candidate

    def download_torrent(self, candidate, infohash=None, roothash=None, usercallback=None, prio=1, timeout=None):
        """Schedule a .torrent download (swift if roothash+candidate, else
        magnet/DHT); usercallback fires when the torrent is available."""
        if self.registered:
            raw_lambda = lambda candidate = candidate, infohash = infohash, roothash = roothash, usercallback = usercallback, prio = prio, timeout = timeout: self._download_torrent(candidate, infohash, roothash, usercallback, prio, timeout)
            self.scheduletask(raw_lambda)

    def _download_torrent(self, candidate, infohash, roothash, usercallback, prio, timeout):
        if self.registered:
            assert infohash or roothash, "We need either the info or roothash"

            doSwiftCollect = candidate and roothash
            if doSwiftCollect:
                requesters = self.trequesters
                hash = (infohash, roothash)
            elif infohash:
                requesters = self.drequesters
                hash = infohash
                # fix prio levels to 1 and 0
                prio = min(prio, 1)
            else:
                # roothash without a candidate: nothing we can do.
                return

            if usercallback:
                self.callbacks.setdefault(hash, set()).add(usercallback)

            # look for lowest prio requester, which already has this infohash scheduled
            requester = None
            for i in range(prio, prio + 1):
                if i in requesters and requesters[i].is_being_requested(hash):
                    requester = requesters[i]
                    break

            # if not found, then use/create the requester for this priority
            if not requester:
                if prio not in requesters:
                    if doSwiftCollect:
                        requesters[prio] = TorrentRequester(self, self.drequesters.get(1, None), self.session, prio)
                    elif self.session.get_dht_torrent_collecting():
                        requesters[prio] = MagnetRequester(self, prio)
                # FIX: .get() instead of [] — when DHT collecting is disabled
                # no requester was created above and [] raised KeyError here.
                requester = requesters.get(prio)

            # make request
            if requester:
                requester.add_request(hash, candidate, timeout)
                if DEBUG:
                    print >> sys.stderr, 'rtorrent: adding torrent request:', bin2str(infohash or ''), bin2str(roothash or ''), candidate, prio

    def download_torrentmessages(self, candidate, infohashes, usercallback=None, prio=1):
        """Schedule torrent-message requests via the search community."""
        if self.registered:
            raw_lambda = lambda candidate = candidate, infohashes = infohashes, usercallback = usercallback, prio = prio: self._download_torrentmessages(candidate, infohashes, usercallback, prio)
            self.scheduletask(raw_lambda)

    def _download_torrentmessages(self, candidate, infohashes, usercallback, prio):
        assert all(isinstance(infohash, str) for infohash in infohashes), "INFOHASH has invalid type"
        assert all(len(infohash) == INFOHASH_LENGTH for infohash in infohashes), "INFOHASH has invalid length:"

        if self.registered:
            if usercallback:
                for infohash in infohashes:
                    # Bind infohash as a default to avoid late-binding closure bug.
                    callback = lambda infohash = infohash: usercallback(infohash)
                    self.callbacks.setdefault((infohash, None), set()).add(callback)

            if prio not in self.mrequesters:
                self.mrequesters[prio] = TorrentMessageRequester(self, self.searchcommunity, prio)

            requester = self.mrequesters[prio]

            # make request
            requester.add_request(frozenset(infohashes), candidate)
            if DEBUG:
                print >> sys.stderr, 'rtorrent: adding torrent messages request:', map(bin2str, infohashes), candidate, prio

    def has_torrent(self, infohash, callback):
        """Async check; callback receives the collected filename or False."""
        if self.torrent_db:
            self.database_thead.register(self._has_torrent, args=(infohash, self.tor_col_dir, callback))
        else:
            callback(False)

    def _has_torrent(self, infohash, tor_col_dir, callback):
        # Runs on the database thread; result is bounced back via tqueue.
        result = False
        torrent = self.torrent_db.getTorrent(infohash, ['torrent_file_name', 'swift_torrent_hash'], include_mypref=False)
        if torrent:
            if torrent.get('torrent_file_name', False) and os.path.isfile(torrent['torrent_file_name']):
                result = torrent['torrent_file_name']
            elif torrent.get('swift_torrent_hash', False):
                sdef = SwiftDef(torrent['swift_torrent_hash'])
                torrent_filename = os.path.join(tor_col_dir, sdef.get_roothash_as_hex())
                if os.path.isfile(torrent_filename):
                    # Remember the on-disk location for next time.
                    self.torrent_db.updateTorrent(infohash, notify=False, torrent_file_name=torrent_filename)
                    result = torrent_filename

        raw_lambda = lambda result = result: callback(result)
        self.scheduletask(raw_lambda)

    def save_torrent(self, tdef, callback=None):
        """Persist tdef into the collected dir + DB unless already present."""
        if self.registered:
            def do_schedule(filename):
                if not filename:
                    self._save_torrent(tdef, callback)
                elif callback:
                    self.database_thead.register(callback)

            infohash = tdef.get_infohash()
            self.has_torrent(infohash, do_schedule)

    def _save_torrent(self, tdef, callback=None):
        # Write to a unique temp file first, then move into the collected dir.
        tmp_filename = os.path.join(self.session.get_torrent_collecting_dir(), "tmp_" + get_collected_torrent_filename(tdef.get_infohash()))
        filename_index = 0
        while os.path.exists(tmp_filename):
            filename_index += 1
            tmp_filename = os.path.join(self.session.get_torrent_collecting_dir(), ("tmp_%d_" % filename_index) + get_collected_torrent_filename(tdef.get_infohash()))

        tdef.save(tmp_filename)
        sdef, swiftpath = self._write_to_collected(tmp_filename)

        try:
            os.remove(tmp_filename)
        except:
            # File busy (e.g. swift still holds it): retry removal at exit.
            atexit.register(lambda tmp_filename=tmp_filename: os.remove(tmp_filename))

        def do_db(callback):
            # add this new torrent to db
            infohash = tdef.get_infohash()
            if self.torrent_db.hasTorrent(infohash):
                self.torrent_db.updateTorrent(infohash, swift_torrent_hash=sdef.get_roothash(), torrent_file_name=swiftpath)
            else:
                self.torrent_db.addExternalTorrent(tdef, extra_info={'filename': swiftpath, 'swift_torrent_hash': sdef.get_roothash(), 'status': 'good'})

            # notify all
            self.notify_possible_torrent_infohash(infohash, True)
            if callback:
                callback()

        if self.torrent_db:
            self.database_thead.register(do_db, args=(callback,))
        elif callback:
            callback()

    def _write_to_collected(self, filename):
        """Compute the swift root hash for filename and place the file (plus
        .mhash/.mbinmap side files) in the collected dir; returns (sdef, path)."""
        # calculate root-hash
        sdef = SwiftDef()
        sdef.add_content(filename)
        sdef.finalize(self.session.get_swift_path(), destdir=self.session.get_torrent_collecting_dir())

        mfpath = os.path.join(self.session.get_torrent_collecting_dir(), sdef.get_roothash_as_hex())
        if not os.path.exists(mfpath):
            download = self.session.get_download(sdef.get_roothash())
            if download:
                self.session.remove_download(download, removestate=True)
                sleep(1)
            elif os.path.exists(mfpath + ".mhash"):  # indicating failed swift download
                os.remove(mfpath + ".mhash")

            try:
                shutil.copy(filename, mfpath)
                shutil.move(filename + '.mhash', mfpath + '.mhash')
                shutil.move(filename + '.mbinmap', mfpath + '.mbinmap')
            except:
                print_exc()

        return sdef, mfpath

    def notify_possible_torrent_roothash(self, roothash):
        """A swift download for roothash completed: fire matching callbacks
        and record the torrent in the DB if it loads."""
        keys = self.callbacks.keys()
        for key in keys:
            if key[1] == roothash:
                handle_lambda = lambda key = key: self._handleCallback(key, True)
                self.scheduletask(handle_lambda)

        def do_db(tdef):
            if self.torrent_db.hasTorrent(tdef.get_infohash()):
                self.torrent_db.updateTorrent(tdef.get_infohash(), swift_torrent_hash=sdef.get_roothash(), torrent_file_name=swiftpath)
            else:
                self.torrent_db._addTorrentToDB(tdef, source="SWIFT", extra_info={'filename': swiftpath, 'swift_torrent_hash': roothash, 'status': 'good'}, commit=True)

        sdef = SwiftDef(roothash)
        swiftpath = os.path.join(self.session.get_torrent_collecting_dir(), sdef.get_roothash_as_hex())
        if os.path.exists(swiftpath) and self.torrent_db:
            try:
                tdef = TorrentDef.load(swiftpath)
                self.database_thead.register(do_db, args=(tdef,))
            except:
                # ignore if tdef loading fails
                pass

    def notify_possible_thumbnail_roothash(self, roothash):
        keys = self.callbacks.keys()
        for key in keys:
            if key == roothash:
                handle_lambda = lambda key = key: self._handleCallback(key, True)
                self.scheduletask(handle_lambda)
                print >> sys.stderr, 'rtorrent: finished downloading thumbnail:', binascii.hexlify(roothash)

    def notify_possible_torrent_infohash(self, infohash, actualTorrent=False):
        keys = self.callbacks.keys()
        for key in keys:
            # key may be an (infohash, roothash) tuple or a bare infohash.
            if key[0] == infohash or key == infohash:
                handle_lambda = lambda key = key, actualTorrent = actualTorrent: self._handleCallback(key, actualTorrent)
                self.scheduletask(handle_lambda)

    def _handleCallback(self, key, torrent=True):
        """Fire and clear user callbacks for key; drop outstanding requests."""
        if DEBUG:
            print >> sys.stderr, 'rtorrent: got torrent for:', key

        if key in self.callbacks:
            for usercallback in self.callbacks[key]:
                self.session.uch.perform_usercallback(usercallback)
            del self.callbacks[key]

        if torrent:
            for requester in self.trequesters.values():
                if requester.is_being_requested(key):
                    requester.remove_request(key)
            for requester in self.drequesters.values():
                if requester.is_being_requested(key):
                    requester.remove_request(key)
        else:
            for requester in self.mrequesters.values():
                if requester.is_being_requested(key):
                    requester.remove_request(key)

    def getQueueSize(self):
        """Human-readable per-queue pending counts, e.g. 'TQueue: (1, 5)'."""
        def getQueueSize(qname, requesters):
            qsize = {}
            for requester in requesters.itervalues():
                if len(requester.sources):
                    qsize[requester.prio] = len(requester.sources)
            items = qsize.items()
            if items:
                items.sort()
                return "%s: " % qname + ",".join(map(str, items))
            return ''
        return ", ".join([qstring for qstring in [getQueueSize("TQueue", self.trequesters), getQueueSize("DQueue", self.drequesters), getQueueSize("MQueue", self.mrequesters)] if qstring])

    def getQueueSuccess(self):
        """Per-queue (summary, tooltip) success statistics for the GUI."""
        def getQueueSuccess(qname, requesters):
            sum_requests = sum_success = sum_fail = sum_on_disk = 0
            print_value = False
            for requester in requesters.itervalues():
                if requester.requests_success >= 0:
                    print_value = True
                    sum_requests += requester.requests_made
                    sum_success += requester.requests_success
                    sum_fail += requester.requests_fail
                    sum_on_disk += requester.requests_on_disk
            if print_value:
                return "%s: %d/%d" % (qname, sum_success, sum_requests), "%s: success %d, pending %d, on disk %d, failed %d" % (qname, sum_success, sum_requests - sum_success - sum_fail - sum_on_disk, sum_on_disk, sum_fail)
            return '', ''
        return [(qstring, qtooltip) for qstring, qtooltip in [getQueueSuccess("TQueue", self.trequesters), getQueueSuccess("DQueue", self.drequesters), getQueueSuccess("MQueue", self.mrequesters)] if qstring]

    def remove_all_requests(self):
        print >> sys.stderr, "ONLY USE FOR TESTING PURPOSES"
        for requester in self.trequesters.values() + self.mrequesters.values() + self.drequesters.values():
            # FIX: the call parentheses were missing, so this loop did nothing.
            requester.remove_all_requests()
def __init__(self): if GUITaskQueue.__single: raise RuntimeError, "GUITaskQueue is singleton" GUITaskQueue.__single = self TimedTaskQueue.__init__(self, nameprefix = "GUITaskQueue")