def setUp(self):
    # Build the test fixture: a fake session/launch manager plus a real,
    # non-daemon TimedTaskQueue thread standing in for the overlay bridge.
    # prepare database
    launchmany = FakeLauchMany()
    self.overlay_bridge = TimedTaskQueue(isDaemon=False)
    #self.overlay_bridge = FakeOverlayBridge()
    # DataHandler under test; 2500 is the peer-cache cap handed to it.
    self.data_handler = DataHandler(launchmany, self.overlay_bridge, max_num_peers=2500)
def test_addTask(self):
    """Schedule tasks with mixed delays; the final counter encodes order."""
    self.queue = TimedTaskQueue()
    self.count = 0
    # (callback, delay-in-seconds) pairs, deliberately not in delay order.
    schedule = [
        (self.task3a, 3),
        (self.task0, 0),
        (self.task3b, 3),
        (self.task2, 1),
    ]
    for callback, delay in schedule:
        self.queue.add_task(callback, delay)
    # Longest delay is 3s; 6s gives the worker thread a comfortable margin.
    sleep(6)
    assert self.count == 11
    del self.queue
def test_addTask0FIFO(self):
    """Four zero-delay tasks must run in submission (FIFO) order."""
    self.queue = TimedTaskQueue()
    self.count = 0
    for immediate_task in (self.task0a, self.task0b, self.task0c, self.task0d):
        self.queue.add_task(immediate_task, 0)
    # Give the worker thread ample time to drain the queue.
    sleep(6)
    assert self.count == 4
    del self.queue
def setUp(self):
    # Build the fixture: fake launch manager, a real overlay task queue,
    # and the BuddyCast singleton registered against them.
    # prepare database
    launchmany = FakeLauchMany()
    self.overlay_bridge = TimedTaskQueue(isDaemon=False)
    #self.overlay_bridge = FakeOverlayBridge()
    superpeer = False  # enable it to test superpeer
    self.bc = BuddyCastFactory.getInstance(superpeer=superpeer)
    self.bc.register(self.overlay_bridge, launchmany, None, None, None, True)
def __init__(self):
    """Initialise the singleton bridge state and its worker thread.

    Raises RuntimeError if an instance already exists. Uses the
    call-form raise syntax (valid on both Python 2 and 3) instead of
    the Python-2-only ``raise E, msg`` statement form.
    """
    if OverlayThreadingBridge.__single:
        raise RuntimeError("OverlayThreadingBridge is Singleton")
    OverlayThreadingBridge.__single = self

    self.secover = None            # secure-overlay instance, set by register()
    self.olapps = None             # overlay-apps dispatcher, set by register_bridge()
    self.olappsmsghandler = None   # message callback
    self.olappsconnhandler = None  # connection callback

    # Current impl of wrapper: single thread
    self.tqueue = TimedTaskQueue(nameprefix="Overlay")
class TestBuddyCastDataHandler(unittest.TestCase):
    """Exercises DataHandler start-up against a fake launch manager."""

    def setUp(self):
        # prepare database
        launchmany = FakeLauchMany()
        self.overlay_bridge = TimedTaskQueue()
        # self.overlay_bridge = FakeOverlayBridge()
        self.data_handler = DataHandler(launchmany, self.overlay_bridge, max_num_peers=2500)

    def tearDown(self):
        # "quit" is the sentinel task that stops the TimedTaskQueue thread.
        self.overlay_bridge.add_task("quit")

    def test_postInit(self):
        # self.data_handler.postInit()
        self.data_handler.postInit(1, 50, 0, 50)
def setUp(self):
    # Build the test fixture: fake launch manager plus a real task-queue
    # thread acting as the overlay bridge for the DataHandler under test.
    # prepare database
    launchmany = FakeLauchMany()
    self.overlay_bridge = TimedTaskQueue()
    # self.overlay_bridge = FakeOverlayBridge()
    # 2500 is the peer-cache cap handed to the DataHandler.
    self.data_handler = DataHandler(launchmany, self.overlay_bridge, max_num_peers=2500)
class TestBuddyCastDataHandler(unittest.TestCase):
    """Exercises DataHandler start-up against a fake launch manager."""

    def setUp(self):
        # prepare database
        launchmany = FakeLauchMany()
        self.overlay_bridge = TimedTaskQueue(isDaemon=False)
        #self.overlay_bridge = FakeOverlayBridge()
        self.data_handler = DataHandler(launchmany, self.overlay_bridge, max_num_peers=2500)

    def tearDown(self):
        # 'quit' is the sentinel task that stops the TimedTaskQueue thread.
        self.overlay_bridge.add_task('quit')

    def test_postInit(self):
        #self.data_handler.postInit()
        self.data_handler.postInit(1, 50, 0, 50)
def setUp(self):
    # Build the fixture: fake launch manager, a real overlay task queue,
    # and the BuddyCast singleton registered against them.
    # prepare database
    launchmany = FakeLauchMany()
    self.overlay_bridge = TimedTaskQueue()
    # self.overlay_bridge = FakeOverlayBridge()
    superpeer = False  # enable it to test superpeer
    self.bc = BuddyCastFactory.getInstance(superpeer=superpeer)
    self.bc.register(self.overlay_bridge, launchmany, None, None, None, True)
def test_addTask0FIFO(self):
    """Submission order must be preserved for tasks queued with zero delay."""
    self.queue = TimedTaskQueue()
    self.count = 0
    fifo_tasks = [self.task0a, self.task0b, self.task0c, self.task0d]
    for task in fifo_tasks:
        self.queue.add_task(task, 0)
    # Plenty of time for the queue thread to run all four.
    sleep(6)
    assert self.count == 4
    del self.queue
def test_addTask(self):
    """Mixed-delay tasks: the running sum in self.count proves ordering."""
    self.queue = TimedTaskQueue()
    self.count = 0
    pending = (
        (self.task3a, 3),
        (self.task0, 0),
        (self.task3b, 3),
        (self.task2, 1),
    )
    for job, delay in pending:
        self.queue.add_task(job, delay)
    # 6s comfortably exceeds the longest (3s) delay.
    sleep(6)
    assert self.count == 11
    del self.queue
def __init__(self, redirectstderrout, appname, params, single_instance_checker, installdir, i2iport, sport):
    # Background-process entry point: brings up the video HTTP server,
    # initialises the base application, then wires the P2P-search HTTP
    # path mappers onto the same server.

    # HTTP server that streams video to the player plug-in.
    self.videoHTTPServer = VideoHTTPServer(VIDEOHTTP_LISTENPORT)
    self.videoHTTPServer.register(self.videoservthread_error_callback, self.videoservthread_set_status_callback)
    self.videoHTTPServer.background_serve()

    #self.searchHTTPServer = MultiHTTPServer(VIDEOHTTP_LISTENPORT+1)
    #self.searchHTTPServer.register(self.videoservthread_error_callback,self.videoservthread_set_status_callback)
    # Search requests are served by the same HTTP server as video.
    self.searchHTTPServer = self.videoHTTPServer

    BaseApp.__init__(self, redirectstderrout, appname, params, single_instance_checker, installdir, i2iport, sport)

    # SEARCH:P2P
    # Maps a query ID to the original searchstr, timestamp and all hits (local + remote)
    self.id2hits = Query2HitsMap()

    # Maps a URL path received by HTTP server to the requested resource,
    # reading or generating it dynamically.
    #
    # For saving .torrents received in hits to P2P searches using
    # SIMPLE+METADATA queries
    self.tqueue = TimedTaskQueue(nameprefix="BGTaskQueue")
    self.searchmapper = SearchPathMapper(self.s, self.id2hits, self.tqueue)
    self.hits2anypathmapper = Hits2AnyPathMapper(self.s, self.id2hits)

    self.searchHTTPServer.add_path_mapper(self.searchmapper)
    self.searchHTTPServer.add_path_mapper(self.hits2anypathmapper)
    self.searchHTTPServer.background_serve()

    # Base URL prefix clients use to issue search requests.
    self.searchurl = 'http://127.0.0.1:' + str( self.searchHTTPServer.get_port()) + URLPATH_SEARCH_PREFIX

    # Maps Downloads to a using InstanceConnection and streaminfo when it
    # plays. So it contains the Downloads in VOD mode for which there is
    # active interest from a plugin.
    #
    # At the moment each Download is used/owned by a single IC and a new
    # request for the same torrent will stop playback to the original IC
    # and resume it to the new user.
    # self.dusers = {}
    self.approxplayerstate = MEDIASTATE_STOPPED  # coarse player state reported in stats

    self.counter = 0  # counter for the stats reported periodically
    self.interval = 120  # report interval

    if sys.platform == "win32":
        # If the BG Process is started by the plug-in notify it with an event
        startupEvent = win32event.CreateEvent(None, 0, 0, 'startupEvent')
        win32event.SetEvent(startupEvent)
        win32api.CloseHandle( startupEvent ) # TODO : is it possible to avoid importing win32api just to close an handler?
def __init__(self):
    """Initialise the singleton bridge state and its worker thread.

    Raises RuntimeError if an instance already exists. Uses the
    call-form raise syntax (valid on both Python 2 and 3) instead of
    the Python-2-only ``raise E, msg`` statement form.
    """
    if OverlayThreadingBridge.__single:
        raise RuntimeError("OverlayThreadingBridge is Singleton")
    OverlayThreadingBridge.__single = self

    self.secover = None            # secure-overlay instance, set by register()
    self.olapps = None             # overlay-apps dispatcher, set by register_bridge()
    self.olappsmsghandler = None   # message callback
    self.olappsconnhandler = None  # connection callback

    # Current impl of wrapper: single thread
    self.tqueue = TimedTaskQueue(nameprefix="Overlay")
class TestGUITaskQueue(unittest.TestCase):
    """Checks that TimedTaskQueue runs every scheduled task, honouring delays."""

    def setUp(self):
        self.ntasks = 0        # how many tasks the test schedules
        self.completed = []    # ids of tasks that actually ran
        self.guiserver = TimedTaskQueue()

    def tearDown(self):
        # Let the queue thread finish, then verify every task id 0..ntasks-1
        # ran exactly once.
        sleep(2)
        self.completed.sort()
        if self.completed != range(self.ntasks):
            print "test failed",self.completed
            self.assert_(False)

    def test_simple(self):
        self.ntasks = 1
        self.guiserver.add_task(lambda:self.task(0),0)

    def test_more(self):
        self.ntasks = 10
        for i in range(self.ntasks):
            # lambda functions are evil, this is not the same as lambda:task(i)
            self.guiserver.add_task(self.define_task(i),0)

    def test_delay(self):
        self.ntasks = 1
        self.guiserver.add_task(lambda:self.task(0),3)
        print "test: sleeping 5 secs so tasks gets executed"
        sleep(5)

    def test_delay2(self):
        self.ntasks = 2
        self.guiserver.add_task(lambda:self.task(1),3)
        self.guiserver.add_task(lambda:self.task(0),1)
        print "test: sleeping 5 secs so tasks gets executed"
        sleep(5)

    def define_task(self,num):
        # Factory that binds num now, avoiding the late-binding closure pitfall.
        return lambda:self.task(num)

    def task(self,num):
        # Worker callback: record that task `num` ran.
        print "Running task",num
        self.completed.append(num)
class TestGUITaskQueue(unittest.TestCase): def setUp(self): self.ntasks = 0 self.completed = [] self.guiserver = TimedTaskQueue() def tearDown(self): sleep(2) self.completed.sort() if self.completed != range(self.ntasks): print "test failed", self.completed self.assert_(False) def test_simple(self): self.ntasks = 1 self.guiserver.add_task(lambda: self.task(0), 0) def test_more(self): self.ntasks = 10 for i in range(self.ntasks): # lambda functions are evil, this is not the same as lambda:task(i) self.guiserver.add_task(self.define_task(i), 0) def test_delay(self): self.ntasks = 1 self.guiserver.add_task(lambda: self.task(0), 3) print "test: sleeping 5 secs so tasks gets executed" sleep(5) def test_delay2(self): self.ntasks = 2 self.guiserver.add_task(lambda: self.task(1), 3) self.guiserver.add_task(lambda: self.task(0), 1) print "test: sleeping 5 secs so tasks gets executed" sleep(5) def define_task(self, num): return lambda: self.task(num) def task(self, num): print "Running task", num self.completed.append(num)
def updateDB(self, fromver, tover):
    # Applies stepwise schema migrations from `fromver` up to the current
    # main DB version (committing once at the end), then kicks off a
    # background task that back-fills the InvertedIndex keyword table.
    # bring database up to version 2, if necessary
    if fromver < 2:
        sql = """
-- Patch for BuddyCast 4

ALTER TABLE MyPreference ADD COLUMN click_position INTEGER DEFAULT -1;
ALTER TABLE MyPreference ADD COLUMN reranking_strategy INTEGER DEFAULT -1;
ALTER TABLE Preference ADD COLUMN click_position INTEGER DEFAULT -1;
ALTER TABLE Preference ADD COLUMN reranking_strategy INTEGER DEFAULT -1;

CREATE TABLE ClicklogSearch (
    peer_id INTEGER DEFAULT 0,
    torrent_id INTEGER DEFAULT 0,
    term_id INTEGER DEFAULT 0,
    term_order INTEGER DEFAULT 0
);
CREATE INDEX idx_search_term ON ClicklogSearch (term_id);
CREATE INDEX idx_search_torrent ON ClicklogSearch (torrent_id);

CREATE TABLE ClicklogTerm (
    term_id INTEGER PRIMARY KEY AUTOINCREMENT DEFAULT 0,
    term VARCHAR(255) NOT NULL,
    times_seen INTEGER DEFAULT 0 NOT NULL
);
CREATE INDEX idx_terms_term ON ClicklogTerm(term);
"""
        self.execute_write(sql, commit=False)

    if fromver < 3:
        sql = """
-- Patch for Local Peer Discovery

ALTER TABLE Peer ADD COLUMN is_local integer DEFAULT 0;
"""
        self.execute_write(sql, commit=False)

    if fromver < 4:
        sql = """
-- V2: Patch for VoteCast

DROP TABLE IF EXISTS ModerationCast;
DROP INDEX IF EXISTS moderationcast_idx;

DROP TABLE IF EXISTS Moderators;
DROP INDEX IF EXISTS moderators_idx;

DROP TABLE IF EXISTS VoteCast;
DROP INDEX IF EXISTS votecast_idx;

CREATE TABLE VoteCast (
    mod_id text,
    voter_id text,
    vote integer,
    time_stamp integer
);

CREATE INDEX mod_id_idx on VoteCast (mod_id);
CREATE INDEX voter_id_idx on VoteCast (voter_id);

CREATE UNIQUE INDEX votecast_idx ON VoteCast (mod_id, voter_id);

--- patch for BuddyCast 5 : Creation of Popularity table and relevant stuff

CREATE TABLE Popularity (
    torrent_id INTEGER,
    peer_id INTEGER,
    msg_receive_time NUMERIC,
    size_calc_age NUMERIC,
    num_seeders INTEGER DEFAULT 0,
    num_leechers INTEGER DEFAULT 0,
    num_of_sources INTEGER DEFAULT 0
);

CREATE INDEX Message_receive_time_idx ON Popularity (msg_receive_time);
CREATE INDEX Size_calc_age_idx ON Popularity (size_calc_age);
CREATE INDEX Number_of_seeders_idx ON Popularity (num_seeders);
CREATE INDEX Number_of_leechers_idx ON Popularity (num_leechers);
CREATE UNIQUE INDEX Popularity_idx ON Popularity (torrent_id, peer_id, msg_receive_time);

-- v4: Patch for ChannelCast, Search

CREATE TABLE ChannelCast (
    publisher_id text,
    publisher_name text,
    infohash text,
    torrenthash text,
    torrentname text,
    time_stamp integer,
    signature text
);

CREATE INDEX pub_id_idx on ChannelCast (publisher_id);
CREATE INDEX pub_name_idx on ChannelCast (publisher_name);
CREATE INDEX infohash_ch_idx on ChannelCast (infohash);

----------------------------------------

CREATE TABLE InvertedIndex (
    word text NOT NULL,
    torrent_id integer
);

CREATE INDEX word_idx on InvertedIndex (word);
CREATE UNIQUE INDEX invertedindex_idx on InvertedIndex (word,torrent_id);

----------------------------------------

-- Set all similarity to zero because we are using a new similarity
-- function and the old values no longer correspond to the new ones
UPDATE Peer SET similarity = 0;
UPDATE Torrent SET relevance = 0;
"""
        self.execute_write(sql, commit=False)

    if fromver < 5:
        sql = """
--------------------------------------
-- Creating Subtitles (future RichMetadata) DB
----------------------------------
CREATE TABLE Metadata (
    metadata_id integer PRIMARY KEY ASC AUTOINCREMENT NOT NULL,
    publisher_id text NOT NULL,
    infohash text NOT NULL,
    description text,
    timestamp integer NOT NULL,
    signature text NOT NULL,
    UNIQUE (publisher_id, infohash),
    FOREIGN KEY (publisher_id, infohash)
        REFERENCES ChannelCast(publisher_id, infohash)
        ON DELETE CASCADE -- the fk constraint is not enforced by sqlite
);

CREATE INDEX infohash_md_idx on Metadata(infohash);
CREATE INDEX pub_md_idx on Metadata(publisher_id);

CREATE TABLE Subtitles (
    metadata_id_fk integer,
    subtitle_lang text NOT NULL,
    subtitle_location text,
    checksum text NOT NULL,
    UNIQUE (metadata_id_fk,subtitle_lang),
    FOREIGN KEY (metadata_id_fk)
        REFERENCES Metadata(metadata_id)
        ON DELETE CASCADE, -- the fk constraint is not enforced by sqlite

    -- ISO639-2 uses 3 characters for lang codes
    CONSTRAINT lang_code_length CHECK ( length(subtitle_lang) == 3 )
);

CREATE INDEX metadata_sub_idx on Subtitles(metadata_id_fk);

-- Stores the subtitles that peers have as an integer bitmask
CREATE TABLE SubtitlesHave (
    metadata_id_fk integer,
    peer_id text NOT NULL,
    have_mask integer NOT NULL,
    received_ts integer NOT NULL, --timestamp indicating when the mask was received
    UNIQUE (metadata_id_fk, peer_id),
    FOREIGN KEY (metadata_id_fk)
        REFERENCES Metadata(metadata_id)
        ON DELETE CASCADE, -- the fk constraint is not enforced by sqlite

    -- 32 bit unsigned integer
    CONSTRAINT have_mask_length CHECK (have_mask >= 0 AND have_mask < 4294967296)
);

CREATE INDEX subtitles_have_idx on SubtitlesHave(metadata_id_fk);
-- this index can boost queries
-- ordered by timestamp on the SubtitlesHave DB
CREATE INDEX subtitles_have_ts on SubtitlesHave(received_ts);
"""
        self.execute_write(sql, commit=False)

    # updating version stepwise so if this works, we store it
    # regardless of later, potentially failing updates
    self.writeDBVersion(CURRENT_MAIN_DB_VERSION, commit=False)
    self.commit()

    # now the start the process of parsing the torrents to insert into
    # InvertedIndex table.
    if TEST_SQLITECACHEDB_UPGRADE:
        state_dir = "."
    else:
        from BaseLib.Core.Session import Session
        session = Session.get_instance()
        state_dir = session.get_state_dir()
    tmpfilename = os.path.join(state_dir, "upgradingdb.txt")
    if fromver < 4 or os.path.exists(tmpfilename):
        def upgradeTorrents():
            # One batch of the background back-fill: index up to 20 torrents
            # into InvertedIndex, then re-schedule itself until none remain.
            # NOTE(review): on the TEST_SQLITECACHEDB_UPGRADE path `session`
            # is never bound; the bare except below would hide the NameError.

            # fetch some un-inserted torrents to put into the InvertedIndex
            sql = """
            SELECT torrent_id, name, torrent_file_name
            FROM Torrent
            WHERE torrent_id NOT IN (SELECT DISTINCT torrent_id FROM InvertedIndex)
            AND torrent_file_name IS NOT NULL
            LIMIT 20"""
            records = self.fetchall(sql)

            if len(records) == 0:
                # upgradation is complete and hence delete the temp file
                os.remove(tmpfilename)
                if DEBUG:
                    print >> sys.stderr, time.asctime(), "-", "DB Upgradation: temp-file deleted", tmpfilename
                return

            for torrent_id, name, torrent_file_name in records:
                try:
                    abs_filename = os.path.join(session.get_torrent_collecting_dir(), torrent_file_name)
                    if not os.path.exists(abs_filename):
                        raise RuntimeError(".torrent file not found. Use fallback.")
                    torrentdef = TorrentDef.load(abs_filename)
                    torrent_name = torrentdef.get_name_as_unicode()
                    keywords = Set(split_into_keywords(torrent_name))
                    for filename in torrentdef.get_files_as_unicode():
                        keywords.update(split_into_keywords(filename))
                except:
                    # failure... most likely the .torrent file
                    # is invalid

                    # use keywords from the torrent name
                    # stored in the database
                    torrent_name = dunno2unicode(name)
                    keywords = Set(split_into_keywords(torrent_name))

                # store the keywords in the InvertedIndex
                # table in the database
                if len(keywords) > 0:
                    values = [(keyword, torrent_id) for keyword in keywords]
                    self.executemany(u"INSERT OR REPLACE INTO InvertedIndex VALUES(?, ?)", values, commit=False)
                    if DEBUG:
                        print >> sys.stderr, time.asctime(), "-", "DB Upgradation: Extending the InvertedIndex table with", len( values ), "new keywords for", torrent_name

            # now commit, after parsing the batch of torrents
            self.commit()

            # upgradation not yet complete; comeback after 5 sec
            tqueue.add_task(upgradeTorrents, 5)

        # Create an empty file to mark the process of upgradation.
        # In case this process is terminated before completion of upgradation,
        # this file remains even though fromver >= 4 and hence indicating that
        # rest of the torrents need to be inserted into the InvertedIndex!

        # ensure the temp-file is created, if it is not already
        try:
            # NOTE(review): file handle is opened but never explicitly
            # closed; relies on refcounting to release it.
            open(tmpfilename, "w")
            if DEBUG:
                print >> sys.stderr, time.asctime(), "-", "DB Upgradation: temp-file successfully created"
        except:
            if DEBUG:
                print >> sys.stderr, time.asctime(), "-", "DB Upgradation: failed to create temp-file"

        if DEBUG:
            print >> sys.stderr, time.asctime(), "-", "Upgrading DB .. inserting into InvertedIndex"
        from BaseLib.Utilities.TimedTaskQueue import TimedTaskQueue
        from sets import Set
        from BaseLib.Core.Search.SearchManager import split_into_keywords
        from BaseLib.Core.TorrentDef import TorrentDef

        # start the upgradation after 10 seconds
        tqueue = TimedTaskQueue("UpgradeDB")
        tqueue.add_task(upgradeTorrents, 10)
class TestBuddyCast(unittest.TestCase):
    """Replays a recorded superpeer BuddyCast log through the real handler."""

    def setUp(self):
        # prepare database
        launchmany = FakeLauchMany()
        self.overlay_bridge = TimedTaskQueue(isDaemon=False)
        #self.overlay_bridge = FakeOverlayBridge()
        superpeer = False  # enable it to test superpeer
        self.bc = BuddyCastFactory.getInstance(superpeer=superpeer)
        self.bc.register(self.overlay_bridge, launchmany, None, None, None, True)

    def tearDown(self):
        # 'quit' stops the TimedTaskQueue worker thread.
        self.overlay_bridge.add_task('quit')
        print "Before join"

    def remove_t_index(self):
        # Drop torrent-table indices so the timing run measures raw queries.
        indices = [
            'Torrent_length_idx',
            'Torrent_creation_date_idx',
            'Torrent_relevance_idx',
            'Torrent_num_seeders_idx',
            'Torrent_num_leechers_idx',
            #'Torrent_name_idx',
        ]
        for index in indices:
            sql = 'drop index ' + index
            self.data_handler.torrent_db._db.execute_write(sql)

    def remove_p_index(self):
        # Drop peer-table indices, same rationale as remove_t_index.
        indices = [
            'Peer_name_idx',
            'Peer_ip_idx',
            'Peer_similarity_idx',
            'Peer_last_seen_idx',
            'Peer_last_connected_idx',
            'Peer_num_peers_idx',
            'Peer_num_torrents_idx'
        ]
        for index in indices:
            sql = 'drop index ' + index
            self.data_handler.peer_db._db.execute_write(sql)

    def local_test(self):
        # Feed every recorded BuddyCast message to the handler and collect
        # per-message processing costs.
        self.remove_t_index()
        self.remove_p_index()
        from BaseLib.Test.log_parser import get_buddycast_data
        #start_time = time()
        #print >> sys.stderr, "buddycast: ******************* start local test"
        costs = []
        self.data_handler.postInit(updatesim=False)
        for permid, selversion, msg in get_buddycast_data( os.path.join(FILES_DIR, 'superpeer120070902sp7001.log')):
            message = bencode(msg)
            #print 'got msg:', permid, selversion, message
            try:
                s = time()
                self.bc.gotBuddyCastMessage(message, permid, selversion)
                cost = time() - s
                costs.append(cost)
            except:
                print_exc()
                break
            print 'got msg: %d %.2f %.2f %.2f %.2f' % (len(costs), cost, min(costs), sum(costs) / len(costs), max(costs))
        # with all indices, min/avg/max: 0.00 1.78 4.57 seconds
        # without index, min/avg/max: 0.00 1.38 3.43 seconds (58)
        print "Done"

    def test_start(self):
        # Drives the whole scenario; any exception fails the test.
        try:
            self.bc.olthread_register(start=False)
            self.data_handler = self.bc.data_handler
            self.local_test()
            print "Sleeping for 10 secs"
            sleep(10)
            print "Done2"
        except:
            print_exc()
            self.assert_(False)
def setUp(self):
    """Fresh counters and a fresh worker queue before every test."""
    self.completed = []   # task ids recorded as they finish
    self.ntasks = 0       # set by each individual test
    self.guiserver = TimedTaskQueue()
def setUp(self):
    # Reset per-test state and start a fresh worker queue.
    self.ntasks = 0        # number of tasks the test schedules
    self.completed = []    # ids of tasks that actually ran
    self.guiserver = TimedTaskQueue()
class TestTimedTaskQueue(unittest.TestCase):
    """Verifies delay ordering and FIFO behaviour of TimedTaskQueue.

    The task callbacks add distinct amounts to self.count so the final
    value encodes the exact execution order.
    """

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_addTask(self):
        # Expected order: task0 (+1), task2 (+2), then the two 3s tasks (+4 each).
        self.queue = TimedTaskQueue()
        self.count = 0
        self.queue.add_task(self.task3a, 3)
        self.queue.add_task(self.task0, 0)
        self.queue.add_task(self.task3b, 3)
        self.queue.add_task(self.task2, 1)
        sleep(6)
        assert self.count == 11
        del self.queue

    def task0(self):
        self.count += 1
        assert self.count == 1

    def task2(self):
        self.count += 2
        assert self.count == 3

    def task3a(self):
        # The two 3-second tasks may run in either order.
        self.count += 4
        assert self.count == 7 or self.count == 11

    def task3b(self):
        self.count += 4
        assert self.count == 7 or self.count == 11

    def test_addTask0FIFO(self):
        # Zero-delay tasks must run in submission order.
        self.queue = TimedTaskQueue()
        self.count = 0
        self.queue.add_task(self.task0a, 0)
        self.queue.add_task(self.task0b, 0)
        self.queue.add_task(self.task0c, 0)
        self.queue.add_task(self.task0d, 0)
        sleep(6)
        assert self.count == 4
        del self.queue

    def task0a(self):
        assert self.count == 0
        self.count = 1

    def task0b(self):
        assert self.count == 1
        self.count = 2

    def task0c(self):
        assert self.count == 2
        self.count = 3

    def task0d(self):
        assert self.count == 3
        self.count = 4
class TestTimedTaskQueue(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def test_addTask(self): self.queue = TimedTaskQueue() self.count = 0 self.queue.add_task(self.task3a, 3) self.queue.add_task(self.task0, 0) self.queue.add_task(self.task3b, 3) self.queue.add_task(self.task2, 1) sleep(6) assert self.count == 11 del self.queue def task0(self): self.count += 1 assert self.count == 1 def task2(self): self.count += 2 assert self.count == 3 def task3a(self): self.count += 4 assert self.count == 7 or self.count == 11 def task3b(self): self.count += 4 assert self.count == 7 or self.count == 11 def test_addTask0FIFO(self): self.queue = TimedTaskQueue() self.count = 0 self.queue.add_task(self.task0a, 0) self.queue.add_task(self.task0b, 0) self.queue.add_task(self.task0c, 0) self.queue.add_task(self.task0d, 0) sleep(6) assert self.count == 4 del self.queue def task0a(self): assert self.count == 0 self.count = 1 def task0b(self): assert self.count == 1 self.count = 2 def task0c(self): assert self.count == 2 self.count = 3 def task0d(self): assert self.count == 3 self.count = 4
class TestBuddyCast(unittest.TestCase):
    """Replays a recorded superpeer BuddyCast log through the real handler."""

    def setUp(self):
        # prepare database
        launchmany = FakeLauchMany()
        self.overlay_bridge = TimedTaskQueue()
        # self.overlay_bridge = FakeOverlayBridge()
        superpeer = False  # enable it to test superpeer
        self.bc = BuddyCastFactory.getInstance(superpeer=superpeer)
        self.bc.register(self.overlay_bridge, launchmany, None, None, None, True)

    def tearDown(self):
        # "quit" stops the TimedTaskQueue worker thread.
        self.overlay_bridge.add_task("quit")
        print "Before join"

    def remove_t_index(self):
        # Drop torrent-table indices so the timing run measures raw queries.
        indices = [
            "Torrent_length_idx",
            "Torrent_creation_date_idx",
            "Torrent_relevance_idx",
            "Torrent_num_seeders_idx",
            "Torrent_num_leechers_idx",
            #'Torrent_name_idx',
        ]
        for index in indices:
            sql = "drop index " + index
            self.data_handler.torrent_db._db.execute_write(sql)

    def remove_p_index(self):
        # Drop peer-table indices, same rationale as remove_t_index.
        indices = [
            "Peer_name_idx",
            "Peer_ip_idx",
            "Peer_similarity_idx",
            "Peer_last_seen_idx",
            "Peer_last_connected_idx",
            "Peer_num_peers_idx",
            "Peer_num_torrents_idx",
        ]
        for index in indices:
            sql = "drop index " + index
            self.data_handler.peer_db._db.execute_write(sql)

    def local_test(self):
        # Feed every recorded BuddyCast message to the handler and collect
        # per-message processing costs.
        self.remove_t_index()
        self.remove_p_index()
        from BaseLib.Test.log_parser import get_buddycast_data

        # start_time = time()
        # print >> sys.stderr, time.asctime(),'-', "buddycast: ******************* start local test"
        costs = []
        self.data_handler.postInit(updatesim=False)
        for permid, selversion, msg in get_buddycast_data(os.path.join(FILES_DIR, "superpeer120070902sp7001.log")):
            message = bencode(msg)
            # print 'got msg:', permid, selversion, message
            try:
                s = time()
                self.bc.gotBuddyCastMessage(message, permid, selversion)
                cost = time() - s
                costs.append(cost)
            except:
                print_exc()
                break
            print "got msg: %d %.2f %.2f %.2f %.2f" % (
                len(costs),
                cost,
                min(costs),
                sum(costs) / len(costs),
                max(costs),
            )
        # with all indices, min/avg/max: 0.00 1.78 4.57 seconds
        # without index, min/avg/max: 0.00 1.38 3.43 seconds (58)
        print "Done"

    def test_start(self):
        # Drives the whole scenario; any exception fails the test.
        try:
            self.bc.olthread_register(start=False)
            self.data_handler = self.bc.data_handler
            self.local_test()
            print "Sleeping for 10 secs"
            sleep(10)
            print "Done2"
        except:
            print_exc()
            self.assert_(False)
class OverlayThreadingBridge:
    """Singleton bridge that hops overlay-network callbacks from the
    network thread onto a dedicated overlay worker thread (self.tqueue).
    """

    __single = None
    lock = threading.Lock()

    def __init__(self):
        if OverlayThreadingBridge.__single:
            # NOTE(review): Python-2-only raise syntax, kept verbatim.
            raise RuntimeError, "OverlayThreadingBridge is Singleton"
        OverlayThreadingBridge.__single = self

        self.secover = None
        self.olapps = None
        self.olappsmsghandler = None
        self.olappsconnhandler = None

        # Current impl of wrapper: single thread
        self.tqueue = TimedTaskQueue(nameprefix="Overlay")

    def getInstance(*args, **kw):
        # Singleton pattern with double-checking
        if OverlayThreadingBridge.__single is None:
            OverlayThreadingBridge.lock.acquire()
            try:
                if OverlayThreadingBridge.__single is None:
                    OverlayThreadingBridge(*args, **kw)
            finally:
                OverlayThreadingBridge.lock.release()
        return OverlayThreadingBridge.__single
    getInstance = staticmethod(getInstance)

    def resetSingleton(self):
        """ For testing purposes """
        OverlayThreadingBridge.__single = None

    def register_bridge(self,secover,olapps):
        """ Called by MainThread """
        self.secover = secover
        self.olapps = olapps

        secover.register_recv_callback(self.handleMessage)
        secover.register_conns_callback(self.handleConnection)

    #
    # SecOverlay interface
    #
    def register(self,launchmanycore,max_len):
        """ Called by MainThread """
        self.secover.register(launchmanycore,max_len)

        # FOR TESTING ONLY
        self.iplport2oc = self.secover.iplport2oc

    def get_handler(self):
        return self.secover

    def start_listening(self):
        """ Called by MainThread """
        self.secover.start_listening()

    def register_recv_callback(self,callback):
        """ Called by MainThread """
        self.olappsmsghandler = callback

    def register_conns_callback(self,callback):
        """ Called by MainThread """
        self.olappsconnhandler = callback

    def handleConnection(self,exc,permid,selversion,locally_initiated,hisdns):
        """ Called by NetworkThread """
        # called by SecureOverlay.got_auth_connection() or cleanup_admin_and_callbacks()
        if DEBUG:
            print >>sys.stderr,"olbridge: handleConnection",exc,show_permid_short(permid),selversion,locally_initiated,hisdns,currentThread().getName()

        def olbridge_handle_conn_func():
            # Called by OverlayThread
            if DEBUG:
                print >>sys.stderr,"olbridge: handle_conn_func",exc,show_permid_short(permid),selversion,locally_initiated,hisdns,currentThread().getName()
            try:
                if hisdns:
                    self.secover.add_peer_to_db(permid,hisdns,selversion)
                if self.olappsconnhandler is not None:
                    # self.olappsconnhandler = OverlayApps.handleConnection
                    self.olappsconnhandler(exc,permid,selversion,locally_initiated)
            except:
                print_exc()
            if isinstance(exc,CloseException):
                self.secover.update_peer_status(permid,exc.was_auth_done())
        self.tqueue.add_task(olbridge_handle_conn_func,0)

    def handleMessage(self,permid,selversion,message):
        """ Called by NetworkThread """
        #ProxyService_
        #
        # DEBUG
        #print "### olbridge: handleMessage", show_permid_short(permid), selversion, getMessageName(message[0]), currentThread().getName()
        #
        #_ProxyService
        if DEBUG:
            print >>sys.stderr,"olbridge: handleMessage",show_permid_short(permid),selversion,getMessageName(message[0]),currentThread().getName()

        def olbridge_handle_msg_func():
            # Called by OverlayThread
            if DEBUG:
                print >>sys.stderr,"olbridge: handle_msg_func",show_permid_short(permid),selversion,getMessageName(message[0]),currentThread().getName()
            try:
                if self.olappsmsghandler is None:
                    ret = True
                else:
                    ret = self.olappsmsghandler(permid,selversion,message)
            except:
                print_exc()
                ret = False
            # A False return from the handler closes the connection.
            if ret == False:
                if DEBUG:
                    print >>sys.stderr,"olbridge: olbridge_handle_msg_func closing!",show_permid_short(permid),selversion,getMessageName(message[0]),currentThread().getName()
                self.close(permid)
        self.tqueue.add_task(olbridge_handle_msg_func,0)
        return True

    def connect_dns(self,dns,callback):
        """ Called by OverlayThread/NetworkThread """
        if DEBUG:
            print >>sys.stderr,"olbridge: connect_dns",dns

        def olbridge_connect_dns_callback(cexc,cdns,cpermid,cselver):
            # Called by network thread
            if DEBUG:
                print >>sys.stderr,"olbridge: connect_dns_callback",cexc,cdns,show_permid_short(cpermid),cselver
            # Re-dispatch the caller's callback onto the overlay thread.
            olbridge_connect_dns_callback_lambda = lambda:callback(cexc,cdns,cpermid,cselver)
            self.add_task(olbridge_connect_dns_callback_lambda,0)
        self.secover.connect_dns(dns,olbridge_connect_dns_callback)

    def connect(self,permid,callback):
        """ Called by OverlayThread """
        if DEBUG:
            print >>sys.stderr,"olbridge: connect",show_permid_short(permid), currentThread().getName()

        def olbridge_connect_callback(cexc,cdns,cpermid,cselver):
            # Called by network thread
            if DEBUG:
                print >>sys.stderr,"olbridge: connect_callback",cexc,cdns,show_permid_short(cpermid),cselver, callback, currentThread().getName()
            olbridge_connect_callback_lambda = lambda:callback(cexc,cdns,cpermid,cselver)
            # Jie: postpone to call this callback to schedule it after the peer has been added to buddycast connection list
            # Arno, 2008-09-15: No-no-no
            self.add_task(olbridge_connect_callback_lambda,0)
        self.secover.connect(permid,olbridge_connect_callback)

    def send(self,permid,msg,callback):
        """ Called by OverlayThread """
        if DEBUG:
            print >>sys.stderr,"olbridge: send",show_permid_short(permid),len(msg)

        def olbridge_send_callback(cexc,cpermid):
            # Called by network thread
            if DEBUG:
                print >>sys.stderr,"olbridge: send_callback",cexc,show_permid_short(cpermid)
            olbridge_send_callback_lambda = lambda:callback(cexc,cpermid)
            self.add_task(olbridge_send_callback_lambda,0)
        self.secover.send(permid,msg,olbridge_send_callback)

    def close(self,permid):
        """ Called by OverlayThread """
        self.secover.close(permid)

    def add_task(self,task,t=0,ident=None):
        """ Called by OverlayThread """
        self.tqueue.add_task(task,t,ident)
class OverlayThreadingBridge:
    """Singleton bridge that hops overlay-network callbacks from the
    network thread onto a dedicated overlay worker thread (self.tqueue).
    """

    __single = None
    lock = threading.Lock()

    def __init__(self):
        if OverlayThreadingBridge.__single:
            # NOTE(review): Python-2-only raise syntax, kept verbatim.
            raise RuntimeError, "OverlayThreadingBridge is Singleton"
        OverlayThreadingBridge.__single = self

        self.secover = None
        self.olapps = None
        self.olappsmsghandler = None
        self.olappsconnhandler = None

        # Current impl of wrapper: single thread
        self.tqueue = TimedTaskQueue(nameprefix="Overlay")

    def getInstance(*args, **kw):
        # Singleton pattern with double-checking
        if OverlayThreadingBridge.__single is None:
            OverlayThreadingBridge.lock.acquire()
            try:
                if OverlayThreadingBridge.__single is None:
                    OverlayThreadingBridge(*args, **kw)
            finally:
                OverlayThreadingBridge.lock.release()
        return OverlayThreadingBridge.__single
    getInstance = staticmethod(getInstance)

    def register_bridge(self, secover, olapps):
        """ Called by MainThread """
        self.secover = secover
        self.olapps = olapps

        secover.register_recv_callback(self.handleMessage)
        secover.register_conns_callback(self.handleConnection)

    #
    # SecOverlay interface
    #
    def register(self, launchmanycore, max_len):
        """ Called by MainThread """
        self.secover.register(launchmanycore, max_len)

        # FOR TESTING ONLY
        self.iplport2oc = self.secover.iplport2oc

    def get_handler(self):
        return self.secover

    def start_listening(self):
        """ Called by MainThread """
        self.secover.start_listening()

    def register_recv_callback(self, callback):
        """ Called by MainThread """
        self.olappsmsghandler = callback

    def register_conns_callback(self, callback):
        """ Called by MainThread """
        self.olappsconnhandler = callback

    def handleConnection(self, exc, permid, selversion, locally_initiated, hisdns):
        """ Called by NetworkThread """
        # called by SecureOverlay.got_auth_connection() or cleanup_admin_and_callbacks()
        if DEBUG:
            print >> sys.stderr, "olbridge: handleConnection", exc, show_permid_short(
                permid), selversion, locally_initiated, hisdns, currentThread(
            ).getName()

        def olbridge_handle_conn_func():
            # Called by OverlayThread
            if DEBUG:
                print >> sys.stderr, "olbridge: handle_conn_func", exc, show_permid_short(
                    permid
                ), selversion, locally_initiated, hisdns, currentThread(
                ).getName()
            try:
                if hisdns:
                    self.secover.add_peer_to_db(permid, hisdns, selversion)
                if self.olappsconnhandler is not None:
                    # self.olappsconnhandler = OverlayApps.handleConnection
                    self.olappsconnhandler(exc, permid, selversion, locally_initiated)
            except:
                print_exc()
            if isinstance(exc, CloseException):
                self.secover.update_peer_status(permid, exc.was_auth_done())
        self.tqueue.add_task(olbridge_handle_conn_func, 0)

    def handleMessage(self, permid, selversion, message):
        """ Called by NetworkThread """
        if DEBUG:
            print >> sys.stderr, "olbridge: handleMessage", show_permid_short(
                permid), selversion, getMessageName(
                    message[0]), currentThread().getName()

        def olbridge_handle_msg_func():
            # Called by OverlayThread
            if DEBUG:
                print >> sys.stderr, "olbridge: handle_msg_func", show_permid_short(
                    permid), selversion, getMessageName(
                        message[0]), currentThread().getName()
            try:
                if self.olappsmsghandler is None:
                    ret = True
                else:
                    ret = self.olappsmsghandler(permid, selversion, message)
            except:
                print_exc()
                ret = False
            # A False return from the handler closes the connection.
            if ret == False:
                self.close(permid)
        self.tqueue.add_task(olbridge_handle_msg_func, 0)
        return True

    def connect_dns(self, dns, callback):
        """ Called by OverlayThread/NetworkThread """
        if DEBUG:
            print >> sys.stderr, "olbridge: connect_dns", dns

        def olbridge_connect_dns_callback(cexc, cdns, cpermid, cselver):
            # Called by network thread
            if DEBUG:
                print >> sys.stderr, "olbridge: connect_dns_callback", cexc, cdns, show_permid_short(
                    cpermid), cselver
            # Re-dispatch the caller's callback onto the overlay thread.
            olbridge_connect_dns_callback_lambda = lambda: callback(
                cexc, cdns, cpermid, cselver)
            self.add_task(olbridge_connect_dns_callback_lambda, 0)
        self.secover.connect_dns(dns, olbridge_connect_dns_callback)

    def connect(self, permid, callback):
        """ Called by OverlayThread """
        if DEBUG:
            print >> sys.stderr, "olbridge: connect", show_permid_short(
                permid), currentThread().getName()

        def olbridge_connect_callback(cexc, cdns, cpermid, cselver):
            # Called by network thread
            if DEBUG:
                print >> sys.stderr, "olbridge: connect_callback", cexc, cdns, show_permid_short(
                    cpermid), cselver, callback, currentThread().getName()
            olbridge_connect_callback_lambda = lambda: callback(
                cexc, cdns, cpermid, cselver)
            # Jie: postpone to call this callback to schedule it after the peer has been added to buddycast connection list
            # Arno, 2008-09-15: No-no-no
            self.add_task(olbridge_connect_callback_lambda, 0)
        self.secover.connect(permid, olbridge_connect_callback)

    def send(self, permid, msg, callback):
        """ Called by OverlayThread """
        if DEBUG:
            print >> sys.stderr, "olbridge: send", show_permid_short(
                permid), len(msg)

        def olbridge_send_callback(cexc, cpermid):
            # Called by network thread
            if DEBUG:
                print >> sys.stderr, "olbridge: send_callback", cexc, show_permid_short(
                    cpermid)
            olbridge_send_callback_lambda = lambda: callback(cexc, cpermid)
            self.add_task(olbridge_send_callback_lambda, 0)
        self.secover.send(permid, msg, olbridge_send_callback)

    def close(self, permid):
        """ Called by OverlayThread """
        self.secover.close(permid)

    def add_task(self, task, t=0, ident=None):
        """ Called by OverlayThread """
        self.tqueue.add_task(task, t, ident)