def __init__(self, master, integrate_with_tribler=True, auto_join_channel=False):
    # Initialise the AllChannel community on top of the Dispersy master member.
    # integrate_with_tribler: use the shared Tribler sqlite handlers when True,
    # otherwise fall back to Dispersy-backed stubs (standalone operation).
    super(AllChannelCommunity, self).__init__(master)
    self.integrate_with_tribler = integrate_with_tribler
    self.auto_join_channel = auto_join_channel

    if self.integrate_with_tribler:
        # Imported lazily so the stub path does not require a full Tribler core.
        from Tribler.Core.CacheDB.SqliteCacheDBHandler import ChannelCastDBHandler, VoteCastDBHandler, PeerDBHandler

        # tribler channelcast database
        self._channelcast_db = ChannelCastDBHandler.getInstance()
        self._votecast_db = VoteCastDBHandler.getInstance()
        self._peer_db = PeerDBHandler.getInstance()
    else:
        self._channelcast_db = ChannelCastDBStub(self._dispersy)
        self._votecast_db = VoteCastDBStub(self._dispersy)
        self._peer_db = PeerDBStub(self._dispersy)

    # Shortcut to the Dispersy callback-thread task scheduler.
    self._register_task = self.dispersy.callback.register

    # TODO: rewrite create_channelcast to use a generator and add the callback id to
    # _pending_callbacks for memory cleanup (otherwise the community will still exist in memory
    # once unloaded)
    self._register_task(self.create_channelcast, delay=CHANNELCAST_FIRST_MESSAGE)

    # 15/02/12 Boudewijn: add the callback id to _pending_callbacks to allow the task to be
    # unregistered when the community is unloaded
    self._pending_callbacks.append(
        self._register_task(self.unload_preview, priority=-128))

    self._blocklist = {}
    self._searchCallbacks = {}
def __init__(self, data_handler, secure_overlay, session, buddycast_interval_function, log='', dnsindb=None):
    """Create the VoteCast core.

    Keeps references to the BuddyCast interval function, the session and
    the vote/peer database singletons; buddycast_core is filled in later
    by the factory that constructs this object.
    """
    # Interval function supplied by the BuddycastFactory.
    self.interval = buddycast_interval_function
    self.data_handler = data_handler
    self.dnsindb = dnsindb
    self.log = log
    self.session = session
    self.my_permid = session.get_permid()

    # Singleton database handlers.
    self.peerdb = PeerDBHandler.getInstance()
    self.votecastdb = VoteCastDBHandler.getInstance()

    # Message size bound: one record per configured random/recent vote slot.
    random_slots = session.get_votecast_random_votes()
    recent_slots = session.get_votecast_recent_votes()
    self.max_length = SINGLE_VOTECAST_LENGTH * (random_slots + recent_slots)

    # Set by buddycast-core itself (created by the factory after this
    # constructor returns).
    self.buddycast_core = None
    self.notifier = Notifier.getInstance()

    # Optionally extend logging with VoteCast messages and status.
    if self.log:
        self.overlay_log = OverlayLogger.getInstance(self.log)
def __init__(self, master, integrate_with_tribler=True, auto_join_channel=False):
    """Set up the AllChannel community and schedule its periodic tasks."""
    super(AllChannelCommunity, self).__init__(master)
    self.integrate_with_tribler = integrate_with_tribler
    self.auto_join_channel = auto_join_channel

    if not self.integrate_with_tribler:
        # Standalone mode: lightweight Dispersy-backed stubs.
        self._channelcast_db = ChannelCastDBStub(self._dispersy)
        self._votecast_db = VoteCastDBStub(self._dispersy)
        self._peer_db = PeerDBStub(self._dispersy)
    else:
        # Full Tribler mode: shared sqlite handlers (imported lazily so the
        # stub path needs no Tribler core).
        from Tribler.Core.CacheDB.SqliteCacheDBHandler import ChannelCastDBHandler, VoteCastDBHandler, PeerDBHandler
        self._channelcast_db = ChannelCastDBHandler.getInstance()
        self._votecast_db = VoteCastDBHandler.getInstance()
        self._peer_db = PeerDBHandler.getInstance()

    self._register_task = self.dispersy.callback.register
    self._register_task(self.create_channelcast, delay=CHANNELCAST_FIRST_MESSAGE)

    # 15/02/12 Boudewijn: remember the callback id so the task can be
    # unregistered when the community is unloaded.
    unload_id = self._register_task(self.unload_preview, priority=-128)
    self._pending_callbacks.append(unload_id)

    self._blocklist = {}
    self._searchCallbacks = {}
def __init__(self, data_handler, secure_overlay, session, buddycast_interval_function, log='', dnsindb=None): """ Returns an instance of this class """ #Keep reference to interval-function of BuddycastFactory self.interval = buddycast_interval_function self.data_handler = data_handler self.dnsindb = dnsindb self.log = log self.peerdb = PeerDBHandler.getInstance() self.votecastdb = VoteCastDBHandler.getInstance() self.session = session self.my_permid = session.get_permid() self.max_length = SINGLE_VOTECAST_LENGTH * ( session.get_votecast_random_votes() + session.get_votecast_recent_votes()) #Reference to buddycast-core, set by the buddycast-core (as it is created by the #buddycast-factory after calling this constructor). self.buddycast_core = None self.notifier = Notifier.getInstance() #Extend logging with VoteCast-messages and status if self.log: self.overlay_log = OverlayLogger.getInstance(self.log)
def singtest_send_opened(self):
    """Register peer2's address, connect, and send a message from the
    connect callback over the freshly opened overlay connection."""
    print >> sys.stderr, "test: test_send_opened"
    self.wanted = True
    self.wanted2 = True

    target_permid = self.peer2.my_permid
    PeerDBHandler.getInstance().addPeer(target_permid, {'ip': "127.0.0.1", 'port': 5678})

    payload = GET_METADATA + '12345678901234567890'
    connect_cb = lambda e, d, p, s: self.send_opened_connect_callback(e, d, p, s, payload)
    self.peer1.secure_overlay.connect(target_permid, connect_cb)
def test_loadSuperPeer(self):
    """ The SuperPeerDBHandler constructor writes the superpeers to the PeerDB """
    self.splist.loadSuperPeers(self.config, True)
    assert self.splist.size() == 3, self.splist.size()
    self.pdb = PeerDBHandler.getInstance()
    self.peer_db = PeerDBHandler.getInstance()
    # Arno: must be 3, as there is a duplicate PermID in the lines list
    assert self.peer_db.size() == 3, self.peer_db.size()
def singtest_send_unopenedB(self):
    """Send to a peer without a pre-opened overlay connection; afterwards
    no connection may remain in the overlay's connection table."""
    print >> sys.stderr, "test: test_send_unopenedB"
    self.wanted = True

    target_permid = self.peer2.my_permid
    PeerDBHandler.getInstance().addPeer(target_permid, {'ip': "127.0.0.1", 'port': 5678})
    self.peer1.secure_overlay.send(target_permid, 'msg=bla', self.send_unopenedB_send_callback)

    # Give the rawserver thread time to close the connection (should succeed).
    sleep(2)
    self.assert_(len(self.peer1.secure_overlay.iplport2oc) == 0)
def singtest_send_unopenedB(self):
    # Send to a peer without an open overlay connection; after the exchange
    # no entry may remain in iplport2oc.
    print >> sys.stderr, "test: test_send_unopenedB"
    self.wanted = True
    peer_db = PeerDBHandler.getInstance()
    hispermid = self.peer2.my_permid
    peer_db.addPeer(hispermid, {'ip': "127.0.0.1", 'port': 5678})
    self.peer1.secure_overlay.send(hispermid, 'msg=bla', self.send_unopenedB_send_callback)
    sleep(2)  # let rawserver thread close connection, which should succeed
    self.assert_(len(self.peer1.secure_overlay.iplport2oc) == 0)
def setUp(self):
    # Decode two known permids (base64 -> binary), build a syntactically
    # plausible fake permid, check the fake is absent from the raw sqlite db,
    # then grab the PeerDBHandler singleton under test.
    AbstractDB.setUp(self)
    self.p1 = str2bin('MFIwEAYHKoZIzj0CAQYFK4EEABoDPgAEAAA6SYI4NHxwQ8P7P8QXgWAP+v8SaMVzF5+fSUHdAMrs6NvL5Epe1nCNSdlBHIjNjEiC5iiwSFZhRLsr')
    self.p2 = str2bin('MFIwEAYHKoZIzj0CAQYFK4EEABoDPgAEAABo69alKy95H7RHzvDCsolAurKyrVvtDdT9/DzNAGvky6YejcK4GWQXBkIoQGQgxVEgIn8dwaR9B+3U')
    fake_permid_x = 'fake_permid_x' + '0R0\x10\x00\x07*\x86H\xce=\x02\x01\x06\x05+\x81\x04\x00\x1a\x03>\x00\x04'
    hp = self.sqlitedb.hasPeer(fake_permid_x)
    assert not hp
    self.pdb = PeerDBHandler.getInstance()
def test_findPeers(self):
    """findPeers must filter rows on the given column/value pair."""
    matches = self.pdb.findPeers('ip', '88.88.88.88')
    assert len(matches) == 16

    matches = self.pdb.findPeers('ip', '1.2.3.4')
    assert len(matches) == 0

    # Re-fetch the singleton and look a peer up by permid.
    self.pdb = PeerDBHandler.getInstance()
    matches = self.pdb.findPeers('permid', self.p1)
    assert len(matches) == 1
    assert matches[0]['permid'] == self.p1
def singtest_send_opened(self):
    # Register peer2's address, open an overlay connection to it and send a
    # GET_METADATA message from inside the connect callback.
    print >> sys.stderr, "test: test_send_opened"
    self.wanted = True
    self.wanted2 = True
    peer_db = PeerDBHandler.getInstance()
    hispermid = self.peer2.my_permid
    peer_db.addPeer(hispermid, {'ip': "127.0.0.1", 'port': 5678})
    msg = GET_METADATA + '12345678901234567890'
    self.peer1.secure_overlay.connect(
        hispermid, lambda e, d, p, s: self.send_opened_connect_callback(
            e, d, p, s, msg))
def __init__(self, parent):
    """Home panel listing the top contributing peers; refreshes every 10s."""
    HomePanel.__init__(self, parent, 'Top Contributors', LIST_BLUE)
    self.Layout()

    # Database singletons consulted when refreshing the list.
    self.peerdb = PeerDBHandler.getInstance()
    self.barterdb = BarterCastDBHandler.getInstance()

    # Repeating 10-second refresh timer (oneShot=False).
    self.timer = wx.Timer(self)
    self.Bind(wx.EVT_TIMER, self._onTimer, self.timer)
    self.timer.Start(10000, False)

    self.RefreshList()
def __init__(self, parent):
    # Home panel showing the peers that contributed the most.
    HomePanel.__init__(self, parent, 'Top Contributors', LIST_BLUE)
    self.Layout()
    # Singleton DB handlers used when the list is refreshed.
    self.peerdb = PeerDBHandler.getInstance()
    self.barterdb = BarterCastDBHandler.getInstance()
    # Refresh every 10 seconds; False = repeating timer.
    self.timer = wx.Timer(self)
    self.Bind(wx.EVT_TIMER, self._onTimer, self.timer)
    self.timer.Start(10000, False)
    self.RefreshList()
def singtest_got_conn_outgoing(self):
    """An outgoing connect must fire the registered connections callback."""
    print >> sys.stderr, "test: test_got_conn_outgoing"
    self.wanted = True
    self.wanted2 = True

    # Register the connection-event handler before connecting.
    self.peer1.secure_overlay.register_conns_callback(self.got_conn_outgoing_conns_callback)

    target_permid = self.peer2.my_permid
    PeerDBHandler.getInstance().addPeer(target_permid, {'ip': "127.0.0.1", 'port': 5678})

    payload = GET_METADATA + '12345678901234567890'
    connect_cb = lambda e, d, p, s: self.got_conn_outgoing_connect_callback(e, d, p, s, payload)
    self.peer1.secure_overlay.connect(target_permid, connect_cb)
def singtest_connect_to_live_peer(self):
    """Connecting to a live peer must leave exactly one open connection,
    keyed on its ip:port."""
    print >> sys.stderr, "test: test_connect_to_live_peer"
    self.wanted = True

    target_permid = self.peer2.my_permid
    PeerDBHandler.getInstance().addPeer(target_permid, {'ip': "127.0.0.1", 'port': 5678})
    self.peer1.secure_overlay.connect(target_permid, self.connect_to_live_peer_callback)

    # Give the rawserver thread time to establish the connection.
    sleep(2)
    self.assert_(len(self.peer1.secure_overlay.iplport2oc) == 1)
    self.assert_(self.peer1.secure_overlay.iplport2oc.has_key('127.0.0.1:5678'))
def singtest_connect_to_dead_peerB(self):
    """Connecting to a port with no listener must end with an empty
    connection table."""
    print >> sys.stderr, "test: test_connect_to_dead_peerB"
    self.wanted = True

    target_permid = self.peer2.my_permid
    # Port 22220 has no listener, so the connect attempt should fail.
    PeerDBHandler.getInstance().addPeer(target_permid, {'ip': "127.0.0.1", 'port': 22220})
    self.peer1.secure_overlay.connect(target_permid, self.connect_to_dead_peerB_callback)

    # Arno, 2009-04-23: was 2 secs, somehow the failed event comes in real slow now.
    sleep(4)
    self.assert_(len(self.peer1.secure_overlay.iplport2oc) == 0)
def singtest_connect_to_dead_peerB(self):
    # Attempt an overlay connection to a closed port; after the failure the
    # connection table must be empty.
    print >> sys.stderr, "test: test_connect_to_dead_peerB"
    self.wanted = True
    peer_db = PeerDBHandler.getInstance()
    hispermid = self.peer2.my_permid
    peer_db.addPeer(hispermid, {'ip': "127.0.0.1", 'port': 22220})
    self.peer1.secure_overlay.connect(hispermid, self.connect_to_dead_peerB_callback)
    # Arno, 2009-04-23: was 2 secs, somehow the failed event comes in real slow now.
    sleep(4)  # let rawserver thread establish connection, which should fail
    self.assert_(len(self.peer1.secure_overlay.iplport2oc) == 0)
def singtest_receive(self):
    """A message sent from peer1 must reach peer2's receive callback."""
    print >> sys.stderr, "test: test_receive"
    self.wanted = True
    self.wanted2 = True

    # Register the incoming-message handler on the receiving side first.
    self.peer2.secure_overlay.register_recv_callback(self.receive_msg_callback)

    target_permid = self.peer2.my_permid
    PeerDBHandler.getInstance().addPeer(target_permid, {'ip': "127.0.0.1", 'port': 5678})

    payload = GET_METADATA + '12345678901234567890'
    connect_cb = lambda e, d, p, s: self.receive_connect_callback(e, d, p, s, payload)
    self.peer1.secure_overlay.connect(target_permid, connect_cb)
def singtest_connect_to_live_peer(self):
    # Connect to a reachable peer; exactly one connection keyed on
    # '127.0.0.1:5678' must appear in the overlay's connection table.
    print >> sys.stderr, "test: test_connect_to_live_peer"
    self.wanted = True
    peer_db = PeerDBHandler.getInstance()
    hispermid = self.peer2.my_permid
    peer_db.addPeer(hispermid, {'ip': "127.0.0.1", 'port': 5678})
    self.peer1.secure_overlay.connect(hispermid, self.connect_to_live_peer_callback)
    sleep(2)  # let rawserver thread establish connection, which should succeed
    self.assert_(len(self.peer1.secure_overlay.iplport2oc) == 1)
    self.assert_(
        self.peer1.secure_overlay.iplport2oc.has_key('127.0.0.1:5678'))
def __init__(self, master, integrate_with_tribler=True, auto_join_channel=False):
    # Initialise the AllChannel community; choose real Tribler sqlite
    # handlers or Dispersy-backed stubs depending on integrate_with_tribler.
    super(AllChannelCommunity, self).__init__(master)
    self.integrate_with_tribler = integrate_with_tribler
    self.auto_join_channel = auto_join_channel
    if self.integrate_with_tribler:
        # Lazy import: only needed when running inside a full Tribler core.
        from Tribler.Core.CacheDB.SqliteCacheDBHandler import ChannelCastDBHandler, VoteCastDBHandler, PeerDBHandler
        # tribler channelcast database
        self._channelcast_db = ChannelCastDBHandler.getInstance()
        self._votecast_db = VoteCastDBHandler.getInstance()
        self._peer_db = PeerDBHandler.getInstance()
    else:
        self._channelcast_db = ChannelCastDBStub(self._dispersy)
        self._votecast_db = VoteCastDBStub(self._dispersy)
        self._peer_db = PeerDBStub(self._dispersy)
    self._register_task = self.dispersy.callback.register
    # Schedule the first channelcast message.
    # NOTE(review): unlike other variants of this constructor visible in this
    # file, no unload_preview task is registered here and nothing is appended
    # to _pending_callbacks — confirm this is intentional for this version.
    self._register_task(self.create_channelcast, delay=CHANNELCAST_FIRST_MESSAGE)
    self._blocklist = {}
    self._searchCallbacks = {}
def register(self, session, sesslock):
    # One-time wiring of the session's network machinery: rawserver, optional
    # swift process, Dispersy (or a fake callback thread), the megacache
    # database handlers, and the remote-torrent handler.
    if not self.registered:
        self.registered = True

        self.session = session
        self.sesslock = sesslock

        self.downloads = {}
        config = session.sessconfig  # Should be safe at startup

        self.upnp_ports = []

        # Orig
        self.sessdoneflag = Event()

        self.rawserver = RawServer(
            self.sessdoneflag,
            config['timeout_check_interval'],
            config['timeout'],
            ipv6_enable=config['ipv6_enabled'],
            failfunc=self.rawserver_fatalerrorfunc,
            errorfunc=self.rawserver_nonfatalerrorfunc)
        self.rawserver.add_task(self.rawserver_keepalive, 1)
        self.listen_port = config['minport']
        self.shutdownstarttime = None

        self.multihandler = MultiHandler(self.rawserver, self.sessdoneflag)

        # SWIFTPROC: start swift only when enabled and the binary exists
        # (with or without the Windows .exe suffix).
        swift_exists = config['swiftproc'] and (
            os.path.exists(config['swiftpath'])
            or os.path.exists(config['swiftpath'] + '.exe'))
        if swift_exists:
            from Tribler.Core.Swift.SwiftProcessMgr import SwiftProcessMgr
            self.spm = SwiftProcessMgr(
                config['swiftpath'], config['swiftcmdlistenport'],
                config['swiftdlsperproc'],
                self.session.get_swift_tunnel_listen_port(), self.sesslock)
            try:
                self.swift_process = self.spm.get_or_create_sp(
                    self.session.get_swift_working_dir(),
                    self.session.get_torrent_collecting_dir(),
                    self.session.get_swift_tunnel_listen_port(),
                    self.session.get_swift_tunnel_httpgw_listen_port(),
                    self.session.get_swift_tunnel_cmdgw_listen_port())
                self.upnp_ports.append(
                    (self.session.get_swift_tunnel_listen_port(), 'UDP'))
            except OSError:
                # could not find/run swift
                print >> sys.stderr, "lmc: could not start a swift process"
        else:
            self.spm = None
            self.swift_process = None

        # Dispersy
        self.session.dispersy_member = None
        if config['dispersy']:
            from Tribler.dispersy.callback import Callback
            from Tribler.dispersy.dispersy import Dispersy
            from Tribler.dispersy.endpoint import RawserverEndpoint, TunnelEndpoint
            from Tribler.dispersy.community import HardKilledCommunity

            # set communication endpoint
            if config['dispersy-tunnel-over-swift'] and self.swift_process:
                endpoint = TunnelEndpoint(self.swift_process)
            else:
                endpoint = RawserverEndpoint(self.rawserver, config['dispersy_port'])

            callback = Callback("Dispersy")  # WARNING NAME SIGNIFICANT
            working_directory = unicode(config['state_dir'])

            self.dispersy = Dispersy(callback, endpoint, working_directory)

            # TODO: see if we can postpone dispersy.start to improve GUI responsiveness.
            # However, for now we must start self.dispersy.callback before running
            # try_register(nocachedb, self.database_thread)!
            self.dispersy.start()

            print >> sys.stderr, "lmc: Dispersy is listening on port", self.dispersy.wan_address[1], "using", endpoint
            self.upnp_ports.append((self.dispersy.wan_address[1], 'UDP'))

            self.dispersy.callback.call(self.dispersy.define_auto_load,
                                        args=(HardKilledCommunity,),
                                        kargs={'load': True})

            # notify dispersy finished loading
            self.session.uch.notify(NTFY_DISPERSY, NTFY_STARTED, None)

            from Tribler.Core.permid import read_keypair
            from Tribler.dispersy.crypto import ec_to_public_bin, ec_to_private_bin
            keypair = read_keypair(self.session.get_permid_keypair_filename())
            self.session.dispersy_member = callback.call(
                self.dispersy.get_member,
                (ec_to_public_bin(keypair), ec_to_private_bin(keypair)))

            self.database_thread = callback
        else:
            class FakeCallback():
                # Minimal stand-in for the Dispersy Callback thread, backed
                # by a TimedTaskQueue, used when Dispersy is disabled.
                def __init__(self):
                    from Tribler.Utilities.TimedTaskQueue import TimedTaskQueue
                    self.queue = TimedTaskQueue("FakeCallback")

                def register(self, call, args=(), kargs=None, delay=0.0, priority=0, id_=u"", callback=None, callback_args=(), callback_kargs=None, include_id=False):
                    # Mirror Callback.register: run call(*args, **kargs) after
                    # `delay`, then the optional completion callback.
                    def do_task():
                        if kargs:
                            call(*args, **kargs)
                        else:
                            call(*args)

                        if callback:
                            if callback_kargs:
                                callback(*callback_args, **callback_kargs)
                            else:
                                callback(*callback_args)
                    self.queue.add_task(do_task, t=delay)

                def shutdown(self, immediately=False):
                    self.queue.shutdown(immediately)

            self.database_thread = FakeCallback()

        if config['megacache']:
            import Tribler.Core.CacheDB.cachedb as cachedb
            from Tribler.Core.CacheDB.SqliteCacheDBHandler import PeerDBHandler, TorrentDBHandler, MyPreferenceDBHandler, VoteCastDBHandler, ChannelCastDBHandler, NetworkBuzzDBHandler, UserEventLogDBHandler
            from Tribler.Category.Category import Category
            from Tribler.Core.Tag.Extraction import TermExtraction
            from Tribler.Core.CacheDB.sqlitecachedb import try_register

            if DEBUG:
                print >> sys.stderr, 'tlm: Reading Session state from', config['state_dir']

            nocachedb = cachedb.init(config, self.rawserver_fatalerrorfunc)
            try_register(nocachedb, self.database_thread)

            self.cat = Category.getInstance(config['install_dir'])
            self.term = TermExtraction.getInstance(config['install_dir'])

            self.peer_db = PeerDBHandler.getInstance()
            self.peer_db.registerConnectionUpdater(self.session)

            self.torrent_db = TorrentDBHandler.getInstance()
            self.torrent_db.register(os.path.abspath(config['torrent_collecting_dir']))
            self.mypref_db = MyPreferenceDBHandler.getInstance()
            self.votecast_db = VoteCastDBHandler.getInstance()
            self.votecast_db.registerSession(self.session)
            self.channelcast_db = ChannelCastDBHandler.getInstance()
            self.channelcast_db.registerSession(self.session)
            self.nb_db = NetworkBuzzDBHandler.getInstance()
            self.ue_db = UserEventLogDBHandler.getInstance()

            if self.dispersy:
                # Commit the channelcast db whenever Dispersy commits.
                self.dispersy.database.attach_commit_callback(self.channelcast_db._db.commitNow)
        else:
            config['torrent_checking'] = 0

        self.rtorrent_handler = None
        if config['torrent_collecting']:
            from Tribler.Core.RemoteTorrentHandler import RemoteTorrentHandler
            self.rtorrent_handler = RemoteTorrentHandler()
class Peer(Thread):
    # Test helper: a minimal Tribler-like peer running its own RawServer and
    # SecureOverlay instance on a private port range (daemon thread).

    def __init__(self, testcase, port, secover):
        Thread.__init__(self)
        self.setDaemon(True)

        self.testcase = testcase
        self.doneflag = Event()

        # Minimal session config: very large timeouts so nothing expires
        # during a test, fresh temp dirs for all state, fixed port window.
        config = {}
        config['timeout_check_interval'] = 100000
        config['timeout'] = 100000
        config['ipv6_enabled'] = 0
        config['minport'] = port
        config['maxport'] = port + 5
        config['random_port'] = 0
        config['bind'] = ''
        config['ipv6_binds_v4'] = 0
        config['max_message_length'] = 2 ** 23
        config['torrent_collecting_dir'] = config['state_dir'] = config['install_dir'] = tempfile.mkdtemp()
        config['peer_icon_path'] = 'icons'

        self.rawserver = RawServer(self.doneflag,
                                   config['timeout_check_interval'],
                                   config['timeout'],
                                   ipv6_enable=config['ipv6_enabled'],
                                   failfunc=self.report_failure,
                                   errorfunc=self.report_error)
        # Bind within [minport, maxport]; abort construction on failure.
        while 1:
            try:
                self.listen_port = self.rawserver.find_and_bind(
                    0, config['minport'], config['maxport'], config['bind'],
                    reuse=True,
                    ipv6_socket_style=config['ipv6_binds_v4'],
                    randomizer=config['random_port'])
                print >> sys.stderr, "test: Got listen port", self.listen_port
                break
            except socketerror, e:
                self.report_failure(str(e))
                msg = "Couldn't not bind to listen port - " + str(e)
                self.report_failure(msg)
                return

        self.multihandler = MultiHandler(self.rawserver, self.doneflag)
        # Note: We don't want a singleton, we want
        # two different instances for peer1 and peer2
        self.secure_overlay = secover

        # Fresh EC keypair; the permid is the DER-encoded public key.
        self.my_keypair = EC.gen_params(EC.NID_sect233k1)
        self.my_keypair.gen_key()
        self.my_permid = str(self.my_keypair.pub().get_der())

        self.session = FakeSession(self, self.my_keypair, self.my_permid, self.listen_port)
        self.peer_db = PeerDBHandler.getInstance()

        self.secure_overlay.register(self, config['max_message_length'])
        print >> sys.stderr, "Peer: Setting", self.secure_overlay.get_handler(), "as handler at SocketHandler"
        self.rawserver.sockethandler.set_handler(self.secure_overlay.get_handler())
        self.secure_overlay.start_listening()

        # Stupid rawserver goes into very long wait if there are no short
        # term tasks. Emulate this
        self.rawserver.add_task(self.dummy_task, 0)
def register(self, session, sesslock):
    # One-time session setup: rawserver, optional swift process, Dispersy (or
    # a FakeCallback queue when disabled), megacache db handlers, and the
    # remote-torrent handler. Guarded so repeat calls are no-ops.
    if not self.registered:
        self.registered = True

        self.session = session
        self.sesslock = sesslock

        self.downloads = {}
        config = session.sessconfig  # Should be safe at startup

        self.upnp_ports = []

        # Orig
        self.sessdoneflag = Event()

        self.rawserver = RawServer(self.sessdoneflag,
                                   config['timeout_check_interval'],
                                   config['timeout'],
                                   ipv6_enable=config['ipv6_enabled'],
                                   failfunc=self.rawserver_fatalerrorfunc,
                                   errorfunc=self.rawserver_nonfatalerrorfunc)
        self.rawserver.add_task(self.rawserver_keepalive, 1)
        self.listen_port = config['minport']
        self.shutdownstarttime = None

        self.multihandler = MultiHandler(self.rawserver, self.sessdoneflag)

        # SWIFTPROC: only start swift when enabled and the binary is present
        # (plain path or Windows .exe).
        swift_exists = config['swiftproc'] and (os.path.exists(config['swiftpath']) or os.path.exists(config['swiftpath'] + '.exe'))
        if swift_exists:
            from Tribler.Core.Swift.SwiftProcessMgr import SwiftProcessMgr
            self.spm = SwiftProcessMgr(config['swiftpath'], config['swiftcmdlistenport'], config['swiftdlsperproc'], self.session.get_swift_tunnel_listen_port(), self.sesslock)
            try:
                self.swift_process = self.spm.get_or_create_sp(self.session.get_swift_working_dir(), self.session.get_torrent_collecting_dir(), self.session.get_swift_tunnel_listen_port(), self.session.get_swift_tunnel_httpgw_listen_port(), self.session.get_swift_tunnel_cmdgw_listen_port())
                self.upnp_ports.append((self.session.get_swift_tunnel_listen_port(), 'UDP'))
            except OSError:
                # could not find/run swift
                print >> sys.stderr, "lmc: could not start a swift process"
        else:
            self.spm = None
            self.swift_process = None

        # Dispersy
        self.session.dispersy_member = None
        if config['dispersy']:
            from Tribler.dispersy.callback import Callback
            from Tribler.dispersy.dispersy import Dispersy
            from Tribler.dispersy.endpoint import RawserverEndpoint, TunnelEndpoint
            from Tribler.dispersy.community import HardKilledCommunity

            # set communication endpoint
            if config['dispersy-tunnel-over-swift'] and self.swift_process:
                endpoint = TunnelEndpoint(self.swift_process)
            else:
                endpoint = RawserverEndpoint(self.rawserver, config['dispersy_port'])

            callback = Callback("Dispersy")  # WARNING NAME SIGNIFICANT
            working_directory = unicode(config['state_dir'])

            self.dispersy = Dispersy(callback, endpoint, working_directory)

            # TODO: see if we can postpone dispersy.start to improve GUI responsiveness.
            # However, for now we must start self.dispersy.callback before running
            # try_register(nocachedb, self.database_thread)!
            self.dispersy.start()

            print >> sys.stderr, "lmc: Dispersy is listening on port", self.dispersy.wan_address[1], "using", endpoint
            self.upnp_ports.append((self.dispersy.wan_address[1], 'UDP'))

            self.dispersy.callback.call(self.dispersy.define_auto_load, args=(HardKilledCommunity,), kargs={'load': True})

            # notify dispersy finished loading
            self.session.uch.notify(NTFY_DISPERSY, NTFY_STARTED, None)

            from Tribler.Core.permid import read_keypair
            from Tribler.dispersy.crypto import ec_to_public_bin, ec_to_private_bin
            keypair = read_keypair(self.session.get_permid_keypair_filename())
            self.session.dispersy_member = callback.call(self.dispersy.get_member, (ec_to_public_bin(keypair), ec_to_private_bin(keypair)))

            self.database_thread = callback
        else:
            class FakeCallback():
                # Stand-in for the Dispersy Callback thread when Dispersy is
                # disabled; schedules tasks on a TimedTaskQueue instead.
                def __init__(self):
                    from Tribler.Utilities.TimedTaskQueue import TimedTaskQueue
                    self.queue = TimedTaskQueue("FakeCallback")

                def register(self, call, args=(), kargs=None, delay=0.0, priority=0, id_=u"", callback=None, callback_args=(), callback_kargs=None, include_id=False):
                    # Same contract as Callback.register: run the call after
                    # `delay`, then the optional completion callback.
                    def do_task():
                        if kargs:
                            call(*args, **kargs)
                        else:
                            call(*args)

                        if callback:
                            if callback_kargs:
                                callback(*callback_args, **callback_kargs)
                            else:
                                callback(*callback_args)
                    self.queue.add_task(do_task, t=delay)

                def shutdown(self, immediately=False):
                    self.queue.shutdown(immediately)

            self.database_thread = FakeCallback()

        if config['megacache']:
            import Tribler.Core.CacheDB.cachedb as cachedb
            from Tribler.Core.CacheDB.SqliteCacheDBHandler import PeerDBHandler, TorrentDBHandler, MyPreferenceDBHandler, VoteCastDBHandler, ChannelCastDBHandler, NetworkBuzzDBHandler, UserEventLogDBHandler
            from Tribler.Category.Category import Category
            from Tribler.Core.Tag.Extraction import TermExtraction
            from Tribler.Core.CacheDB.sqlitecachedb import try_register

            if DEBUG:
                print >> sys.stderr, 'tlm: Reading Session state from', config['state_dir']

            nocachedb = cachedb.init(config, self.rawserver_fatalerrorfunc)
            try_register(nocachedb, self.database_thread)

            self.cat = Category.getInstance(config['install_dir'])
            self.term = TermExtraction.getInstance(config['install_dir'])

            self.peer_db = PeerDBHandler.getInstance()
            self.peer_db.registerConnectionUpdater(self.session)

            self.torrent_db = TorrentDBHandler.getInstance()
            self.torrent_db.register(os.path.abspath(config['torrent_collecting_dir']))
            self.mypref_db = MyPreferenceDBHandler.getInstance()
            self.votecast_db = VoteCastDBHandler.getInstance()
            self.votecast_db.registerSession(self.session)
            self.channelcast_db = ChannelCastDBHandler.getInstance()
            self.channelcast_db.registerSession(self.session)
            self.nb_db = NetworkBuzzDBHandler.getInstance()
            self.ue_db = UserEventLogDBHandler.getInstance()

            if self.dispersy:
                # Commit the channelcast db in lock-step with Dispersy commits.
                self.dispersy.database.attach_commit_callback(self.channelcast_db._db.commitNow)
        else:
            config['torrent_checking'] = 0

        self.rtorrent_handler = None
        if config['torrent_collecting']:
            from Tribler.Core.RemoteTorrentHandler import RemoteTorrentHandler
            self.rtorrent_handler = RemoteTorrentHandler()