def run():
    """Start a standalone Dispersy instance that crawls the TriblerChain community.

    Runs until SIGINT/SIGTERM is received, then stops Dispersy and the reactor.
    """
    crypto = ECCrypto()
    dispersy = Dispersy(StandaloneEndpoint(options["port"], options["ip"]),
                        options["statedir"], u'dispersy.db', crypto)
    if not dispersy.start():
        raise RuntimeError("Unable to start Dispersy")
    master_member = TriblerChainCommunityCrawler.get_master_members(dispersy)[0]
    # Join with a fresh, throw-away curve25519 key as our member identity.
    my_member = dispersy.get_member(
        private_key=crypto.key_to_bin(crypto.generate_key(u"curve25519")))
    TriblerChainCommunityCrawler.init_community(dispersy, master_member, my_member)
    # NOTE(review): `self` is not defined in this function's visible scope --
    # presumably bound by the enclosing context; confirm before refactoring.
    self._stopping = False

    def signal_handler(sig, frame):
        msg("Received signal '%s' in %s (shutting down)" % (sig, frame))
        # Guard so a second signal during shutdown does not stop twice.
        if not self._stopping:
            self._stopping = True
            dispersy.stop().addCallback(lambda _: reactor.stop())

    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)
def setUp(self, autoload_discovery=True):
    """Prepare a bare Dispersy instance carrying a dummy hidden-tunnel community."""
    yield super(TestCircuitDebugEndpoint, self).setUp(autoload_discovery=autoload_discovery)

    self.dispersy = Dispersy(ManualEnpoint(0), self.getStateDir())
    self.dispersy._database.open()

    dummy_master = DummyMember(self.dispersy, 1, "a" * 20)
    new_member = self.dispersy.get_new_member(u"curve25519")
    self.tunnel_community = HiddenTunnelCommunity(self.dispersy, dummy_master, new_member)

    # Make the community and the Dispersy instance discoverable by the code under test.
    self.dispersy.get_communities = lambda: [self.tunnel_community]
    self.session.get_dispersy_instance = lambda: self.dispersy
def setUp(self, autoload_discovery=True):
    """Prepare a bare Dispersy instance carrying a TriblerChain community."""
    yield super(TestTrustchainStatsEndpoint, self).setUp(autoload_discovery=autoload_discovery)

    self.dispersy = Dispersy(ManualEnpoint(0), self.getStateDir())
    self.dispersy._database.open()

    dummy_master = DummyMember(self.dispersy, 1, "a" * 20)
    self.member = self.dispersy.get_new_member(u"curve25519")
    self.tc_community = TriblerChainCommunity(self.dispersy, dummy_master, self.member)

    # Make the community and the Dispersy instance discoverable by the code under test.
    self.dispersy.get_communities = lambda: [self.tc_community]
    self.session.get_dispersy_instance = lambda: self.dispersy
def register_task(*args, **kwargs):
    """Proxy to Dispersy's callback.register, lazily resolved.

    Blocks (polling) until the Dispersy singleton exists, then caches its
    register method in the module-level ``_register_task``.
    """
    global _register_task
    if not _register_task:
        # 21/11/11 Boudewijn: there are conditions where the Dispersy instance has not yet been
        # created. In this case we must wait.
        while not Dispersy.has_instance():
            sleep(0.1)
        _register_task = Dispersy.has_instance().callback.register
    return _register_task(*args, **kwargs)
def start_dispersy(self): msg("Starting dispersy") # We need to import the stuff _AFTER_ configuring the logging stuff. from Tribler.dispersy.callback import Callback from Tribler.dispersy.dispersy import Dispersy from Tribler.dispersy.endpoint import StandaloneEndpoint self._dispersy = Dispersy( Callback("Dispersy"), StandaloneEndpoint(int(self.my_id) + 12000, '0.0.0.0'), u'.', self._database_file) self._dispersy.statistics.enable_debug_statistics(True) if self._strict: def exception_handler(exception, fatal): msg("An exception occurred. Quitting because we are running with --strict enabled." ) print "Exception was:" try: raise exception except: from traceback import print_exc print_exc() # Set Dispersy's exit status to error self._dispersy_exit_status = 1 # Stop the experiment reactor.callLater(1, self.stop) return True self._dispersy.callback.attach_exception_handler(exception_handler) self._dispersy.start() # low (NID_sect233k1) isn't actually that low, switching to 160bits as this is comparable to rsa 1024 # http://www.nsa.gov/business/programs/elliptic_curve.shtml # speed difference when signing/verifying 100 items # NID_sect233k1 signing took 0.171 verify took 0.35 totals 0.521 # NID_secp160k1 signing took 0.04 verify took 0.04 totals 0.08 self._my_member = self._dispersy.callback.call( self._dispersy.get_new_member, (u"NID_secp160k1", )) self._master_member = self._dispersy.callback.call( self._dispersy.get_member, (self.master_key, )) self._dispersy.callback.register(self._do_log) msg("Finished starting dispersy")
def main():
    """Run a minimal Tribler session (dispersy + megacache only) that serves search requests."""
    sscfg = SessionStartupConfig()
    sscfg.set_state_dir(unicode(os.path.realpath("/tmp")))
    sscfg.set_dispersy_port(6421)
    sscfg.set_nickname("dispersy")

    # The only modules needed by dispersy and DHT.
    sscfg.set_dispersy(True)
    sscfg.set_megacache(True)

    # Disable all other tribler modules.
    sscfg.set_swift_proc(False)
    sscfg.set_buddycast(False)
    sscfg.set_social_networking(False)
    sscfg.set_remote_query(False)
    sscfg.set_bartercast(False)
    sscfg.set_overlay(False)
    sscfg.set_torrent_collecting(False)
    sscfg.set_dialback(False)
    sscfg.set_internal_tracker(False)

    # Create the session and wait for it to be created.
    session = Session(sscfg)
    time.sleep(5)

    # Create the dispersy instance and make it accessible out of the main().
    # NOTE(review): `dispersy` and `search_community` are presumably module-level
    # lists used to share state with code outside this function -- confirm.
    dispersy.append(Dispersy.has_instance())

    # Create the NetworkBuzzDBHandler that should be made in the tribler GUI.
    NetworkBuzzDBHandler.getInstance()

    #def on_torrent(messages):
        #pass

    # Find the search community from the dispersy instance.
    def findSearchCommunity():
        for community in dispersy[0].get_communities():
            if isinstance(community, SearchCommunity):
                search_community.append(community)
                #searchCommunity.on_torrent = on_torrent
                break

    # Let the dispersy thread find the search community.
    # MUST be called on the dispersy thread.
    dispersy[0].callback.register(findSearchCommunity)

    # Any search request before this point will create a segfault!
    print >> sys.stderr, "Ready to search!"

    # Keep the main function spinning to keep the session alive and dispersy and DHT running.
    try:
        while True:
            sys.stdin.read()
    except:
        print_exc()

    # Shutdown everything.
    session.shutdown()
    print "Shutting down..."
    time.sleep(5)
def _showInspectionTool(self):
    """Open the wx widget-inspection tool and expose handy objects in its shell."""
    import wx.lib.inspection
    inspection_tool = wx.lib.inspection.InspectionTool()
    inspection_tool.Show()

    # Populate the tool's shell namespace; any failure is only logged.
    try:
        frame = inspection_tool._frame

        import Tribler
        frame.locals['Tribler'] = Tribler

        from Tribler.Core.Overlay.SecureOverlay import SecureOverlay
        frame.locals['overlay'] = SecureOverlay.getInstance()

        frame.locals['session'] = Session.get_instance()

        from Tribler.Core.BuddyCast.buddycast import BuddyCastFactory
        frame.locals['channelcast'] = BuddyCastFactory.getInstance().channelcast_core

        frame.locals['dispersy'] = Dispersy.get_instance()
    except Exception:
        import traceback
        traceback.print_exc()
def setUp(self, annotate=True, autoload_discovery=True):
    """
    Setup some classes and files that are used by the tests in this module.
    """
    yield super(BaseTestChannel, self).setUp(autoload_discovery=autoload_discovery)
    if annotate:
        self.annotate(self._testMethodName, start=True)

    # Mocked session with a notifier that swallows every event.
    self.fake_session = MockObject()
    self.fake_session.get_state_dir = lambda: self.session_base_dir
    self.fake_session.add_observer = lambda a, b, c: False

    mock_notifier = MockObject()
    mock_notifier.add_observer = lambda a, b, c, d: False
    mock_notifier.notify = lambda a, b, c, d: False
    self.fake_session.notifier = mock_notifier

    # Mocked channel community with fixed id/cid/name.
    self.fake_channel_community = MockObject()
    self.fake_channel_community.get_channel_id = lambda: 42
    self.fake_channel_community.cid = 'a' * 20
    self.fake_channel_community.get_channel_name = lambda: "my fancy channel"

    self.channel_db_handler = self.session.open_dbhandler(NTFY_CHANNELCAST)
    self.votecast_db_handler = self.session.open_dbhandler(NTFY_VOTECAST)

    self.session.get_dispersy = lambda: True
    self.session.lm.dispersy = Dispersy(ManualEnpoint(0), self.getStateDir())
def __init__(self, parent):
    """Home-panel widget that periodically renders Dispersy statistics."""
    self.buildColumns = False
    # NOTE(review): assumes Dispersy has already been started -- confirm callers.
    self.dispersy = Dispersy.get_instance()

    HomePanel.__init__(self, parent, 'Dispersy info' , LIST_BLUE)
    self.SetMinSize((-1, 200))

    # Refresh the displayed statistics every 5 seconds.
    self.timer = wx.Timer(self)
    self.Bind(wx.EVT_TIMER, self._onTimer, self.timer)
    self.timer.Start(5000, False)
    self.UpdateStats()

    # Maps a statistics key to a list of (label, formatter) pairs;
    # keys mapped to an empty list are known but not displayed.
    self.mapping = {"total_down":[("Down", lambda info: self.utility.size_format(info["total_down"])),
                                  ("Down avg", lambda info: self.utility.size_format(int(info["total_down"] / info["runtime"])) + "/s")],
                    "total_up":[("Up", lambda info: self.utility.size_format(info["total_up"])),
                                ("Up avg", lambda info: self.utility.size_format(int(info["total_up"] / info["runtime"])) + "/s")],
                    "drop":[("Dropped", lambda info: "%s ~%.1f%%" % (self.utility.size_format(int(sum(byte_count for _, byte_count in info["drop"].itervalues()))), (100.0 * sum(byte_count for _, byte_count in info["drop"].itervalues()) / info["total_down"]) if info["total_down"] else 0.0))],
                    "walk_success":[("Walker success", lambda info: "%d / %d ~%.1f%%" % (info["walk_success"], info["walk_attempt"], (100.0 * info["walk_success"] / info["walk_attempt"]) if info["walk_attempt"] else 0.0))],
                    "walk_reset":[("Walker resets", lambda info: str(info["walk_reset"]))],
                    "wan_address":[("Address wan", lambda info: "%s:%d" % info["wan_address"])],
                    "lan_address":[("Address lan", lambda info: "%s:%d" % info["lan_address"])],
                    "runtime":[("Runtime", lambda info: self.utility.eta_value(info["runtime"]))],
                    "walk_attempt":[],
                    "outgoing":[],
                    "timestamp":[],
                    "class":[],
                    "success":[],
                    "delay":[],
                    "version":[],
                    "communities":[],
                    "sequence_number":[],
                    "start":[],
                    "walk_fail":[],
                    "attachment":[]}
def execute_scenario_cmds(self, commands):
    """Execute scenario commands: 'create', 'publish', 'post' and 'join'.

    Published torrents are batched and inserted in one dispersy message at the end.
    """
    torrents = []
    for command in commands:
        cur_command = command.split()
        if cur_command[0] == 'create':
            log(self._logfile, "creating-community")
            self.my_channel = ChannelCommunity.create_community(self.my_member, integrate_with_tribler = False)

            log(self._logfile, "creating-channel-message")
            self.my_channel.create_channel(u'', u'')

        elif cur_command[0] == 'publish':
            if self.my_channel:
                # Build a fake torrent with random name, files and trackers.
                infohash = str(self.torrentindex)
                infohash += ''.join(choice(letters) for _ in xrange(20-len(infohash)))

                name = u''.join(choice(letters) for _ in xrange(100))
                files = []
                for _ in range(10):
                    files.append((u''.join(choice(letters) for _ in xrange(30)), 123455))

                trackers = []
                for _ in range(10):
                    trackers.append(''.join(choice(letters) for _ in xrange(30)))

                files = tuple(files)
                trackers = tuple(trackers)

                self.torrentindex += 1
                torrents.append((infohash, int(time()), name, files, trackers))

        elif cur_command[0] == 'post':
            if self.joined_community:
                text = ''.join(choice(letters) for i in xrange(160))
                self.joined_community._disp_create_comment(text, int(time()), None, None, None, None)

        elif cur_command[0] == 'join':
            self.want_to_join = True

    if self.want_to_join:
        from Tribler.dispersy.dispersy import Dispersy
        dispersy = Dispersy.get_instance()

        log(self._logfile, "trying-to-join-community")

        # Join the first preview community that already has a channel id.
        for community in dispersy.get_communities():
            if isinstance(community, PreviewChannelCommunity) and community._channel_id:
                self._community.disp_create_votecast(community.cid, 2, int(time()))

                log(self._logfile, "joining-community")
                self.joined_community = community

                self.want_to_join = False
                break

    if len(torrents) > 0:
        log(self._logfile, "creating-torrents")
        self.my_channel._disp_create_torrents(torrents)
def __init__(self, parent):
    """Home-panel widget that periodically renders Dispersy statistics."""
    self.buildColumns = False
    # NOTE(review): assumes Dispersy has already been started -- confirm callers.
    self.dispersy = Dispersy.get_instance()

    HomePanel.__init__(self, parent, 'Dispersy info', LIST_BLUE)
    self.SetMinSize((-1, 200))

    # Refresh the displayed statistics every 5 seconds.
    self.timer = wx.Timer(self)
    self.Bind(wx.EVT_TIMER, self._onTimer, self.timer)
    self.timer.Start(5000, False)
    self.UpdateStats()

    # Maps a statistics key to a list of (label, formatter) pairs;
    # keys mapped to an empty list are known but not displayed.
    self.mapping = {
        "total_down": [("Down", lambda info: self.utility.size_format(info["total_down"])),
                       ("Down avg", lambda info: self.utility.size_format(int(info["total_down"] / info["runtime"])) + "/s")],
        "total_up": [("Up", lambda info: self.utility.size_format(info["total_up"])),
                     ("Up avg", lambda info: self.utility.size_format(int(info["total_up"] / info["runtime"])) + "/s")],
        "drop": [("Dropped", lambda info: "%s ~%.1f%%" % (self.utility.size_format(int(sum(byte_count for _, byte_count in info["drop"].itervalues()))), (100.0 * sum(byte_count for _, byte_count in info["drop"].itervalues()) / info["total_down"]) if info["total_down"] else 0.0))],
        "walk_success": [("Walker success", lambda info: "%d / %d ~%.1f%%" % (info["walk_success"], info["walk_attempt"], (100.0 * info["walk_success"] / info["walk_attempt"]) if info["walk_attempt"] else 0.0))],
        "walk_reset": [("Walker resets", lambda info: str(info["walk_reset"]))],
        "wan_address": [("Address wan", lambda info: "%s:%d" % info["wan_address"])],
        "lan_address": [("Address lan", lambda info: "%s:%d" % info["lan_address"])],
        "runtime": [("Runtime", lambda info: self.utility.eta_value(info["runtime"]))],
        "walk_attempt": [],
        "outgoing": [],
        "timestamp": [],
        "class": [],
        "success": [],
        "delay": [],
        "version": [],
        "communities": [],
        "sequence_number": [],
        "start": [],
        "walk_fail": [],
        "attachment": []
    }
def setUp(self, annotate=True, autoload_discovery=True):
    """Prepare a channel DB handler plus a bare Dispersy instance and a fixed cid."""
    yield super(TestBoostingManagerSysChannel, self).setUp()

    self.channel_db_handler = self.session.open_dbhandler(NTFY_CHANNELCAST)
    self.channel_db_handler._get_my_dispersy_cid = lambda: "myfakedispersyid"

    self.session.config.get_dispersy_enabled = lambda: True
    self.session.lm.dispersy = Dispersy(ManualEnpoint(0), self.getStateDir())

    # Well-known 40-hex-digit community id used throughout these tests.
    cid_hex = "abcd" * 9 + "0012"
    self.dispersy_cid_hex = cid_hex
    self.dispersy_cid = binascii.unhexlify(cid_hex)
def setUp(self):
    """Create the statistics object, five named peers, and a bare Dispersy instance."""
    super(TestBarterStatistics, self).setUp()
    self.stats = BarterStatistics()
    # _peer1 .. _peer5 hold the string identifiers "peer1" .. "peer5".
    for index in xrange(1, 6):
        setattr(self, "_peer%d" % index, "peer%d" % index)
    self.dispersy = Dispersy(ManualEnpoint(0), self.getStateDir())
def start_dispersy(self, autoload_discovery=True): msg("Starting dispersy") # We need to import the stuff _AFTER_ configuring the logging stuff. try: from Tribler.dispersy.dispersy import Dispersy from Tribler.dispersy.endpoint import StandaloneEndpoint from Tribler.dispersy.util import unhandled_error_observer except: from dispersy.dispersy import Dispersy from dispersy.endpoint import StandaloneEndpoint from dispersy.util import unhandled_error_observer self._dispersy = Dispersy( StandaloneEndpoint(int(self.my_id) + 12000, '0.0.0.0'), u'.', self._database_file, self._crypto) self._dispersy.statistics.enable_debug_statistics(True) self.original_on_incoming_packets = self._dispersy.on_incoming_packets if self._strict: from twisted.python.log import addObserver addObserver(unhandled_error_observer) self._dispersy.start(autoload_discovery=autoload_discovery) if self.master_private_key: self._master_member = self._dispersy.get_member( private_key=self.master_private_key) else: self._master_member = self._dispersy.get_member( public_key=self.master_key) self._my_member = self._dispersy.get_member( private_key=self.my_member_private_key) assert self._master_member assert self._my_member self._do_log() self.print_on_change('community-kwargs', {}, self.community_kwargs) self.print_on_change('community-env', {}, {'pid': getpid()}) msg("Finished starting dispersy")
class AbstractTestCommunity(AbstractServer):
    """Base test case providing a bare Dispersy instance with a dummy master member."""

    # We have to initialize Dispersy and the tunnel community on the reactor thread
    @inlineCallbacks
    def setUp(self):
        yield super(AbstractTestCommunity, self).setUp()
        self.dispersy = Dispersy(ManualEnpoint(0), self.getStateDir())
        self.dispersy._database.open()
        # A fresh curve25519 member plus a fixed dummy master.
        self.member = self.dispersy.get_new_member(u"curve25519")
        self.master_member = DummyMember(self.dispersy, 1, "a" * 20)

    @inlineCallbacks
    def tearDown(self):
        # Unload every community before the base class cleans up.
        for community in self.dispersy.get_communities():
            yield community.unload_community()
        self.member = None
        self.master_member = None
        yield super(AbstractTestCommunity, self).tearDown()
def test_chn_exist_lookup(self):
    """
    testing existing channel as a source.

    It also tests how boosting manager cope with unknown channel with
    retrying the lookup
    """
    self.session.get_dispersy = lambda: True
    self.session.lm.dispersy = Dispersy(ManualEnpoint(0), self.getStateDir())
    dispersy_cid_hex = "abcd" * 9 + "0012"
    dispersy_cid = binascii.unhexlify(dispersy_cid_hex)

    # create channel and insert torrent
    self.create_fake_allchannel_community()
    self.create_torrents_in_channel(dispersy_cid_hex)

    # channel is exist
    community = ChannelCommunity.init_community(self.session.lm.dispersy,
                                                self.session.lm.dispersy.get_member(mid=dispersy_cid),
                                                self.session.lm.dispersy._communities['allchannel']._my_member,
                                                self.session)

    # make the id unknown so boosting manager can test repeating search
    id_tmp = community._channel_id
    community._channel_id = 0

    def _set_id_channel(channel_id):
        """
        set channel id manually (emulate finding)
        """
        community._channel_id = channel_id

    # After 5 seconds the channel id is "found" again, exercising the retry path.
    reactor.callLater(5, _set_id_channel, id_tmp)

    self.boosting_manager.add_source(dispersy_cid)
    chn_obj = self.boosting_manager.get_source_object(dispersy_cid)
    chn_obj._load_torrent = self._load

    def clean_community(_):
        """
        cleanly exit the community we are in
        """
        if chn_obj.community:
            chn_obj.community.cancel_all_pending_tasks()
        chn_obj.kill_tasks()

    d = self.check_source(dispersy_cid)
    d.addCallback(clean_community)
    return d
def test_chn_native_load(self):
    """Test loading a torrent natively from a channel source.

    Stubs out torrent download/collection so the boosting manager receives the
    bundled Ubuntu test torrent, then polls until it is loaded.
    """
    self.session.get_dispersy = lambda: True
    self.session.lm.dispersy = Dispersy(ManualEnpoint(0), self.getStateDir())
    dispersy_cid_hex = "abcd" * 9 + "0012"
    dispersy_cid = binascii.unhexlify(dispersy_cid_hex)

    # create channel and insert torrent
    self.create_fake_allchannel_community()
    self.create_torrents_in_channel(dispersy_cid_hex)

    # Pretend every torrentfile download immediately succeeds with the test infohash.
    self.session.download_torrentfile = \
        lambda dummy_ihash, function, _: function(binascii.hexlify(TORRENT_UBUNTU_FILE_INFOHASH))

    def get_bin_torrent(_):
        """
        get binary data of a torrent
        """
        f = open(TORRENT_UBUNTU_FILE, "rb")
        bdata = f.read()
        f.close()
        return bdata

    self.session.get_collected_torrent = get_bin_torrent

    self.boosting_manager.add_source(dispersy_cid)

    def _loop_check(_):
        # Poll once per second until the test torrent shows up in the source.
        defer_param = defer.Deferred()

        def check_loaded(src):
            """
            check if a torrent has been loaded
            """
            src_obj = self.boosting_manager.get_source_object(src)
            if src_obj.loaded_torrent[TORRENT_UBUNTU_FILE_INFOHASH] is not None:
                src_obj.community.cancel_all_pending_tasks()
                src_obj.kill_tasks()
                self.check_loaded_lc.stop()
                self.check_loaded_lc = None
                defer_param.callback(src)

        self.check_loaded_lc = LoopingCall(check_loaded, dispersy_cid)
        self.check_loaded_lc.start(1, now=True)

        return defer_param

    defer_ret = self.check_source(dispersy_cid)
    defer_ret.addCallback(_loop_check)

    return defer_ret
def load_community(cls, master):
    """Load the EffortCommunity for ``master``, joining it first if it is unknown."""
    dispersy = Dispersy.get_instance()
    try:
        # test if this community already exists
        classification, = next(dispersy.database.execute(
            u"SELECT classification FROM community WHERE master = ?",
            (master.database_id,)))
    except StopIteration:
        # join the community with a new my_member, using a cheap cryptography key
        ec = ec_generate_key(u"NID_secp160r1")
        my_member = Member(ec_to_public_bin(ec), ec_to_private_bin(ec))
        return cls.join_community(master, my_member)

    if classification == cls.get_classification():
        return super(EffortCommunity, cls).load_community(master)
    raise RuntimeError("Unable to load an EffortCommunity that has been killed")
def start_dispersy(self, autoload_discovery=True): msg("Starting dispersy") # We need to import the stuff _AFTER_ configuring the logging stuff. try: from Tribler.dispersy.dispersy import Dispersy from Tribler.dispersy.endpoint import StandaloneEndpoint except: from dispersy.dispersy import Dispersy from dispersy.endpoint import StandaloneEndpoint self._dispersy = Dispersy(StandaloneEndpoint(int(self.my_id) + 12000, '0.0.0.0'), u'.', self._database_file, self._crypto) self._dispersy.statistics.enable_debug_statistics(True) self.original_on_incoming_packets = self._dispersy.on_incoming_packets if self._strict: def exception_handler(exception, fatal): msg("An exception occurred. Quitting because we are running with --strict enabled.") print >> stderr, "Exception was:" try: raise exception except: from traceback import print_exc print_exc() # Set Dispersy's exit status to error self._dispersy_exit_status = 1 # Stop the experiment reactor.callLater(1, self.stop) return True #self._dispersy.callback.attach_exception_handler(exception_handler) self._dispersy.start(autoload_discovery=autoload_discovery) if self.master_private_key: self._master_member = self._dispersy.get_member(private_key=self.master_private_key) else: self._master_member = self._dispersy.get_member(public_key=self.master_key) self._my_member = self._dispersy.get_member(private_key=self.my_member_private_key) assert self._master_member assert self._my_member self._do_log() self.print_on_change('community-kwargs', {}, self.community_kwargs) self.print_on_change('community-env', {}, {'pid':getpid()}) msg("Finished starting dispersy")
def load_community(cls, master, swift_process):
    """Load the EffortCommunity for ``master``, joining it first if it is unknown."""
    dispersy = Dispersy.get_instance()
    try:
        # test if this community already exists
        classification, = next(dispersy.database.execute(
            u"SELECT classification FROM community WHERE master = ?",
            (master.database_id,)))
    except StopIteration:
        # join the community with a new my_member, using a cheap cryptography key
        ec = ec_generate_key(u"NID_secp160r1")
        my_member = Member(ec_to_public_bin(ec), ec_to_private_bin(ec))
        return cls.join_community(master, my_member, swift_process)

    if classification == cls.get_classification():
        return super(EffortCommunity, cls).load_community(master, swift_process)
    raise RuntimeError("Unable to load an EffortCommunity that has been killed")
def _showInspectionTool(self):
    """Open the wx widget-inspection tool and expose handy objects in its shell."""
    import wx.lib.inspection
    inspection_tool = wx.lib.inspection.InspectionTool()
    inspection_tool.Show()

    # Populate the tool's shell namespace; any failure is only logged.
    try:
        frame = inspection_tool._frame

        import Tribler
        frame.locals['Tribler'] = Tribler
        frame.locals['session'] = Session.get_instance()
        frame.locals['dispersy'] = Dispersy.get_instance()
    except Exception:
        import traceback
        traceback.print_exc()
def __init__(self, parent):
    """Home-panel widget rendering a fixed table of Dispersy runtime statistics."""
    self.buildColumns = False
    self.dispersy = Dispersy.has_instance()
    if not self.dispersy:
        raise RuntimeError("Dispersy has not started yet")

    HomePanel.__init__(self, parent, 'Dispersy info' , SEPARATOR_GREY)
    self.SetMinSize((-1, 200))

    # Refresh the displayed statistics every 5 seconds.
    self.timer = wx.Timer(self)
    self.Bind(wx.EVT_TIMER, self._onTimer, self.timer)
    self.timer.Start(5000, False)
    self.UpdateStats()

    def ratio(i, j):
        # Render "i / j ~p%" with a guard against division by zero.
        return "%d / %d ~%.1f%%" % (i, j, (100.0 * i / j) if j else 0.0)

    # Ordered list of (label, tooltip, formatter) triplets shown in the panel.
    self.mapping = [("WAN Address", '', lambda stats: "%s:%d" % stats.wan_address),
                    ("LAN Address", '', lambda stats: "%s:%d" % stats.lan_address),
                    ("Connection", '', lambda stats: str(stats.connection_type)),
                    ("Runtime", '', lambda stats: self.utility.eta_value(stats.timestamp - stats.start)),
                    ("Download", '', lambda stats: self.utility.size_format(stats.total_down) + " or " + self.utility.size_format(int(stats.total_down / (stats.timestamp - stats.start))) + "/s"),
                    ("Upload", '', lambda stats: self.utility.size_format(stats.total_up) + " or " + self.utility.size_format(int(stats.total_up / (stats.timestamp - stats.start))) + "/s"),
                    ("Packets send", 'Packets send vs Packets handled', lambda stats: ratio(stats.total_send, stats.received_count + stats.total_send)),
                    ("Packets received", 'Packets received vs Packets handled', lambda stats: ratio(stats.received_count, stats.received_count + stats.total_send)),
                    ("Packets dropped", 'Packets dropped vs Packets received', lambda stats: ratio(stats.drop_count, stats.received_count)),
                    ("Packets success", 'Messages successfully handled vs Packets received', lambda stats: ratio(stats.success_count, stats.received_count)),
                    ("Packets delayed", 'Packets being delayed vs Packets reveived', lambda stats: ratio(stats.delay_count, stats.received_count)),
                    ("Sync-Messages created", 'Total number of sync messages created by us in this session', lambda stats: str(stats.created_count)),
                    ("Candidates reuse", 'Candidates discovered (intro or stumbled) vs Candidates active in more than one community', lambda stats: ratio(stats.total_candidates_overlapped, stats.total_candidates_discovered)),
                    ("Packets delayed send", 'Total number of delaymessages or delaypacket messages being send', lambda stats: ratio(stats.delay_send, stats.delay_count)),
                    ("Packets delayed success", 'Total number of packets which were delayed, and did not timeout', lambda stats: ratio(stats.delay_success, stats.delay_count)),
                    ("Packets delayed timeout", 'Total number of packets which were delayed, but got a timeout', lambda stats: ratio(stats.delay_timeout, stats.delay_count)),
                    ("Walker success", '', lambda stats: ratio(stats.walk_success, stats.walk_attempt)),
                    ("Walker success (from trackers)", 'Comparing the successes to tracker to overall successes.', lambda stats: ratio(stats.walk_bootstrap_success, stats.walk_bootstrap_attempt)),
                    ("Walker resets", '', lambda stats: str(stats.walk_reset)),
                    ("Bloom reuse", '', lambda stats: ratio(sum(c.sync_bloom_reuse for c in stats.communities), sum(c.sync_bloom_new for c in stats.communities))),
                    ("Revision", '', lambda stats: str(max(stats.revision.itervalues()))),
                    ("Debug mode", '', lambda stats: "yes" if __debug__ else "no"),
                    ]
def setUp(self):
    """
    The startup method of this class creates a fake Dispersy instance with a fake AllChannel
    community. It also inserts some random channels so we have some data to work with.
    """
    yield super(TestChannelsSubscriptionEndpoint, self).setUp()

    self.expected_votecast_cid = None
    self.expected_votecast_vote = None
    self.create_votecast_called = False

    fake_community = self.create_fake_allchannel_community()
    fake_community.disp_create_votecast = self.on_dispersy_create_votecast

    self.session.config.get_dispersy_enabled = lambda: True
    self.session.lm.dispersy = Dispersy(ManualEnpoint(0), self.getStateDir())
    self.session.lm.dispersy.attach_community(fake_community)

    # Populate the database with ten random channels.
    for index in xrange(10):
        self.insert_channel_in_db('rand%d' % index, 42 + index,
                                  'Test channel %d' % index,
                                  'Test description %d' % index)
def start_dispersy(self): msg("Starting dispersy") # We need to import the stuff _AFTER_ configuring the logging stuff. from Tribler.dispersy.callback import Callback from Tribler.dispersy.dispersy import Dispersy from Tribler.dispersy.endpoint import StandaloneEndpoint self._dispersy = Dispersy(Callback("Dispersy"), StandaloneEndpoint(int(self.my_id) + 12000, '0.0.0.0'), u'.', self._database_file) self._dispersy.statistics.enable_debug_statistics(True) if self._strict: def exception_handler(exception, fatal): msg("An exception occurred. Quitting because we are running with --strict enabled.") print "Exception was:" try: raise exception except: from traceback import print_exc print_exc() # Set Dispersy's exit status to error self._dispersy_exit_status = 1 # Stop the experiment reactor.callLater(1, self.stop) return True self._dispersy.callback.attach_exception_handler(exception_handler) self._dispersy.start() # low (NID_sect233k1) isn't actually that low, switching to 160bits as this is comparable to rsa 1024 # http://www.nsa.gov/business/programs/elliptic_curve.shtml # speed difference when signing/verifying 100 items # NID_sect233k1 signing took 0.171 verify took 0.35 totals 0.521 # NID_secp160k1 signing took 0.04 verify took 0.04 totals 0.08 self._my_member = self._dispersy.callback.call(self._dispersy.get_new_member, (u"NID_secp160k1",)) self._master_member = self._dispersy.callback.call(self._dispersy.get_member, (self.master_key,)) self._dispersy.callback.register(self._do_log) msg("Finished starting dispersy")
def register(self, session, session_lock):
    """Register the session and lazily construct all enabled Tribler components.

    Safe to call more than once: the heavy construction only runs the first time
    (guarded by ``self.registered``). Returns ``self.startup_deferred``.
    """
    assert isInIOThread()
    if not self.registered:
        self.registered = True

        self.session = session
        self.session_lock = session_lock

        # On Mac, we bundle the root certificate for the SSL validation since Twisted is not using the root
        # certificates provided by the system trust store.
        if sys.platform == 'darwin':
            os.environ['SSL_CERT_FILE'] = os.path.join(get_lib_path(), 'root_certs_mac.pem')

        if self.session.config.get_torrent_store_enabled():
            from Tribler.Core.leveldbstore import LevelDbStore
            self.torrent_store = LevelDbStore(self.session.config.get_torrent_store_dir())
            if not self.torrent_store.get_db():
                raise RuntimeError("Torrent store (leveldb) is None which should not normally happen")

        if self.session.config.get_metadata_enabled():
            from Tribler.Core.leveldbstore import LevelDbStore
            self.metadata_store = LevelDbStore(self.session.config.get_metadata_store_dir())
            if not self.metadata_store.get_db():
                raise RuntimeError("Metadata store (leveldb) is None which should not normally happen")

        # torrent collecting: RemoteTorrentHandler
        if self.session.config.get_torrent_collecting_enabled():
            from Tribler.Core.RemoteTorrentHandler import RemoteTorrentHandler
            self.rtorrent_handler = RemoteTorrentHandler(self.session)

        # TODO(emilon): move this to a megacache component or smth
        if self.session.config.get_megacache_enabled():
            from Tribler.Core.CacheDB.SqliteCacheDBHandler import (PeerDBHandler, TorrentDBHandler,
                                                                   MyPreferenceDBHandler, VoteCastDBHandler,
                                                                   ChannelCastDBHandler)
            from Tribler.Core.Category.Category import Category

            self._logger.debug('tlm: Reading Session state from %s', self.session.config.get_state_dir())

            self.category = Category()

            # create DBHandlers
            self.peer_db = PeerDBHandler(self.session)
            self.torrent_db = TorrentDBHandler(self.session)
            self.mypref_db = MyPreferenceDBHandler(self.session)
            self.votecast_db = VoteCastDBHandler(self.session)
            self.channelcast_db = ChannelCastDBHandler(self.session)

            # initializes DBHandlers
            self.peer_db.initialize()
            self.torrent_db.initialize()
            self.mypref_db.initialize()
            self.votecast_db.initialize()
            self.channelcast_db.initialize()

        # NOTE(review): exact nesting of the following sections was reconstructed
        # from a whitespace-mangled source -- confirm against upstream Tribler.
        from Tribler.Core.Modules.tracker_manager import TrackerManager
        self.tracker_manager = TrackerManager(self.session)

        if self.session.config.get_video_server_enabled():
            self.video_server = VideoServer(self.session.config.get_video_server_port(), self.session)
            self.video_server.start()

        # IPv8
        if self.session.config.get_ipv8_enabled():
            from Tribler.pyipv8.ipv8.configuration import get_default_configuration
            ipv8_config = get_default_configuration()
            ipv8_config['port'] = self.session.config.get_dispersy_port()
            ipv8_config['address'] = self.session.config.get_ipv8_address()
            ipv8_config['overlays'] = []
            ipv8_config['keys'] = []  # We load the keys ourselves

            if self.session.config.get_ipv8_bootstrap_override():
                import Tribler.pyipv8.ipv8.deprecated.community as community_file
                community_file._DEFAULT_ADDRESSES = [self.session.config.get_ipv8_bootstrap_override()]
                community_file._DNS_ADDRESSES = []

            self.ipv8 = IPv8(ipv8_config)

            self.session.config.set_anon_proxy_settings(
                2, ("127.0.0.1", self.session.config.get_tunnel_community_socks5_listen_ports()))

        # Dispersy
        self.tftp_handler = None
        if self.session.config.get_dispersy_enabled():
            from Tribler.dispersy.dispersy import Dispersy
            from Tribler.dispersy.endpoint import MIMEndpoint
            from Tribler.dispersy.endpoint import IPv8toDispersyAdapter

            # set communication endpoint
            if self.session.config.get_ipv8_enabled():
                dispersy_endpoint = IPv8toDispersyAdapter(self.ipv8.endpoint)
            else:
                dispersy_endpoint = MIMEndpoint(self.session.config.get_dispersy_port())

            working_directory = unicode(self.session.config.get_state_dir())
            self.dispersy = Dispersy(dispersy_endpoint, working_directory)
            self.dispersy.statistics.enable_debug_statistics(False)

            # register TFTP service
            from Tribler.Core.TFTP.handler import TftpHandler
            self.tftp_handler = TftpHandler(self.session, dispersy_endpoint, "fffffffd".decode('hex'),
                                            block_size=1024)
            self.tftp_handler.initialize()

        # Torrent search
        if self.session.config.get_torrent_search_enabled() or self.session.config.get_channel_search_enabled():
            self.search_manager = SearchManager(self.session)
            self.search_manager.initialize()

    if not self.initComplete:
        self.init()

    self.session.add_observer(self.on_tribler_started, NTFY_TRIBLER, [NTFY_STARTED])
    self.session.notifier.notify(NTFY_TRIBLER, NTFY_STARTED, None)
    return self.startup_deferred
class DispersyExperimentScriptClient(ExperimentClient):
    """Experiment client that drives a Dispersy instance from a scenario file.

    Subclasses set ``scenario_file`` and ``community_class`` and may override
    ``registerCallbacks`` to register extra scenario commands.  The client
    periodically dumps Dispersy statistics to ``statistics.log`` (one
    JSON-encoded delta per line, see ``print_on_change``).
    """

    # Relative path (inside EXPERIMENT_DIR) of the scenario to run; set by subclass.
    scenario_file = None

    def __init__(self, vars):
        ExperimentClient.__init__(self, vars)
        self._dispersy = None
        self._community = None
        self._database_file = u"dispersy.db"
        self._dispersy_exit_status = None
        self._is_joined = False
        # When True, any Dispersy exception aborts the experiment (see start_dispersy).
        self._strict = True
        self.community_args = []
        self.community_kwargs = {}
        self._stats_file = None
        # Calls queued by buffer_call() while offline; flushed by empty_buffer().
        self._online_buffer = []

        self._crypto = self.initializeCrypto()
        self.generateMyMember()
        # Publish our private key so peers can impersonate/verify us via all_vars.
        self.vars['private_keypair'] = base64.encodestring(self.my_member_private_key)

    def onVarsSend(self):
        """Read the scenario file before the experiment variables are sent."""
        scenario_file_path = path.join(environ['EXPERIMENT_DIR'], self.scenario_file)
        self.scenario_runner = ScenarioRunner(scenario_file_path)
        t1 = time()
        self.scenario_runner._read_scenario(scenario_file_path)
        msg('Took %.2f to read scenario file' % (time() - t1))

    def onIdReceived(self):
        """Register scenario commands and parse the scenario once our peer id is known."""
        self.scenario_runner.set_peernumber(int(self.my_id))

        # TODO(emilon): Auto-register this stuff
        self.scenario_runner.register(self.echo)
        self.scenario_runner.register(self.online)
        self.scenario_runner.register(self.offline)
        self.scenario_runner.register(self.churn)
        self.scenario_runner.register(self.churn, 'churn_pattern')
        self.scenario_runner.register(self.set_community_kwarg)
        self.scenario_runner.register(self.set_database_file)
        self.scenario_runner.register(self.use_memory_database)
        self.scenario_runner.register(self.set_ignore_exceptions)
        self.scenario_runner.register(self.start_dispersy)
        self.scenario_runner.register(self.stop_dispersy)
        self.scenario_runner.register(self.stop)
        self.scenario_runner.register(self.set_master_member)
        self.scenario_runner.register(self.reset_dispersy_statistics, 'reset_dispersy_statistics')
        self.scenario_runner.register(self.annotate)
        self.scenario_runner.register(self.peertype)

        # Hook for subclasses to register community-specific scenario commands.
        self.registerCallbacks()

        t1 = time()
        self.scenario_runner.parse_file()
        msg('Took %.2f to parse scenario file' % (time() - t1))

    def startExperiment(self):
        """Create the per-peer output dir, open the stats log and run the scenario."""
        msg("Starting dispersy scenario experiment")

        # TODO(emilon): Move this to the right place
        # TODO(emilon): Do we want to have the .dbs in the output dirs or should they be dumped to /tmp?
        my_dir = path.join(environ['OUTPUT_DIR'], self.my_id)
        makedirs(my_dir)
        chdir(my_dir)
        self._stats_file = open("statistics.log", 'w')

        # TODO(emilon): Fix me or kill me
        try:
            bootstrap_fn = path.join(environ['PROJECT_DIR'], 'tribler', 'bootstraptribler.txt')
            if not path.exists(bootstrap_fn):
                bootstrap_fn = path.join(environ['PROJECT_DIR'], 'bootstraptribler.txt')
            symlink(bootstrap_fn, 'bootstraptribler.txt')
        except OSError:
            # Best effort: the symlink may already exist or the source may be missing.
            pass

        self.scenario_runner.run()

    def registerCallbacks(self):
        """Subclass hook: register additional scenario commands here."""
        pass

    def initializeCrypto(self):
        """Return the crypto implementation selected by $TRACKER_CRYPTO (ECCrypto by default)."""
        # Import fallback: running inside the Tribler tree vs. standalone dispersy checkout.
        try:
            from Tribler.dispersy.crypto import ECCrypto, NoCrypto
        except ImportError:
            from dispersy.crypto import ECCrypto, NoCrypto

        if environ.get('TRACKER_CRYPTO', 'ECCrypto') == 'ECCrypto':
            msg('Turning on ECCrypto')
            return ECCrypto()
        msg('Turning off Crypto')
        return NoCrypto()

    @property
    def my_member_key_curve(self):
        # low (NID_sect233k1) isn't actually that low, switching to 160bits as this is comparable to rsa 1024
        # http://www.nsa.gov/business/programs/elliptic_curve.shtml
        # speed difference when signing/verifying 100 items
        # NID_sect233k1 signing took 0.171 verify took 0.35 totals 0.521
        # NID_secp160k1 signing took 0.04 verify took 0.04 totals 0.08
        return u"NID_secp160k1"

    def generateMyMember(self):
        """Generate this peer's keypair and cache the serialized public/private keys."""
        ec = self._crypto.generate_key(self.my_member_key_curve)
        self.my_member_key = self._crypto.key_to_bin(ec.pub())
        self.my_member_private_key = self._crypto.key_to_bin(ec)

    #
    # Actions
    #

    def echo(self, *argv):
        msg("%s ECHO" % self.my_id, ' '.join(argv))

    def set_community_args(self, args):
        """
        Example: '1292333014,12923340000'
        """
        self.community_args = args.split(',')

    def set_community_kwargs(self, kwargs):
        """
        Example: 'startingtimestamp=1292333014,endingtimestamp=12923340000'
        """
        for karg in kwargs.split(","):
            if "=" in karg:
                key, value = karg.split("=", 1)
                self.community_kwargs[key.strip()] = value.strip()

    def set_community_kwarg(self, key, value):
        self.community_kwargs[key] = value

    def set_database_file(self, filename):
        self._database_file = unicode(filename)

    def use_memory_database(self):
        self._database_file = u':memory:'

    def set_ignore_exceptions(self, boolean):
        # Scenario passes a string; strict mode is the inverse of "ignore exceptions".
        self._strict = not self.str2bool(boolean)

    def start_dispersy(self, autoload_discovery=True):
        """Create and start the Dispersy instance and resolve master/my members."""
        msg("Starting dispersy")

        # We need to import the stuff _AFTER_ configuring the logging stuff.
        try:
            from Tribler.dispersy.dispersy import Dispersy
            from Tribler.dispersy.endpoint import StandaloneEndpoint
        except ImportError:
            from dispersy.dispersy import Dispersy
            from dispersy.endpoint import StandaloneEndpoint

        # Each peer listens on 12000 + its experiment id.
        self._dispersy = Dispersy(StandaloneEndpoint(int(self.my_id) + 12000, '0.0.0.0'),
                                  u'.', self._database_file, self._crypto)
        self._dispersy.statistics.enable_debug_statistics(True)

        # Keep the original handler so online() can restore it after offline().
        self.original_on_incoming_packets = self._dispersy.on_incoming_packets

        if self._strict:
            def exception_handler(exception, fatal):
                msg("An exception occurred. Quitting because we are running with --strict enabled.")
                print >> stderr, "Exception was:"
                try:
                    raise exception
                except:
                    from traceback import print_exc
                    print_exc()

                # Set Dispersy's exit status to error
                self._dispersy_exit_status = 1
                # Stop the experiment
                reactor.callLater(1, self.stop)
                return True
            #self._dispersy.callback.attach_exception_handler(exception_handler)

        self._dispersy.start(autoload_discovery=autoload_discovery)

        if self.master_private_key:
            self._master_member = self._dispersy.get_member(private_key=self.master_private_key)
        else:
            self._master_member = self._dispersy.get_member(public_key=self.master_key)
        self._my_member = self._dispersy.get_member(private_key=self.my_member_private_key)
        assert self._master_member
        assert self._my_member

        # Kick off the periodic statistics logger (inlineCallbacks loop).
        self._do_log()

        self.print_on_change('community-kwargs', {}, self.community_kwargs)
        self.print_on_change('community-env', {}, {'pid': getpid()})

        msg("Finished starting dispersy")

    def stop_dispersy(self):
        self._dispersy_exit_status = self._dispersy.stop()

    def stop(self, retry=3):
        """Stop the reactor once Dispersy has shut down, retrying up to ``retry`` times."""
        retry = int(retry)
        if self._dispersy_exit_status is None and retry:
            # Dispersy is still stopping; poll again in a second.
            reactor.callLater(1, self.stop, retry - 1)
        else:
            msg("Dispersy exit status was:", self._dispersy_exit_status)
            reactor.callLater(0, reactor.stop)

    def set_master_member(self, pub_key, priv_key=''):
        self.master_key = pub_key.decode("HEX")
        self.master_private_key = priv_key.decode("HEX")

    def online(self, dont_empty=False):
        """Join the community (restoring packet processing); flush buffered calls unless told not to."""
        msg("Trying to go online")
        if self._community is None:
            msg("online")

            msg("join community %s as %s" % (self._master_member.mid.encode("HEX"),
                                             self._my_member.mid.encode("HEX")))
            self._dispersy.on_incoming_packets = self.original_on_incoming_packets
            self._community = self.community_class.init_community(self._dispersy, self._master_member,
                                                                  self._my_member,
                                                                  *self.community_args,
                                                                  **self.community_kwargs)
            self._community.auto_load = False

            assert self.is_online()
            if not dont_empty:
                self.empty_buffer()
        else:
            msg("online (we are already online)")

    def offline(self):
        """Unload all communities and drop incoming packets on the floor."""
        msg("Trying to go offline")

        if self._community is None and self._is_joined:
            msg("offline (we are already offline)")
        else:
            msg("offline")
            for community in self._dispersy.get_communities():
                community.unload_community()

            self._community = None
            self._dispersy.on_incoming_packets = lambda *params: None

        if self._database_file == u':memory:':
            msg("Be careful with memory databases and nodes going offline, "
                "you could be losing database because we're closing databases.")

    def is_online(self):
        return self._community is not None

    def churn(self, *args):
        self.print_on_change('community-churn', {}, {'args': args})

    def buffer_call(self, func, args, kargs):
        """Run ``func`` now if online with an empty backlog, otherwise queue it for empty_buffer()."""
        if len(self._online_buffer) == 0 and self.is_online():
            func(*args, **kargs)
        else:
            self._online_buffer.append((func, args, kargs))

    def empty_buffer(self):
        """Flush all calls queued while we were offline (best effort per call)."""
        assert self.is_online()

        # perform all tasks which were scheduled while we were offline
        for func, args, kargs in self._online_buffer:
            try:
                func(*args, **kargs)
            except Exception:
                # Best effort: log and keep flushing the remaining buffered calls.
                print_exc()

        self._online_buffer = []

    def reset_dispersy_statistics(self):
        self._dispersy._statistics.reset()

    def annotate(self, message):
        self._stats_file.write('%.1f %s %s %s\n' % (time(), self.my_id, "annotate", message))

    def peertype(self, peertype):
        self._stats_file.write('%.1f %s %s %s\n' % (time(), self.my_id, "peertype", peertype))

    #
    # Aux. functions
    #

    def get_private_keypair_by_id(self, peer_id):
        """Return (and cache, deserialized) the keypair of peer ``peer_id``; None if unknown."""
        if str(peer_id) in self.all_vars:
            key = self.all_vars[str(peer_id)]['private_keypair']
            if isinstance(key, basestring):
                # Lazily decode the base64 blob into a key object and cache it back.
                key = self.all_vars[str(peer_id)]['private_keypair'] = \
                    self._crypto.key_from_private_bin(base64.decodestring(key))
            return key

    def get_private_keypair(self, ip, port):
        """Return the keypair of the peer listening on (ip, port); logs an error if not found."""
        port = int(port)
        for peer_dict in self.all_vars.itervalues():
            if peer_dict['host'] == ip and int(peer_dict['port']) == port:
                key = peer_dict['private_keypair']
                if isinstance(key, basestring):
                    key = peer_dict['private_keypair'] = \
                        self._crypto.key_from_private_bin(base64.decodestring(key))
                return key
        err("Could not get_private_keypair for", ip, port)

    def str2bool(self, v):
        return v.lower() in ("yes", "true", "t", "1")

    def str2tuple(self, v):
        # "AtB" -> (A, B); "x.y" -> float; otherwise int.
        if len(v) > 1 and v[1] == "t":
            return (int(v[0]), int(v[2:]))
        if len(v) > 1 and v[1] == ".":
            return float(v)
        return int(v)

    def print_on_change(self, name, prev_dict, cur_dict):
        """Write to the stats log only the values of ``cur_dict`` that changed since ``prev_dict``.

        Returns the new snapshot to store if anything changed, otherwise
        ``prev_dict`` unchanged, so the caller can chain calls each interval.
        """
        def get_changed_values(prev_dict, cur_dict):
            new_values = {}
            changed_values = {}
            if cur_dict:
                for key, value in cur_dict.iteritems():
                    # convert key to make it printable
                    if not isinstance(key, (basestring, int, long, float)):
                        key = str(key)

                    # if this is a dict, recursively check for changed values
                    if isinstance(value, dict):
                        converted_dict, changed_in_dict = get_changed_values(prev_dict.get(key, {}), value)
                        new_values[key] = converted_dict
                        if changed_in_dict:
                            changed_values[key] = changed_in_dict

                    # else convert and compare single value
                    else:
                        if not isinstance(value, (basestring, int, long, float, Iterable)):
                            value = str(value)
                        new_values[key] = value
                        if prev_dict.get(key, None) != value:
                            changed_values[key] = value

            return new_values, changed_values

        new_values, changed_values = get_changed_values(prev_dict, cur_dict)
        if changed_values:
            self._stats_file.write('%.1f %s %s %s\n' % (time(), self.my_id, name,
                                                        json.dumps(changed_values)))
            self._stats_file.flush()
            return new_values
        return prev_dict

    @inlineCallbacks
    def _do_log(self):
        """Every 5 seconds, snapshot Dispersy statistics and log the deltas."""
        try:
            from Tribler.dispersy.candidate import CANDIDATE_STUMBLE_LIFETIME, CANDIDATE_WALK_LIFETIME, \
                CANDIDATE_INTRO_LIFETIME
        except ImportError:
            from dispersy.candidate import CANDIDATE_STUMBLE_LIFETIME, CANDIDATE_WALK_LIFETIME, \
                CANDIDATE_INTRO_LIFETIME

        # cid -> last_stumble timestamp -> set of member ids, accumulated over the whole run.
        total_stumbled_candidates = defaultdict(lambda: defaultdict(set))

        prev_statistics = {}
        prev_total_received = {}
        prev_total_dropped = {}
        prev_total_delayed = {}
        prev_total_outgoing = {}
        prev_total_fail = {}
        prev_endpoint_recv = {}
        prev_endpoint_send = {}
        prev_created_messages = {}

        while True:
            self._dispersy.statistics.update()

            communities_dict = {}
            for c in self._dispersy.statistics.communities:

                if c._community.dispersy_enable_candidate_walker:
                    # determine current size of candidates categories
                    nr_walked = nr_intro = nr_stumbled = 0

                    # we add all candidates which have a last_stumble > now - CANDIDATE_STUMBLE_LIFETIME
                    now = time()
                    for candidate in c._community.candidates.itervalues():
                        if candidate.last_stumble > now - CANDIDATE_STUMBLE_LIFETIME:
                            nr_stumbled += 1

                            mid = candidate.get_member().mid
                            total_stumbled_candidates[c.hex_cid][candidate.last_stumble].add(mid)

                        if candidate.last_walk > now - CANDIDATE_WALK_LIFETIME:
                            nr_walked += 1

                        if candidate.last_intro > now - CANDIDATE_INTRO_LIFETIME:
                            nr_intro += 1
                else:
                    # Walker disabled: category sizes are unknown.
                    nr_walked = nr_intro = nr_stumbled = "?"

                total_nr_stumbled_candidates = sum(len(members) for members
                                                   in total_stumbled_candidates[c.hex_cid].values())

                communities_dict[c.hex_cid] = {'classification': c.classification,
                                               'global_time': c.global_time,
                                               'sync_bloom_new': c.sync_bloom_new,
                                               'sync_bloom_reuse': c.sync_bloom_reuse,
                                               'sync_bloom_send': c.sync_bloom_send,
                                               'sync_bloom_skip': c.sync_bloom_skip,
                                               'nr_candidates': len(c.candidates) if c.candidates else 0,
                                               'nr_walked': nr_walked,
                                               'nr_stumbled': nr_stumbled,
                                               'nr_intro': nr_intro,
                                               'total_stumbled_candidates': total_nr_stumbled_candidates}

            # check for missing communities, reset candidates to 0
            cur_cids = communities_dict.keys()
            for cid, c in prev_statistics.get('communities', {}).iteritems():
                if cid not in cur_cids:
                    _c = c.copy()
                    _c['nr_candidates'] = "?"
                    _c['nr_walked'] = "?"
                    _c['nr_stumbled'] = "?"
                    _c['nr_intro'] = "?"
                    communities_dict[cid] = _c

            statistics_dict = {'conn_type': self._dispersy.statistics.connection_type,
                               'received_count': self._dispersy.statistics.total_received,
                               'success_count': self._dispersy.statistics.msg_statistics.success_count,
                               'drop_count': self._dispersy.statistics.msg_statistics.drop_count,
                               'delay_count': self._dispersy.statistics.msg_statistics.delay_received_count,
                               'delay_success': self._dispersy.statistics.msg_statistics.delay_success_count,
                               'delay_timeout': self._dispersy.statistics.msg_statistics.delay_timeout_count,
                               'delay_send': self._dispersy.statistics.msg_statistics.delay_send_count,
                               'created_count': self._dispersy.statistics.msg_statistics.created_count,
                               'total_up': self._dispersy.statistics.total_up,
                               'total_down': self._dispersy.statistics.total_down,
                               'total_send': self._dispersy.statistics.total_send,
                               'cur_sendqueue': self._dispersy.statistics.cur_sendqueue,
                               'total_candidates_discovered':
                                   self._dispersy.statistics.total_candidates_discovered,
                               'walk_attempt': self._dispersy.statistics.walk_attempt_count,
                               'walk_success': self._dispersy.statistics.walk_success_count,
                               'walk_invalid_response_identifier':
                                   self._dispersy.statistics.invalid_response_identifier_count,
                               'is_online': self.is_online(),
                               'communities': communities_dict}

            prev_statistics = self.print_on_change("statistics",
                                                   prev_statistics, statistics_dict)
            prev_total_dropped = self.print_on_change("statistics-dropped-messages",
                                                      prev_total_dropped,
                                                      self._dispersy.statistics.msg_statistics.drop_dict)
            prev_total_delayed = self.print_on_change("statistics-delayed-messages",
                                                      prev_total_delayed,
                                                      self._dispersy.statistics.msg_statistics.delay_dict)
            prev_total_received = self.print_on_change("statistics-successful-messages",
                                                       prev_total_received,
                                                       self._dispersy.statistics.msg_statistics.success_dict)
            prev_total_outgoing = self.print_on_change("statistics-outgoing-messages",
                                                       prev_total_outgoing,
                                                       self._dispersy.statistics.msg_statistics.outgoing_dict)
            prev_created_messages = self.print_on_change("statistics-created-messages",
                                                         prev_created_messages,
                                                         self._dispersy.statistics.msg_statistics.created_dict)
            prev_total_fail = self.print_on_change("statistics-walk-fail",
                                                   prev_total_fail,
                                                   self._dispersy.statistics.walk_failure_dict)
            prev_endpoint_recv = self.print_on_change("statistics-endpoint-recv",
                                                      prev_endpoint_recv,
                                                      self._dispersy.statistics.endpoint_recv)
            prev_endpoint_send = self.print_on_change("statistics-endpoint-send",
                                                      prev_endpoint_send,
                                                      self._dispersy.statistics.endpoint_send)

            yield deferLater(reactor, 5.0, lambda: None)
class TestCircuitDebugEndpoint(AbstractApiTest):
    """REST API tests for the debug/circuits endpoint of the tunnel community."""

    @blocking_call_on_reactor_thread
    @inlineCallbacks
    def setUp(self, autoload_discovery=True):
        yield super(TestCircuitDebugEndpoint, self).setUp(autoload_discovery=autoload_discovery)

        # Stand up a minimal Dispersy with a hidden tunnel community and wire it
        # into the session so the endpoint under test can find it.
        self.dispersy = Dispersy(ManualEnpoint(0), self.getStateDir())
        self.dispersy._database.open()
        master_member = DummyMember(self.dispersy, 1, "a" * 20)
        member = self.dispersy.get_new_member(u"curve25519")

        self.tunnel_community = HiddenTunnelCommunity(self.dispersy, master_member, member)
        self.dispersy.get_communities = lambda: [self.tunnel_community]
        self.session.get_dispersy_instance = lambda: self.dispersy

    def setUpPreSession(self):
        super(TestCircuitDebugEndpoint, self).setUpPreSession()
        self.config.set_tunnel_community_enabled(True)

    @deferred(timeout=10)
    def test_get_circuit_no_community(self):
        """
        Testing whether the API returns error 404 if no tunnel community is loaded
        """
        self.dispersy.get_communities = lambda: []
        return self.do_request('debug/circuits', expected_code=404)

    @deferred(timeout=10)
    def test_get_circuits(self):
        """
        Testing whether the API returns the correct circuits
        """
        hop = MockObject()
        hop.host = 'somewhere'
        hop.port = 4242

        circuit = MockObject()
        circuit.state = 'TESTSTATE'
        circuit.goal_hops = 42
        circuit.bytes_up = 200
        circuit.bytes_down = 400
        circuit.creation_time = 1234
        circuit.hops = [hop]

        self.tunnel_community.circuits = {'abc': circuit}

        def check_circuits(response):
            parsed = json.loads(response)
            circuits = parsed['circuits']
            self.assertEqual(len(circuits), 1)
            first = circuits[0]
            self.assertEqual(first['state'], 'TESTSTATE')
            self.assertEqual(first['bytes_up'], 200)
            self.assertEqual(first['bytes_down'], 400)
            self.assertEqual(len(first['hops']), 1)
            self.assertEqual(first['hops'][0]['host'], 'somewhere:4242')

        self.should_check_equality = False
        return self.do_request('debug/circuits', expected_code=200).addCallback(check_circuits)
class TriblerLaunchMany(TaskManager): def __init__(self): """ Called only once (unless we have multiple Sessions) by MainThread """ super(TriblerLaunchMany, self).__init__() self.initComplete = False self.registered = False self.dispersy = None self._logger = logging.getLogger(self.__class__.__name__) self.downloads = {} self.upnp_ports = [] self.session = None self.sesslock = None self.sessdoneflag = Event() self.shutdownstarttime = None # modules self.threadpool = ThreadPoolManager() self.torrent_store = None self.metadata_store = None self.rtorrent_handler = None self.tftp_handler = None self.cat = None self.peer_db = None self.torrent_db = None self.mypref_db = None self.votecast_db = None self.channelcast_db = None self.search_manager = None self.channel_manager = None self.videoplayer = None self.mainline_dht = None self.ltmgr = None self.tracker_manager = None self.torrent_checker = None self.tunnel_community = None def register(self, session, sesslock, autoload_discovery=True): if not self.registered: self.registered = True self.session = session self.sesslock = sesslock if self.session.get_torrent_store(): from Tribler.Core.leveldbstore import LevelDbStore self.torrent_store = LevelDbStore(self.session.get_torrent_store_dir()) if self.session.get_enable_metadata(): from Tribler.Core.leveldbstore import LevelDbStore self.metadata_store = LevelDbStore(self.session.get_metadata_store_dir()) # torrent collecting: RemoteTorrentHandler if self.session.get_torrent_collecting(): from Tribler.Core.RemoteTorrentHandler import RemoteTorrentHandler self.rtorrent_handler = RemoteTorrentHandler(self.session) # TODO(emilon): move this to a megacache component or smth if self.session.get_megacache(): from Tribler.Core.CacheDB.SqliteCacheDBHandler import (PeerDBHandler, TorrentDBHandler, MyPreferenceDBHandler, VoteCastDBHandler, ChannelCastDBHandler) from Tribler.Category.Category import Category self._logger.debug('tlm: Reading Session state from %s', 
self.session.get_state_dir()) self.cat = Category.getInstance(self.session) # create DBHandlers self.peer_db = PeerDBHandler(self.session) self.torrent_db = TorrentDBHandler(self.session) self.mypref_db = MyPreferenceDBHandler(self.session) self.votecast_db = VoteCastDBHandler(self.session) self.channelcast_db = ChannelCastDBHandler(self.session) # initializes DBHandlers self.peer_db.initialize() self.torrent_db.initialize() self.mypref_db.initialize() self.votecast_db.initialize() self.channelcast_db.initialize() from Tribler.Core.Modules.tracker_manager import TrackerManager self.tracker_manager = TrackerManager(self.session) self.tracker_manager.initialize() if self.session.get_videoplayer(): self.videoplayer = VideoPlayer(self.session) # Dispersy self.session.dispersy_member = None self.tftp_handler = None if self.session.get_dispersy(): from Tribler.dispersy.dispersy import Dispersy from Tribler.dispersy.endpoint import StandaloneEndpoint # set communication endpoint endpoint = StandaloneEndpoint(self.session.get_dispersy_port(), ip=self.session.get_ip()) working_directory = unicode(self.session.get_state_dir()) self.dispersy = Dispersy(endpoint, working_directory) # register TFTP service from Tribler.Core.TFTP.handler import TftpHandler self.tftp_handler = TftpHandler(self.session, endpoint, "fffffffd".decode('hex'), block_size=1024) self.tftp_handler.initialize() if self.session.get_enable_torrent_search() or self.session.get_enable_channel_search(): self.search_manager = SearchManager(self.session) self.search_manager.initialize() if self.session.get_enable_channel_search(): from Tribler.Core.Modules.channel_manager import ChannelManager self.channel_manager = ChannelManager(self.session) self.channel_manager.initialize() if not self.initComplete: self.init(autoload_discovery) def init(self, autoload_discovery): if self.dispersy: from Tribler.dispersy.community import HardKilledCommunity self._logger.info("lmc: Starting Dispersy...") now = timemod.time() 
success = self.dispersy.start(autoload_discovery) diff = timemod.time() - now if success: self._logger.info("lmc: Dispersy started successfully in %.2f seconds [port: %d]", diff, self.dispersy.wan_address[1]) else: self._logger.info("lmc: Dispersy failed to start in %.2f seconds", diff) self.upnp_ports.append((self.dispersy.wan_address[1], 'UDP')) from Tribler.dispersy.crypto import M2CryptoSK self.session.dispersy_member = blockingCallFromThread(reactor, self.dispersy.get_member, private_key=self.dispersy.crypto.key_to_bin(M2CryptoSK(filename=self.session.get_permid_keypair_filename()))) blockingCallFromThread(reactor, self.dispersy.define_auto_load, HardKilledCommunity, self.session.dispersy_member, load=True) if self.session.get_megacache(): self.dispersy.database.attach_commit_callback(self.session.sqlite_db.commit_now) # notify dispersy finished loading self.session.notifier.notify(NTFY_DISPERSY, NTFY_STARTED, None) @blocking_call_on_reactor_thread def load_communities(): # load communities # Search Community if self.session.get_enable_torrent_search(): from Tribler.community.search.community import SearchCommunity self.dispersy.define_auto_load(SearchCommunity, self.session.dispersy_member, load=True, kargs={'tribler_session': self.session}) # AllChannel Community if self.session.get_enable_channel_search(): from Tribler.community.allchannel.community import AllChannelCommunity self.dispersy.define_auto_load(AllChannelCommunity, self.session.dispersy_member, load=True, kargs={'tribler_session': self.session}) load_communities() from Tribler.Core.DecentralizedTracking import mainlineDHT try: self.mainline_dht = mainlineDHT.init(('127.0.0.1', self.session.get_mainline_dht_listen_port()), self.session.get_state_dir()) self.upnp_ports.append((self.session.get_mainline_dht_listen_port(), 'UDP')) except: print_exc() if self.session.get_libtorrent(): from Tribler.Core.Libtorrent.LibtorrentMgr import LibtorrentMgr self.ltmgr = LibtorrentMgr(self.session) 
self.ltmgr.initialize() # FIXME(lipu): upnp APIs are not exported in libtorrent python-binding. #for port, protocol in self.upnp_ports: # self.ltmgr.add_upnp_mapping(port, protocol) # add task for tracker checking if self.session.get_torrent_checking(): try: from Tribler.Core.TorrentChecker.torrent_checker import TorrentChecker self.torrent_checker = TorrentChecker(self.session) self.torrent_checker.initialize() except: print_exc() if self.rtorrent_handler: self.rtorrent_handler.initialize() self.initComplete = True def add(self, tdef, dscfg, pstate=None, initialdlstatus=None, setupDelay=0, hidden=False): """ Called by any thread """ d = None self.sesslock.acquire() try: if not isinstance(tdef, TorrentDefNoMetainfo) and not tdef.is_finalized(): raise ValueError("TorrentDef not finalized") infohash = tdef.get_infohash() # Check if running or saved on disk if infohash in self.downloads: raise DuplicateDownloadException() from Tribler.Core.Libtorrent.LibtorrentDownloadImpl import LibtorrentDownloadImpl d = LibtorrentDownloadImpl(self.session, tdef) if pstate is None: # not already resuming pstate = self.load_download_pstate_noexc(infohash) if pstate is not None: self._logger.debug("tlm: add: pstate is %s %s", pstate.get('dlstate', 'status'), pstate.get('dlstate', 'progress')) # Store in list of Downloads, always. 
self.downloads[infohash] = d d.setup(dscfg, pstate, initialdlstatus, self.network_engine_wrapper_created_callback, wrapperDelay=setupDelay) finally: self.sesslock.release() if d and not hidden and self.session.get_megacache(): @forceDBThread def write_my_pref(): torrent_id = self.torrent_db.getTorrentID(infohash) data = {'destination_path': d.get_dest_dir()} self.mypref_db.addMyPreference(torrent_id, data) if isinstance(tdef, TorrentDefNoMetainfo): self.torrent_db.addOrGetTorrentID(tdef.get_infohash()) self.torrent_db.updateTorrent(tdef.get_infohash(), name=tdef.get_name_as_unicode()) write_my_pref() elif self.rtorrent_handler: self.rtorrent_handler.save_torrent(tdef, write_my_pref) else: self.torrent_db.addExternalTorrent(tdef, extra_info={'status': 'good'}) write_my_pref() return d def network_engine_wrapper_created_callback(self, d, pstate): """ Called by network thread """ try: if pstate is None: # Checkpoint at startup (infohash, pstate) = d.network_checkpoint() self.save_download_pstate(infohash, pstate) except: print_exc() def remove(self, d, removecontent=False, removestate=True, hidden=False): """ Called by any thread """ with self.sesslock: d.stop_remove(removestate=removestate, removecontent=removecontent) infohash = d.get_def().get_infohash() if infohash in self.downloads: del self.downloads[infohash] if not hidden: self.remove_id(infohash) def remove_id(self, infohash): @forceDBThread def do_db(infohash): torrent_id = self.torrent_db.getTorrentID(infohash) if torrent_id: self.mypref_db.deletePreference(torrent_id) if self.session.get_megacache(): do_db(infohash) def get_downloads(self): """ Called by any thread """ with self.sesslock: return self.downloads.values() # copy, is mutable def get_download(self, infohash): """ Called by any thread """ with self.sesslock: return self.downloads.get(infohash, None) def download_exists(self, infohash): with self.sesslock: return infohash in self.downloads def update_trackers(self, infohash, trackers): """ Update 
the trackers for a download. :param infohash: infohash of the torrent that needs to be updated :param trackers: A list of tracker urls. """ dl = self.get_download(infohash) old_def = dl.get_def() if dl else None if old_def: old_trackers = old_def.get_trackers_as_single_tuple() new_trackers = list(set(trackers) - set(old_trackers)) all_trackers = list(old_trackers) + new_trackers if new_trackers: # Add new trackers to the download dl.add_trackers(new_trackers) # Create a new TorrentDef if isinstance(old_def, TorrentDefNoMetainfo): new_def = TorrentDefNoMetainfo(old_def.get_infohash(), old_def.get_name(), dl.get_magnet_link()) else: metainfo = old_def.get_metainfo() if len(all_trackers) > 1: metainfo["announce-list"] = [all_trackers] else: metainfo["announce"] = all_trackers[0] new_def = TorrentDef.load_from_dict(metainfo) # Set TorrentDef + checkpoint dl.set_def(new_def) dl.checkpoint() if isinstance(old_def, TorrentDefNoMetainfo): @forceDBThread def update_trackers_db(infohash, new_trackers): torrent_id = self.torrent_db.getTorrentID(infohash) if torrent_id is not None: self.torrent_db.addTorrentTrackerMappingInBatch(torrent_id, new_trackers) self.session.notifier.notify(NTFY_TORRENTS, NTFY_UPDATE, infohash) if self.session.get_megacache(): update_trackers_db(infohash, new_trackers) elif not isinstance(old_def, TorrentDefNoMetainfo) and self.rtorrent_handler: # Update collected torrents self.rtorrent_handler.save_torrent(new_def) # # State retrieval # def set_download_states_callback(self, usercallback, getpeerlist, when=0.0): """ Called by any thread """ for d in self.downloads.values(): # Arno, 2012-05-23: At Niels' request to get total transferred # stats. Causes MOREINFO message to be sent from swift proc # for every initiated dl. # 2012-07-31: Turn MOREINFO on/off on demand for efficiency. # 2013-04-17: Libtorrent now uses set_moreinfo_stats as well. 
# NOTE(review): the next three statements are the tail of a method that begins
# before this chunk (the scheduling half of set_download_states_callback);
# reproduced as-is, outside any fix made below.
d.set_moreinfo_stats(True in getpeerlist or d.get_def().get_infohash() in getpeerlist)
network_set_download_states_callback_lambda = lambda: self.network_set_download_states_callback(usercallback)
self.threadpool.add_task(network_set_download_states_callback_lambda, when)

def network_set_download_states_callback(self, usercallback):
    """ Called by network thread.

    Collect a DownloadState for every known download and hand the list to
    usercallback on a separate thread. If usercallback returns a positive
    'when', the collection is rescheduled.
    """
    dslist = []
    for d in self.downloads.values():
        try:
            ds = d.network_get_state(None, False)
            dslist.append(ds)
        except Exception:
            # Niels, 2012-10-18: If Swift connection is crashing, it will raise an exception
            # We're catching it here to continue building the downloadstates
            print_exc()

    # Invoke the usercallback function on a separate thread.
    # After the callback is invoked, the return values will be passed to the
    # returncallback for post-callback processing.
    def session_getstate_usercallback_target():
        when, newgetpeerlist = usercallback(dslist)
        if when > 0.0:
            # reschedule
            self.set_download_states_callback(usercallback, newgetpeerlist, when=when)

    self.threadpool.add_task(session_getstate_usercallback_target)

#
# Persistence methods
#
def load_checkpoint(self, initialdlstatus=None, initialdlstatus_dict=None):
    """ Called by any thread.

    Resume every download that has a '*.state' checkpoint file on disk.
    If initialization is not complete yet, the work is postponed by one second.
    """
    # FIX: was a mutable default argument ({}), which is shared across calls.
    if initialdlstatus_dict is None:
        initialdlstatus_dict = {}

    def do_load_checkpoint(initialdlstatus, initialdlstatus_dict):
        with self.sesslock:
            for i, filename in enumerate(iglob(os.path.join(self.session.get_downloads_pstate_dir(), '*.state'))):
                # Stagger resumes (0.1s apart) to avoid a startup burst.
                self.resume_download(filename, initialdlstatus, initialdlstatus_dict, setupDelay=i * 0.1)

    if self.initComplete:
        do_load_checkpoint(initialdlstatus, initialdlstatus_dict)
    else:
        # FIX: do_load_checkpoint takes two arguments; the original scheduled
        # it with none, raising TypeError when the delayed call fired.
        self.register_task("load_checkpoint",
                           reactor.callLater(1, do_load_checkpoint, initialdlstatus, initialdlstatus_dict))

def load_download_pstate_noexc(self, infohash):
    """ Called by any thread, assume sesslock already held.

    Load the persistent state for infohash; return None (implicitly) and log
    on any failure instead of raising.
    """
    try:
        basename = binascii.hexlify(infohash) + '.state'
        filename = os.path.join(self.session.get_downloads_pstate_dir(), basename)
        if os.path.exists(filename):
            return self.load_download_pstate(filename)
        else:
            self._logger.info("%s not found", basename)
    except Exception:
        self._logger.exception("Exception while loading pstate: %s", infohash)

def resume_download(self, filename, initialdlstatus=None, initialdlstatus_dict=None, setupDelay=0):
    """Resume a single download from its '.state' checkpoint file.

    Falls back to the collected torrent data (torrent_store) when the
    checkpoint itself is broken or missing.
    """
    # FIX: was a mutable default argument ({}), which is shared across calls.
    if initialdlstatus_dict is None:
        initialdlstatus_dict = {}

    tdef = dscfg = pstate = None

    try:
        pstate = self.load_download_pstate(filename)

        # SWIFTPROC
        metainfo = pstate.get('state', 'metainfo')
        if 'infohash' in metainfo:
            tdef = TorrentDefNoMetainfo(metainfo['infohash'], metainfo['name'], metainfo.get('url', None))
        else:
            tdef = TorrentDef.load_from_dict(metainfo)

        # Old checkpoints stored 'saveas' as a tuple; keep only the last element.
        if pstate.has_option('downloadconfig', 'saveas') and \
                isinstance(pstate.get('downloadconfig', 'saveas'), tuple):
            pstate.set('downloadconfig', 'saveas', pstate.get('downloadconfig', 'saveas')[-1])

        dscfg = DownloadStartupConfig(pstate)

    except Exception:
        # pstate is invalid or non-existing
        _, file = os.path.split(filename)

        infohash = binascii.unhexlify(file[:-6])  # strip the '.state' suffix

        torrent_data = self.torrent_store.get(infohash)
        if torrent_data:
            tdef = TorrentDef.load_from_memory(torrent_data)
            defaultDLConfig = DefaultDownloadStartupConfig.getInstance()
            dscfg = defaultDLConfig.copy()

            if self.mypref_db is not None:
                dest_dir = self.mypref_db.getMyPrefStatsInfohash(infohash)
                if dest_dir:
                    if os.path.isdir(dest_dir) or dest_dir == '':
                        dscfg.set_dest_dir(dest_dir)

    # FIX: pstate can still be None here (load failed above) — the original
    # called pstate.get(...) unguarded and crashed with AttributeError.
    if pstate is not None:
        self._logger.debug("tlm: load_checkpoint: pstate is %s %s",
                           pstate.get('dlstate', 'status'), pstate.get('dlstate', 'progress'))
    if pstate is None or pstate.get('state', 'engineresumedata') is None:
        self._logger.debug("tlm: load_checkpoint: resumedata None")
    else:
        self._logger.debug("tlm: load_checkpoint: resumedata len %d",
                           len(pstate.get('state', 'engineresumedata')))

    if tdef and dscfg:
        if dscfg.get_dest_dir() != '':  # removed torrent ignoring
            try:
                if not self.download_exists(tdef.get_infohash()):
                    initialdlstatus = initialdlstatus_dict.get(tdef.get_infohash(), initialdlstatus)
                    self.add(tdef, dscfg, pstate, initialdlstatus, setupDelay=setupDelay)
                else:
                    self._logger.info("tlm: not resuming checkpoint because download has already been added")
            except Exception:
                self._logger.exception("tlm: load check_point: exception while adding download %s", tdef)
        else:
            self._logger.info("tlm: removing checkpoint %s destdir is %s", filename, dscfg.get_dest_dir())
            os.remove(filename)
    else:
        self._logger.info("tlm: could not resume checkpoint %s %s %s", filename, tdef, dscfg)

def checkpoint(self, stop=False, checkpoint=True, gracetime=2.0):
    """ Called by any thread, assume sesslock already held """
    # Even if the list of Downloads changes in the mean time this is
    # no problem. For removals, dllist will still hold a pointer to the
    # Download, and additions are no problem (just won't be included
    # in list of states returned via callback.
    #
    dllist = self.downloads.values()
    self._logger.debug("tlm: checkpointing %s stopping %s", len(dllist), stop)

    network_checkpoint_callback_lambda = lambda: self.network_checkpoint_callback(dllist, stop, checkpoint,
                                                                                  gracetime)
    self.threadpool.add_task(network_checkpoint_callback_lambda, 0.0)

def network_checkpoint_callback(self, dllist, stop, checkpoint, gracetime):
    """ Called by network thread.

    Save persistent state for each download (optionally stopping it), then —
    when stop is requested — shut the network side down after a grace period
    for early-shutdown tasks.
    """
    if checkpoint:
        for d in dllist:
            try:
                # Tell all downloads to stop, and save their persistent state
                # in a infohash -> pstate dict which is then passed to the user
                # for storage.
                #
                if stop:
                    (infohash, pstate) = d.network_stop(False, False)
                else:
                    (infohash, pstate) = d.network_checkpoint()

                self._logger.debug("tlm: network checkpointing: %s %s", d.get_def().get_name(), pstate)

                self.save_download_pstate(infohash, pstate)
            except Exception:
                self._logger.exception("Exception while checkpointing: %s", d.get_def().get_name())

    if stop:
        # Some grace time for early shutdown tasks
        if self.shutdownstarttime is not None:
            now = timemod.time()
            diff = now - self.shutdownstarttime
            if diff < gracetime:
                self._logger.info("tlm: shutdown: delaying for early shutdown tasks %s", gracetime - diff)
                delay = gracetime - diff
                network_shutdown_callback_lambda = lambda: self.network_shutdown()
                self.threadpool.add_task(network_shutdown_callback_lambda, delay)
                return

        self.network_shutdown()

def remove_pstate(self, infohash):
    """Schedule removal of the checkpoint file for infohash on the network thread."""
    network_remove_pstate_callback_lambda = lambda: self.network_remove_pstate_callback(infohash)
    self.threadpool.add_task(network_remove_pstate_callback_lambda, 0.0)

def network_remove_pstate_callback(self, infohash):
    """Delete the '.state' checkpoint file for infohash, unless the download reappeared."""
    if not self.download_exists(infohash):
        dlpstatedir = self.session.get_downloads_pstate_dir()

        # Remove checkpoint
        hexinfohash = binascii.hexlify(infohash)
        try:
            basename = hexinfohash + '.state'
            filename = os.path.join(dlpstatedir, basename)
            self._logger.debug("remove pstate: removing dlcheckpoint entry %s", filename)
            if os.access(filename, os.F_OK):
                os.remove(filename)
        except Exception:
            # Show must go on
            self._logger.exception("Could not remove state")
    else:
        self._logger.warning("remove pstate: download is back, restarted? Canceling removal! %s",
                             repr(infohash))

def early_shutdown(self):
    """ Called as soon as Session shutdown is initiated. Used to start
    shutdown tasks that takes some time and that can run in parallel
    to checkpointing, etc.
    """
    self._logger.info("tlm: early_shutdown")

    self.cancel_all_pending_tasks()

    # Note: sesslock not held
    self.shutdownstarttime = timemod.time()
    if self.torrent_checker:
        self.torrent_checker.shutdown()
        self.torrent_checker = None
    if self.channel_manager:
        self.channel_manager.shutdown()
        self.channel_manager = None
    if self.search_manager:
        self.search_manager.shutdown()
        self.search_manager = None
    if self.rtorrent_handler:
        self.rtorrent_handler.shutdown()
        self.rtorrent_handler = None
    if self.videoplayer:
        self.videoplayer.shutdown()
        self.videoplayer = None

    if self.tracker_manager:
        self.tracker_manager.shutdown()
        self.tracker_manager = None
    if self.dispersy:
        self._logger.info("lmc: Shutting down Dispersy...")
        now = timemod.time()
        try:
            success = self.dispersy.stop()
        except Exception:
            print_exc()
            success = False

        diff = timemod.time() - now
        if success:
            self._logger.info("lmc: Dispersy successfully shutdown in %.2f seconds", diff)
        else:
            self._logger.info("lmc: Dispersy failed to shutdown in %.2f seconds", diff)

    if self.metadata_store is not None:
        self.metadata_store.close()
        self.metadata_store = None

    if self.tftp_handler:
        self.tftp_handler.shutdown()
        self.tftp_handler = None

    if self.session.get_megacache():
        self.channelcast_db.close()
        self.votecast_db.close()
        self.mypref_db.close()
        self.torrent_db.close()
        self.peer_db.close()

        self.channelcast_db = None
        self.votecast_db = None
        self.mypref_db = None
        self.torrent_db = None
        self.peer_db = None

    if self.mainline_dht:
        from Tribler.Core.DecentralizedTracking import mainlineDHT
        mainlineDHT.deinit(self.mainline_dht)
        self.mainline_dht = None

    if self.torrent_store is not None:
        self.torrent_store.close()
        self.torrent_store = None

def network_shutdown(self):
    """Final shutdown step: log leftover threads, stop the network thread and libtorrent."""
    try:
        self._logger.info("tlm: network_shutdown")

        ts = enumerate_threads()
        self._logger.info("tlm: Number of threads still running %d", len(ts))
        for t in ts:
            self._logger.info("tlm: Thread still running=%s, daemon=%s, instance=%s",
                              t.getName(), t.isDaemon(), t)
    except Exception:
        print_exc()

    # Stop network thread
    self.sessdoneflag.set()

    # Shutdown libtorrent session after checkpoints have been made
    if self.ltmgr:
        self.ltmgr.shutdown()
        self.ltmgr = None

    if self.threadpool:
        self.threadpool.cancel_all_pending_tasks()
        self.threadpool = None

def save_download_pstate(self, infohash, pstate):
    """ Called by network thread """
    basename = binascii.hexlify(infohash) + '.state'
    filename = os.path.join(self.session.get_downloads_pstate_dir(), basename)

    self._logger.debug("tlm: network checkpointing: to file %s", filename)
    pstate.write_file(filename)

def load_download_pstate(self, filename):
    """ Called by any thread """
    pstate = CallbackConfigParser()
    pstate.read_file(filename)
    return pstate

#
# Events from core meant for API user
#
def sessconfig_changed_callback(self, section, name, new_value, old_value):
    """Apply a runtime session-config change; return True when the change is allowed at runtime."""
    value_changed = new_value != old_value
    if section == 'libtorrent' and name == 'utp':
        if self.ltmgr and value_changed:
            self.ltmgr.set_utp(new_value)
    elif section == 'libtorrent' and name == 'lt_proxyauth':
        if self.ltmgr:
            self.ltmgr.set_proxy_settings(None, *self.session.get_libtorrent_proxy_settings())

    # Return True/False, depending on whether or not the config value can be changed at runtime.
    elif (section == 'general' and name in ['nickname', 'mugshot', 'videoanalyserpath']) or \
         (section == 'libtorrent' and name in ['lt_proxytype', 'lt_proxyserver',
                                               'anon_proxyserver', 'anon_proxytype', 'anon_proxyauth',
                                               'anon_listen_port']) or \
         (section == 'torrent_collecting' and name in ['stop_collecting_threshold']) or \
         (section == 'tunnel_community' and name in ['socks5_listen_port']):
        return True
    else:
        return False
    return True
class TestCircuitDebugEndpoint(AbstractApiTest):
    """REST-API tests for the debug endpoints: circuits, open files/sockets,
    threads, cpu/memory history, memory dump and log retrieval."""

    @blocking_call_on_reactor_thread
    @inlineCallbacks
    def setUp(self, autoload_discovery=True):
        yield super(TestCircuitDebugEndpoint, self).setUp(autoload_discovery=autoload_discovery)

        # Stand-alone Dispersy with a dummy master member; the session is
        # monkey-patched to hand out this instance and its single community.
        self.dispersy = Dispersy(ManualEnpoint(0), self.getStateDir())
        self.dispersy._database.open()
        master_member = DummyMember(self.dispersy, 1, "a" * 20)
        member = self.dispersy.get_new_member(u"curve25519")

        self.tunnel_community = HiddenTunnelCommunity(self.dispersy, master_member, member)
        self.dispersy.get_communities = lambda: [self.tunnel_community]
        self.session.get_dispersy_instance = lambda: self.dispersy

    def setUpPreSession(self):
        super(TestCircuitDebugEndpoint, self).setUpPreSession()
        self.config.set_tunnel_community_enabled(True)

    @deferred(timeout=10)
    def test_get_circuit_no_community(self):
        """
        Testing whether the API returns error 404 if no tunnel community is loaded
        """
        self.dispersy.get_communities = lambda: []
        return self.do_request('debug/circuits', expected_code=404)

    @deferred(timeout=10)
    def test_get_circuits(self):
        """
        Testing whether the API returns the correct circuits
        """
        mock_hop = MockObject()
        mock_hop.host = 'somewhere'
        mock_hop.port = 4242
        mock_circuit = MockObject()
        mock_circuit.state = 'TESTSTATE'
        mock_circuit.goal_hops = 42
        mock_circuit.bytes_up = 200
        mock_circuit.bytes_down = 400
        mock_circuit.creation_time = 1234
        mock_circuit.hops = [mock_hop]
        self.tunnel_community.circuits = {'abc': mock_circuit}

        def verify_response(response):
            response_json = json.loads(response)
            self.assertEqual(len(response_json['circuits']), 1)
            self.assertEqual(response_json['circuits'][0]['state'], 'TESTSTATE')
            self.assertEqual(response_json['circuits'][0]['bytes_up'], 200)
            self.assertEqual(response_json['circuits'][0]['bytes_down'], 400)
            self.assertEqual(len(response_json['circuits'][0]['hops']), 1)
            # host and port are rendered as a single "host:port" string
            self.assertEqual(response_json['circuits'][0]['hops'][0]['host'], 'somewhere:4242')

        self.should_check_equality = False
        return self.do_request('debug/circuits', expected_code=200).addCallback(verify_response)

    @deferred(timeout=10)
    def test_get_open_files(self):
        """
        Test whether the API returns open files
        """
        def verify_response(response):
            response_json = json.loads(response)
            self.assertGreaterEqual(len(response_json['open_files']), 1)

        self.should_check_equality = False
        return self.do_request('debug/open_files', expected_code=200).addCallback(verify_response)

    @deferred(timeout=10)
    def test_get_open_sockets(self):
        """
        Test whether the API returns open sockets
        """
        def verify_response(response):
            response_json = json.loads(response)
            self.assertGreaterEqual(len(response_json['open_sockets']), 1)

        self.should_check_equality = False
        return self.do_request('debug/open_sockets', expected_code=200).addCallback(verify_response)

    @deferred(timeout=10)
    def test_get_threads(self):
        """
        Test whether the API returns open threads
        """
        def verify_response(response):
            response_json = json.loads(response)
            self.assertGreaterEqual(len(response_json['threads']), 1)

        self.should_check_equality = False
        return self.do_request('debug/threads', expected_code=200).addCallback(verify_response)

    @deferred(timeout=10)
    def test_get_cpu_history(self):
        """
        Test whether the API returns the cpu history
        """
        def verify_response(response):
            response_json = json.loads(response)
            self.assertGreaterEqual(len(response_json['cpu_history']), 1)

        # Force at least one sample before querying the endpoint.
        self.session.lm.resource_monitor.check_resources()
        self.should_check_equality = False
        return self.do_request('debug/cpu/history', expected_code=200).addCallback(verify_response)

    @deferred(timeout=10)
    def test_get_memory_history(self):
        """
        Test whether the API returns the memory history
        """
        def verify_response(response):
            response_json = json.loads(response)
            self.assertGreaterEqual(len(response_json['memory_history']), 1)

        # Force at least one sample before querying the endpoint.
        self.session.lm.resource_monitor.check_resources()
        self.should_check_equality = False
        return self.do_request('debug/memory/history', expected_code=200).addCallback(verify_response)

    @deferred(timeout=60)
    def test_dump_memory(self):
        """
        Test whether the API returns a memory dump
        """
        def verify_response(response):
            self.assertTrue(response)

        self.should_check_equality = False
        return self.do_request('debug/memory/dump', expected_code=200).addCallback(verify_response)

    @deferred(timeout=10)
    def test_debug_pane_core_logs(self):
        """
        Test whether the API returns the logs
        """
        test_core_log_message = "This is the core test log message"
        max_lines = 100

        # Log directory
        log_dir = SessionStartupConfig.load().get_log_dir()
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)

        # Fill logging files with statements
        core_info_log_file_path = os.path.join(log_dir, 'tribler-core-info.log')

        # write 100 test lines which is used to test for its presence in the response
        with open(core_info_log_file_path, "w") as core_info_log_file:
            for log_index in xrange(max_lines):
                core_info_log_file.write("%s %d\n" % (test_core_log_message, log_index))

        def verify_log_exists(response):
            json_response = json.loads(response)
            logs = json_response['content'].strip().split("\n")

            # Check number of logs returned is correct
            self.assertEqual(len(logs), max_lines)

            # Check if test log message is present in the logs, at least once
            log_exists = any((True for log in logs if test_core_log_message in log))
            self.assertTrue(log_exists, "Test log not found in the debug log response")

        self.should_check_equality = False
        return self.do_request('debug/log?process=core&max_lines=%d' % max_lines, expected_code=200)\
            .addCallback(verify_log_exists)

    @deferred(timeout=10)
    def test_debug_pane_default_num_logs(self):
        """
        Test whether the API returns the last 100 logs when no max_lines parameter is provided
        """
        test_core_log_message = "This is the gui test log message"
        expected_num_lines = 100

        # Log directory
        log_dir = SessionStartupConfig.load().get_log_dir()
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)
        gui_info_log_file_path = os.path.join(log_dir, 'tribler-gui-info.log')

        # write 200 (greater than expected_num_lines) test logs in file
        with open(gui_info_log_file_path, "w") as core_info_log_file:
            for log_index in xrange(200):  # write more logs
                core_info_log_file.write("%s %d\n" % (test_core_log_message, log_index))

        # Check number of logs returned is as expected
        def verify_max_logs_returned(response):
            json_response = json.loads(response)
            logs = json_response['content'].strip().split("\n")
            self.assertEqual(len(logs), expected_num_lines)

        self.should_check_equality = False
        return self.do_request('debug/log?process=gui&max_lines=', expected_code=200)\
            .addCallback(verify_max_logs_returned)
def register(self, session, sesslock):
    """One-time wiring of the session: raw server, optional swift process,
    Dispersy, megacache databases and torrent collecting.

    Safe to call repeatedly; only the first call does the work.
    NOTE(review): the method may continue beyond this chunk — verify against
    the full file.
    """
    if not self.registered:
        self.registered = True

        self.session = session
        self.sesslock = sesslock

        self.downloads = {}
        config = session.sessconfig  # Should be safe at startup

        self.upnp_ports = []

        # Orig
        self.sessdoneflag = Event()

        self.rawserver = RawServer(self.sessdoneflag,
                                   config['timeout_check_interval'],
                                   config['timeout'],
                                   ipv6_enable=config['ipv6_enabled'],
                                   failfunc=self.rawserver_fatalerrorfunc,
                                   errorfunc=self.rawserver_nonfatalerrorfunc)
        self.rawserver.add_task(self.rawserver_keepalive, 1)
        self.listen_port = config['minport']
        self.shutdownstarttime = None

        self.multihandler = MultiHandler(self.rawserver, self.sessdoneflag)

        # SWIFTPROC
        # The '.exe' probe covers Windows installs of the swift binary.
        swift_exists = config['swiftproc'] and \
            (os.path.exists(config['swiftpath']) or os.path.exists(config['swiftpath'] + '.exe'))
        if swift_exists:
            from Tribler.Core.Swift.SwiftProcessMgr import SwiftProcessMgr

            self.spm = SwiftProcessMgr(config['swiftpath'], config['swiftcmdlistenport'],
                                       config['swiftdlsperproc'],
                                       self.session.get_swift_tunnel_listen_port(), self.sesslock)
            try:
                self.swift_process = self.spm.get_or_create_sp(self.session.get_swift_working_dir(),
                                                               self.session.get_torrent_collecting_dir(),
                                                               self.session.get_swift_tunnel_listen_port(),
                                                               self.session.get_swift_tunnel_httpgw_listen_port(),
                                                               self.session.get_swift_tunnel_cmdgw_listen_port())
                self.upnp_ports.append((self.session.get_swift_tunnel_listen_port(), 'UDP'))
            except OSError:
                # could not find/run swift
                print >> sys.stderr, "lmc: could not start a swift process"
        else:
            self.spm = None
            self.swift_process = None

        # Dispersy
        self.session.dispersy_member = None
        if config['dispersy']:
            from Tribler.dispersy.callback import Callback
            from Tribler.dispersy.dispersy import Dispersy
            from Tribler.dispersy.endpoint import RawserverEndpoint, TunnelEndpoint
            from Tribler.dispersy.community import HardKilledCommunity

            # set communication endpoint
            if config['dispersy-tunnel-over-swift'] and self.swift_process:
                endpoint = TunnelEndpoint(self.swift_process)
            else:
                endpoint = RawserverEndpoint(self.rawserver, config['dispersy_port'])

            callback = Callback("Dispersy")  # WARNING NAME SIGNIFICANT
            working_directory = unicode(config['state_dir'])

            self.dispersy = Dispersy(callback, endpoint, working_directory)

            # TODO: see if we can postpone dispersy.start to improve GUI responsiveness.
            # However, for now we must start self.dispersy.callback before running
            # try_register(nocachedb, self.database_thread)!

            self.dispersy.start()

            print >> sys.stderr, "lmc: Dispersy is listening on port", self.dispersy.wan_address[1], "using", endpoint
            self.upnp_ports.append((self.dispersy.wan_address[1], 'UDP'))

            self.dispersy.callback.call(self.dispersy.define_auto_load,
                                        args=(HardKilledCommunity,), kargs={'load': True})

            # notify dispersy finished loading
            self.session.uch.notify(NTFY_DISPERSY, NTFY_STARTED, None)

            from Tribler.Core.permid import read_keypair
            from Tribler.dispersy.crypto import ec_to_public_bin, ec_to_private_bin
            keypair = read_keypair(self.session.get_permid_keypair_filename())
            self.session.dispersy_member = callback.call(self.dispersy.get_member,
                                                         (ec_to_public_bin(keypair), ec_to_private_bin(keypair)))

            # All database work is funneled through the Dispersy callback thread.
            self.database_thread = callback
        else:
            # Without Dispersy, emulate its callback interface on top of a
            # plain timed task queue so database users keep working.
            class FakeCallback():
                def __init__(self):
                    from Tribler.Utilities.TimedTaskQueue import TimedTaskQueue
                    self.queue = TimedTaskQueue("FakeCallback")

                def register(self, call, args=(), kargs=None, delay=0.0, priority=0, id_=u"",
                             callback=None, callback_args=(), callback_kargs=None, include_id=False):
                    def do_task():
                        if kargs:
                            call(*args, **kargs)
                        else:
                            call(*args)

                        if callback:
                            if callback_kargs:
                                callback(*callback_args, **callback_kargs)
                            else:
                                callback(*callback_args)
                    self.queue.add_task(do_task, t=delay)

                def shutdown(self, immediately=False):
                    self.queue.shutdown(immediately)

            self.database_thread = FakeCallback()

        if config['megacache']:
            import Tribler.Core.CacheDB.cachedb as cachedb
            from Tribler.Core.CacheDB.SqliteCacheDBHandler import PeerDBHandler, TorrentDBHandler, MyPreferenceDBHandler, VoteCastDBHandler, ChannelCastDBHandler, NetworkBuzzDBHandler, UserEventLogDBHandler
            from Tribler.Category.Category import Category
            from Tribler.Core.Tag.Extraction import TermExtraction
            from Tribler.Core.CacheDB.sqlitecachedb import try_register

            if DEBUG:
                print >> sys.stderr, 'tlm: Reading Session state from', config['state_dir']

            nocachedb = cachedb.init(config, self.rawserver_fatalerrorfunc)
            try_register(nocachedb, self.database_thread)

            self.cat = Category.getInstance(config['install_dir'])
            self.term = TermExtraction.getInstance(config['install_dir'])

            self.peer_db = PeerDBHandler.getInstance()
            self.peer_db.registerConnectionUpdater(self.session)

            self.torrent_db = TorrentDBHandler.getInstance()
            self.torrent_db.register(os.path.abspath(config['torrent_collecting_dir']))
            self.mypref_db = MyPreferenceDBHandler.getInstance()
            self.votecast_db = VoteCastDBHandler.getInstance()
            self.votecast_db.registerSession(self.session)
            self.channelcast_db = ChannelCastDBHandler.getInstance()
            self.channelcast_db.registerSession(self.session)
            self.nb_db = NetworkBuzzDBHandler.getInstance()
            self.ue_db = UserEventLogDBHandler.getInstance()

            if self.dispersy:
                self.dispersy.database.attach_commit_callback(self.channelcast_db._db.commitNow)
        else:
            # megacache disabled implies no torrent checking either
            config['torrent_checking'] = 0

        self.rtorrent_handler = None
        if config['torrent_collecting']:
            from Tribler.Core.RemoteTorrentHandler import RemoteTorrentHandler
            self.rtorrent_handler = RemoteTorrentHandler()
def setUp(self):
    """Create a throw-away Dispersy instance plus a dummy master member and
    a fresh own member for community tests."""
    yield super(AbstractTestCommunity, self).setUp()

    dispersy = Dispersy(ManualEnpoint(0), self.getStateDir())
    dispersy._database.open()

    self.dispersy = dispersy
    self.master_member = DummyMember(dispersy, 1, "a" * 20)
    self.member = dispersy.get_new_member(u"curve25519")
class TestCircuitDebugEndpoint(AbstractApiTest):
    """REST-API tests for the debug endpoints: circuits, open files/sockets,
    threads, cpu/memory history, memory dump and log retrieval."""

    @blocking_call_on_reactor_thread
    @inlineCallbacks
    def setUp(self, autoload_discovery=True):
        yield super(TestCircuitDebugEndpoint, self).setUp(autoload_discovery=autoload_discovery)

        # Stand-alone Dispersy with a dummy master member; the session is
        # monkey-patched to hand out this instance and its single community.
        self.dispersy = Dispersy(ManualEnpoint(0), self.getStateDir())
        self.dispersy._database.open()
        master_member = DummyMember(self.dispersy, 1, "a" * 20)
        member = self.dispersy.get_new_member(u"curve25519")

        self.tunnel_community = HiddenTunnelCommunity(self.dispersy, master_member, member)
        self.dispersy.get_communities = lambda: [self.tunnel_community]
        self.session.get_dispersy_instance = lambda: self.dispersy

    def setUpPreSession(self):
        super(TestCircuitDebugEndpoint, self).setUpPreSession()
        self.config.set_tunnel_community_enabled(True)

    @deferred(timeout=10)
    def test_get_circuit_no_community(self):
        """
        Testing whether the API returns error 404 if no tunnel community is loaded
        """
        self.dispersy.get_communities = lambda: []
        return self.do_request('debug/circuits', expected_code=404)

    @deferred(timeout=10)
    def test_get_circuits(self):
        """
        Testing whether the API returns the correct circuits
        """
        mock_hop = MockObject()
        mock_hop.host = 'somewhere'
        mock_hop.port = 4242
        mock_circuit = MockObject()
        mock_circuit.state = 'TESTSTATE'
        mock_circuit.goal_hops = 42
        mock_circuit.bytes_up = 200
        mock_circuit.bytes_down = 400
        mock_circuit.creation_time = 1234
        mock_circuit.hops = [mock_hop]
        self.tunnel_community.circuits = {'abc': mock_circuit}

        def verify_response(response):
            response_json = json.loads(response)
            self.assertEqual(len(response_json['circuits']), 1)
            self.assertEqual(response_json['circuits'][0]['state'], 'TESTSTATE')
            self.assertEqual(response_json['circuits'][0]['bytes_up'], 200)
            self.assertEqual(response_json['circuits'][0]['bytes_down'], 400)
            self.assertEqual(len(response_json['circuits'][0]['hops']), 1)
            # host and port are rendered as a single "host:port" string
            self.assertEqual(response_json['circuits'][0]['hops'][0]['host'], 'somewhere:4242')

        self.should_check_equality = False
        return self.do_request('debug/circuits', expected_code=200).addCallback(verify_response)

    @deferred(timeout=10)
    def test_get_open_files(self):
        """
        Test whether the API returns open files
        """
        def verify_response(response):
            response_json = json.loads(response)
            self.assertGreaterEqual(len(response_json['open_files']), 1)

        self.should_check_equality = False
        return self.do_request('debug/open_files', expected_code=200).addCallback(verify_response)

    @deferred(timeout=10)
    def test_get_open_sockets(self):
        """
        Test whether the API returns open sockets
        """
        def verify_response(response):
            response_json = json.loads(response)
            self.assertGreaterEqual(len(response_json['open_sockets']), 1)

        self.should_check_equality = False
        return self.do_request('debug/open_sockets', expected_code=200).addCallback(verify_response)

    @deferred(timeout=10)
    def test_get_threads(self):
        """
        Test whether the API returns open threads
        """
        def verify_response(response):
            response_json = json.loads(response)
            self.assertGreaterEqual(len(response_json['threads']), 1)

        self.should_check_equality = False
        return self.do_request('debug/threads', expected_code=200).addCallback(verify_response)

    @deferred(timeout=10)
    def test_get_cpu_history(self):
        """
        Test whether the API returns the cpu history
        """
        def verify_response(response):
            response_json = json.loads(response)
            self.assertGreaterEqual(len(response_json['cpu_history']), 1)

        # Force at least one sample before querying the endpoint.
        self.session.lm.resource_monitor.check_resources()
        self.should_check_equality = False
        return self.do_request('debug/cpu/history', expected_code=200).addCallback(verify_response)

    @deferred(timeout=10)
    def test_get_memory_history(self):
        """
        Test whether the API returns the memory history
        """
        def verify_response(response):
            response_json = json.loads(response)
            self.assertGreaterEqual(len(response_json['memory_history']), 1)

        # Force at least one sample before querying the endpoint.
        self.session.lm.resource_monitor.check_resources()
        self.should_check_equality = False
        return self.do_request('debug/memory/history', expected_code=200).addCallback(verify_response)

    @deferred(timeout=10)
    def test_dump_memory(self):
        """
        Test whether the API returns a memory dump
        """
        def verify_response(response):
            self.assertTrue(response)

        self.should_check_equality = False
        return self.do_request('debug/memory/dump', expected_code=200).addCallback(verify_response)

    @deferred(timeout=10)
    def test_debug_pane_logs(self):
        """
        Test whether the API returns the logs
        """
        test_log_message = "This is the test log message"
        max_lines = 100

        import Tribler
        project_root_dir = os.path.abspath(os.path.join(os.path.dirname(Tribler.__file__), ".."))
        log_config = os.path.join(project_root_dir, "logger.conf")

        # State directory for logs
        state_log_dir = os.path.join(self.session.config.get_state_dir(), 'logs')
        if not os.path.exists(state_log_dir):
            os.makedirs(state_log_dir)

        # Setup logging
        logging.info_log_file = os.path.join(state_log_dir, 'tribler-info.log')
        logging.error_log_file = os.path.join(state_log_dir, 'tribler-error.log')
        logging.config.fileConfig(log_config, disable_existing_loggers=False)

        def verify_log_exists(response):
            json_response = json.loads(response)
            logs = json_response['content'].strip().split("\n")

            # Check number of logs returned is correct
            self.assertEqual(len(logs), max_lines)

            # Check if test log message is present in the logs, at least once
            log_exists = any((True for log in logs if test_log_message in log))
            self.assertTrue(log_exists, "Test log not found in the debug log response")

        # write 100 test logs which is used to test for its presence in the response
        for log_index in xrange(100):
            logging.error("%s [%d]", test_log_message, log_index)

        self.should_check_equality = False
        return self.do_request('debug/log?max_lines=%d' % max_lines,
                               expected_code=200).addCallback(verify_log_exists)
def test_chn_max_torrents(self):
    """
    Test the restriction of max_torrents in a source.
    """
    self.session.get_dispersy = lambda: True
    self.session.lm.dispersy = Dispersy(ManualEnpoint(0), self.getStateDir())
    dispersy_cid_hex = "abcd" * 9 + "0012"
    dispersy_cid = binascii.unhexlify(dispersy_cid_hex)

    # create channel and insert torrent
    self.create_fake_allchannel_community()
    self.create_torrents_in_channel(dispersy_cid_hex)

    # Second, known torrent used to exceed the max_torrents limit.
    pioneer_file = os.path.join(TESTS_DATA_DIR, "Pioneer.One.S01E06.720p.x264-VODO.torrent")
    pioneer_tdef = TorrentDef.load(pioneer_file)
    pioneer_ihash = binascii.unhexlify("66ED7F30E3B30FA647ABAA19A36E7503AA071535")

    torrent_list = [[self.channel_id, 1, 1, pioneer_ihash, 1460000001, pioneer_file,
                     pioneer_tdef.get_files_as_unicode_with_length(),
                     pioneer_tdef.get_trackers_as_single_tuple()]]
    self.insert_torrents_into_channel(torrent_list)

    self.boosting_manager.add_source(dispersy_cid)
    chn_obj = self.boosting_manager.get_source_object(dispersy_cid)
    chn_obj.max_torrents = 2
    # Disable real torrent loading until activate_mgr swaps in _load below.
    chn_obj.torrent_mgr.load_torrent = lambda dummy_1, dummy_2: None

    def _load(torrent, callback=None):
        # Wrap a plain torrent into a CollectedTorrent, resolving its
        # database id first; mirrors the torrent manager's interface
        # (callback style or direct return).
        if not isinstance(torrent, CollectedTorrent):
            torrent_id = 0
            if torrent.torrent_id <= 0:
                torrent_id = self.session.lm.torrent_db.getTorrentID(torrent.infohash)
            if torrent_id:
                torrent.update_torrent_id(torrent_id)

            # infohashes starting with "fc" belong to the fixture tdef;
            # everything else maps to the pioneer torrent.
            infohash_str = binascii.hexlify(torrent.infohash)
            torrent = CollectedTorrent(torrent, self.tdef if infohash_str.startswith("fc") else pioneer_tdef)
        if callback is not None:
            callback(torrent)
        else:
            return torrent

    def activate_mgr():
        """
        activate ltmgr and adjust max torrents to emulate overflow torrents
        """
        chn_obj.max_torrents = 1
        chn_obj.torrent_mgr.load_torrent = _load

    reactor.callLater(5, activate_mgr)

    def check_torrents_channel(src, defer_param=None):
        """
        check if a torrent already in channel and ready to download
        """
        if defer_param is None:
            defer_param = defer.Deferred()

        src_obj = self.boosting_manager.get_source_object(src)
        success = True
        if len(src_obj.unavail_torrent) == 0:
            # All torrents resolved: the limit must have been respected.
            self.assertLessEqual(len(src_obj.torrents), src_obj.max_torrents)
        else:
            # Not done yet; poll again in a second.
            success = False
            reactor.callLater(1, check_torrents_channel, src, defer_param)

        if success:
            src_obj.community.cancel_all_pending_tasks()
            src_obj.kill_tasks()
            defer_param.callback(src)

        return defer_param

    d = self.check_source(dispersy_cid)
    d.addCallback(check_torrents_channel)
    return d
def test_chn_lookup(self):
    """
    testing channel source.

    It includes finding and downloading actual torrent
    """
    self.session.get_dispersy = lambda: True
    self.session.lm.dispersy = Dispersy(ManualEnpoint(0), self.getStateDir())
    dispersy_cid_hex = "abcd" * 9 + "0012"
    dispersy_cid = binascii.unhexlify(dispersy_cid_hex)

    # create channel and insert torrent
    self.create_fake_allchannel_community()
    self.create_torrents_in_channel(dispersy_cid_hex)

    self.boosting_manager.add_source(dispersy_cid)
    chn_obj = self.boosting_manager.get_source_object(dispersy_cid)

    def _load(torrent, callback=None):
        # Wrap a plain torrent into a CollectedTorrent, resolving its
        # database id first; mirrors the torrent manager's interface
        # (callback style or direct return).
        if not isinstance(torrent, CollectedTorrent):
            torrent_id = 0
            if torrent.torrent_id <= 0:
                torrent_id = self.session.lm.torrent_db.getTorrentID(torrent.infohash)
            if torrent_id:
                torrent.update_torrent_id(torrent_id)
            torrent = CollectedTorrent(torrent, self.tdef)
        if callback is not None:
            callback(torrent)
        else:
            return torrent

    def check_torrents_channel(src, defer_param=None, target=1):
        """
        check if a torrent already in channel and ready to download
        """
        if defer_param is None:
            defer_param = defer.Deferred()

        src_obj = self.boosting_manager.get_source_object(src)
        success = True
        # Poll once a second until the source holds >= target torrents,
        # the fixture torrent is known, and its download has started.
        if not src_obj or len(src_obj.torrents) < target:
            success = False
            reactor.callLater(1, check_torrents_channel, src, defer_param, target=target)
        elif not self.boosting_manager.torrents.get(TORRENT_FILE_INFOHASH, None):
            success = False
            reactor.callLater(1, check_torrents_channel, src, defer_param, target=target)
        elif not self.boosting_manager.torrents[TORRENT_FILE_INFOHASH].get('download', None):
            success = False
            reactor.callLater(1, check_torrents_channel, src, defer_param, target=target)

        if success:
            self.boosting_manager.set_enable_mining(src, False, force_restart=True)
            if src_obj.community:
                src_obj.community.cancel_all_pending_tasks()
            defer_param.callback(src)

        return defer_param

    chn_obj.torrent_mgr.load_torrent = _load

    d = self.check_source(dispersy_cid)
    d.addCallback(check_torrents_channel, target=1)
    return d
def __init__(self, parent):
    """Panel showing live Dispersy statistics; refreshes every 5 seconds via a wx.Timer."""
    self.buildColumns = False

    # Requires an already-running Dispersy singleton.
    self.dispersy = Dispersy.has_instance()
    if not self.dispersy:
        raise RuntimeError("Dispersy has not started yet")

    HomePanel.__init__(self, parent, 'Dispersy info', SEPARATOR_GREY)
    self.SetMinSize((-1, 200))

    self.timer = wx.Timer(self)
    self.Bind(wx.EVT_TIMER, self._onTimer, self.timer)
    self.timer.Start(5000, False)
    self.UpdateStats()

    def ratio(i, j):
        # "i / j ~pct%" — guards against j == 0.
        return "%d / %d ~%.1f%%" % (i, j, (100.0 * i / j) if j else 0.0)

    # (label, tooltip, stats -> display string) triples rendered by the panel.
    # NOTE(review): the Download/Upload rate lambdas divide by
    # (timestamp - start) unguarded — ZeroDivisionError if called with zero
    # elapsed time; confirm stats.timestamp > stats.start at call sites.
    self.mapping = [
        ("WAN Address", '', lambda stats: "%s:%d" % stats.wan_address),
        ("LAN Address", '', lambda stats: "%s:%d" % stats.lan_address),
        ("Connection", '', lambda stats: str(stats.connection_type)),
        ("Runtime", '', lambda stats: self.utility.eta_value(stats.timestamp - stats.start)),
        ("Download", '', lambda stats: self.utility.size_format(stats.total_down) + " or " +
         self.utility.size_format(int(stats.total_down / (stats.timestamp - stats.start))) + "/s"),
        ("Upload", '', lambda stats: self.utility.size_format(stats.total_up) + " or " +
         self.utility.size_format(int(stats.total_up / (stats.timestamp - stats.start))) + "/s"),
        ("Packets send", 'Packets send vs Packets handled',
         lambda stats: ratio(stats.total_send, stats.received_count + stats.total_send)),
        ("Packets received", 'Packets received vs Packets handled',
         lambda stats: ratio(stats.received_count, stats.received_count + stats.total_send)),
        ("Packets dropped", 'Packets dropped vs Packets received',
         lambda stats: ratio(stats.drop_count, stats.received_count)),
        ("Packets success", 'Messages successfully handled vs Packets received',
         lambda stats: ratio(stats.success_count, stats.received_count)),
        ("Packets delayed", 'Packets being delayed vs Packets reveived',
         lambda stats: ratio(stats.delay_count, stats.received_count)),
        ("Sync-Messages created", 'Total number of sync messages created by us in this session',
         lambda stats: str(stats.created_count)),
        ("Candidates reuse", 'Candidates discovered (intro or stumbled) vs Candidates active in more than one community',
         lambda stats: ratio(stats.total_candidates_overlapped, stats.total_candidates_discovered)),
        ("Packets delayed send", 'Total number of delaymessages or delaypacket messages being sent',
         lambda stats: ratio(stats.delay_send, stats.delay_count)),
        ("Packets delayed success", 'Total number of packets which were delayed, and did not timeout',
         lambda stats: ratio(stats.delay_success, stats.delay_count)),
        ("Packets delayed timeout", 'Total number of packets which were delayed, but got a timeout',
         lambda stats: ratio(stats.delay_timeout, stats.delay_count)),
        ("Walker success", '', lambda stats: ratio(stats.walk_success, stats.walk_attempt)),
        ("Walker success (from trackers)", 'Comparing the successes to tracker to overall successes.',
         lambda stats: ratio(stats.walk_bootstrap_success, stats.walk_bootstrap_attempt)),
        ("Walker resets", '', lambda stats: str(stats.walk_reset)),
        ("Bloom reuse", 'Total number of bloomfilters reused vs bloomfilters sent in this session',
         lambda stats: ratio(sum(c.sync_bloom_reuse for c in stats.communities),
                             sum(c.sync_bloom_send for c in stats.communities))),
        ("Revision", '', lambda stats: str(max(stats.revision.itervalues()))),
        ("Debug mode", '', lambda stats: "yes" if __debug__ else "no"),
    ]
def main(): command_line_parser = optparse.OptionParser() command_line_parser.add_option("--statedir", action="store", type="string", help="Use an alternate statedir") command_line_parser.add_option("--port", action="store", type="int", help="Listen at this port") command_line_parser.add_option("--rss", action="store", type="string", help="Url where to fetch rss feed, or several seperated with ';'") command_line_parser.add_option("--dir", action="store", type="string", help="Directory to watch for .torrent files, or several seperated with ';'") command_line_parser.add_option("--file", action="store", type="string", help="JSON file which has a community") command_line_parser.add_option("--nickname", action="store", type="string", help="The moderator name") command_line_parser.add_option("--channelname", action="store", type="string", help="The channel name") # parse command-line arguments opt, args = command_line_parser.parse_args() if not (opt.rss or opt.dir or opt.file): command_line_parser.print_help() print "\nExample: python Tribler/Main/metadata-injector.py --rss http://frayja.com/rss.php --nickname frayja --channelname goldenoldies" sys.exit() print "Type 'Q' to stop the metadata-injector" sscfg = SessionStartupConfig() if opt.statedir: sscfg.set_state_dir(unicode(os.path.realpath(opt.statedir))) if opt.port: sscfg.set_dispersy_port(opt.port) if opt.nickname: sscfg.set_nickname(opt.nickname) sscfg.set_megacache(True) sscfg.set_overlay(True) # turn torrent collecting on. 
this will cause torrents to be distributed sscfg.set_torrent_collecting(True) sscfg.set_dialback(False) sscfg.set_internal_tracker(False) session = Session(sscfg) #Wait for Dispersy if Dispersy.has_instance(): dispersy_started(session, opt) else: def notify(*args): dispersy_started(session, opt) session.add_observer(notify,NTFY_DISPERSY,[NTFY_STARTED]) # condition variable would be prettier, but that don't listen to # KeyboardInterrupt try: while True: x = sys.stdin.readline() print >> sys.stderr, x if x.strip() == 'Q': break except: print_exc() torrentfeed = RssParser.getInstance() torrentfeed.shutdown() dirfeed = DirectoryFeedThread.getInstance() dirfeed.shutdown() session.shutdown() print "Shutting down..." time.sleep(5)
def register(self, session, sesslock, autoload_discovery=True):
    """One-time registration of the session's subsystems.

    Idempotent via self.registered: the body only runs on the first call.
    Instantiates (in dependency order) the leveldb stores, torrent
    collecting, the megacache DB handlers, the video player, Dispersy with
    its TFTP service, and the search/channel managers, as enabled by the
    session's configuration getters.

    :param session: the Tribler Session being started.
    :param sesslock: lock shared with the session (stored, not acquired here).
    :param autoload_discovery: forwarded to self.init().
    """
    if not self.registered:
        self.registered = True

        self.session = session
        self.sesslock = sesslock

        if self.session.get_torrent_store():
            from Tribler.Core.leveldbstore import LevelDbStore
            self.torrent_store = LevelDbStore(self.session.get_torrent_store_dir())

        if self.session.get_enable_metadata():
            from Tribler.Core.leveldbstore import LevelDbStore
            self.metadata_store = LevelDbStore(self.session.get_metadata_store_dir())

        # torrent collecting: RemoteTorrentHandler
        if self.session.get_torrent_collecting():
            from Tribler.Core.RemoteTorrentHandler import RemoteTorrentHandler
            self.rtorrent_handler = RemoteTorrentHandler(self.session)

        # TODO(emilon): move this to a megacache component or smth
        if self.session.get_megacache():
            from Tribler.Core.CacheDB.SqliteCacheDBHandler import (PeerDBHandler, TorrentDBHandler,
                                                                   MyPreferenceDBHandler, VoteCastDBHandler,
                                                                   ChannelCastDBHandler)
            from Tribler.Category.Category import Category

            self._logger.debug('tlm: Reading Session state from %s', self.session.get_state_dir())

            self.cat = Category.getInstance(self.session)

            # create DBHandlers
            self.peer_db = PeerDBHandler(self.session)
            self.torrent_db = TorrentDBHandler(self.session)
            self.mypref_db = MyPreferenceDBHandler(self.session)
            self.votecast_db = VoteCastDBHandler(self.session)
            self.channelcast_db = ChannelCastDBHandler(self.session)

            # initializes DBHandlers
            self.peer_db.initialize()
            self.torrent_db.initialize()
            self.mypref_db.initialize()
            self.votecast_db.initialize()
            self.channelcast_db.initialize()

            from Tribler.Core.Modules.tracker_manager import TrackerManager
            self.tracker_manager = TrackerManager(self.session)
            self.tracker_manager.initialize()

        if self.session.get_videoplayer():
            self.videoplayer = VideoPlayer(self.session)

        # Dispersy
        self.session.dispersy_member = None
        self.tftp_handler = None
        if self.session.get_dispersy():
            from Tribler.dispersy.dispersy import Dispersy
            from Tribler.dispersy.endpoint import StandaloneEndpoint

            # set communication endpoint
            endpoint = StandaloneEndpoint(self.session.get_dispersy_port(), ip=self.session.get_ip())

            working_directory = unicode(self.session.get_state_dir())
            self.dispersy = Dispersy(endpoint, working_directory)

            # register TFTP service
            from Tribler.Core.TFTP.handler import TftpHandler
            # "fffffffd".decode('hex') is the 4-byte TFTP session prefix.
            self.tftp_handler = TftpHandler(self.session, endpoint, "fffffffd".decode('hex'),
                                            block_size=1024)
            self.tftp_handler.initialize()

        if self.session.get_enable_torrent_search() or self.session.get_enable_channel_search():
            self.search_manager = SearchManager(self.session)
            self.search_manager.initialize()

        if self.session.get_enable_channel_search():
            from Tribler.Core.Modules.channel_manager import ChannelManager
            self.channel_manager = ChannelManager(self.session)
            self.channel_manager.initialize()

    # NOTE(review): original indentation was lost in extraction; this guard is
    # placed at method level so init runs even on repeat calls — confirm
    # against upstream history.
    if not self.initComplete:
        self.init(autoload_discovery)
def register(self, session, session_lock):
    """One-time registration of the session's subsystems (config-object era).

    Must run on the Twisted IO thread (asserted). Idempotent via
    self.registered. Sets up stores, torrent collecting, megacache DB
    handlers, the video server, Dispersy with its TFTP service and the
    search manager, according to session.config, then notifies observers
    that Tribler has started.

    :param session: the Tribler Session being started.
    :param session_lock: lock shared with the session (stored, not acquired).
    :returns: self.startup_deferred, fired when startup completes.
    """
    assert isInIOThread()
    if not self.registered:
        self.registered = True

        self.session = session
        self.session_lock = session_lock

        # On Mac, we bundle the root certificate for the SSL validation since Twisted is not using the root
        # certificates provided by the system trust store.
        if sys.platform == 'darwin':
            os.environ['SSL_CERT_FILE'] = os.path.join(get_lib_path(), 'root_certs_mac.pem')

        if self.session.config.get_torrent_store_enabled():
            from Tribler.Core.leveldbstore import LevelDbStore
            self.torrent_store = LevelDbStore(self.session.config.get_torrent_store_dir())

        if self.session.config.get_metadata_enabled():
            from Tribler.Core.leveldbstore import LevelDbStore
            self.metadata_store = LevelDbStore(self.session.config.get_metadata_store_dir())

        # torrent collecting: RemoteTorrentHandler
        if self.session.config.get_torrent_collecting_enabled():
            from Tribler.Core.RemoteTorrentHandler import RemoteTorrentHandler
            self.rtorrent_handler = RemoteTorrentHandler(self.session)

        # TODO(emilon): move this to a megacache component or smth
        if self.session.config.get_megacache_enabled():
            from Tribler.Core.CacheDB.SqliteCacheDBHandler import (PeerDBHandler, TorrentDBHandler,
                                                                   MyPreferenceDBHandler, VoteCastDBHandler,
                                                                   ChannelCastDBHandler)
            from Tribler.Core.Category.Category import Category

            self._logger.debug('tlm: Reading Session state from %s', self.session.config.get_state_dir())

            self.category = Category()

            # create DBHandlers
            self.peer_db = PeerDBHandler(self.session)
            self.torrent_db = TorrentDBHandler(self.session)
            self.mypref_db = MyPreferenceDBHandler(self.session)
            self.votecast_db = VoteCastDBHandler(self.session)
            self.channelcast_db = ChannelCastDBHandler(self.session)

            # initializes DBHandlers
            self.peer_db.initialize()
            self.torrent_db.initialize()
            self.mypref_db.initialize()
            self.votecast_db.initialize()
            self.channelcast_db.initialize()

            from Tribler.Core.Modules.tracker_manager import TrackerManager
            # NOTE(review): unlike the sibling register(), no initialize()
            # call follows here — confirm whether that is intentional.
            self.tracker_manager = TrackerManager(self.session)

        if self.session.config.get_video_server_enabled():
            self.video_server = VideoServer(self.session.config.get_video_server_port(), self.session)
            self.video_server.start()

        # Dispersy
        self.tftp_handler = None
        if self.session.config.get_dispersy_enabled():
            from Tribler.dispersy.dispersy import Dispersy
            from Tribler.dispersy.endpoint import StandaloneEndpoint

            # set communication endpoint
            endpoint = StandaloneEndpoint(self.session.config.get_dispersy_port())

            working_directory = unicode(self.session.config.get_state_dir())
            self.dispersy = Dispersy(endpoint, working_directory)

            # register TFTP service
            from Tribler.Core.TFTP.handler import TftpHandler
            # "fffffffd".decode('hex') is the 4-byte TFTP session prefix.
            self.tftp_handler = TftpHandler(self.session, endpoint, "fffffffd".decode('hex'),
                                            block_size=1024)
            self.tftp_handler.initialize()

        if self.session.config.get_torrent_search_enabled() or self.session.config.get_channel_search_enabled():
            self.search_manager = SearchManager(self.session)
            self.search_manager.initialize()

        if not self.initComplete:
            self.init()

    self.session.add_observer(self.on_tribler_started, NTFY_TRIBLER, [NTFY_STARTED])
    self.session.notifier.notify(NTFY_TRIBLER, NTFY_STARTED, None)
    return self.startup_deferred
from Tribler.dispersy.script import ScriptBase
from Tribler.dispersy.member import Member
from Tribler.dispersy.callback import Callback
from Tribler.dispersy.dispersy import Dispersy
from Tribler.dispersy.endpoint import StandaloneEndpoint

# Write the bootstrap tracker address so Dispersy uses our local tracker.
with open(BOOTSTRAPTRIBLER_FILE, "w") as f:
    print >> f, args.tracker_ip, args.tracker_port

# start in-memory Dispersy
callback = Callback('Dispersy')
# Fixed idiom: compare against None with "is not", not "!=".
if args.listen is not None:
    endpoint = StandaloneEndpoint(randint(10000, 20000), args.listen)
else:
    endpoint = StandaloneEndpoint(randint(10000, 20000))
dispersy = Dispersy(callback, endpoint, u'.')
dispersy.start()
# Lazy %-style logging args instead of eager string concatenation.
logger.info("Dispersy is listening on %s", repr(dispersy.lan_address))
callback.call(create_bc3_community, (dispersy,))

# wait
# TODO(vladum): Wait until community is created (or just some time).
# try:
#     while callback.is_running:
#         time.sleep(5.0)
# except KeyboardInterrupt:
#     logger.info("shutting down")
# finally:
dispersy.stop()

# cleanup filesystem
def __init__(self):
    """Open the barter database file located in Dispersy's working directory."""
    db_path = path.join(Dispersy.get_instance().working_directory, u"barter.db")
    super(BarterDatabase, self).__init__(db_path)
class TestMultichainStatsEndpoint(AbstractApiTest):
    """REST API tests for the multichain statistics endpoint.

    A real Dispersy instance is created, but its community list and the
    session's dispersy accessor are monkey-patched so the endpoint under
    test sees exactly one (mocked) MultiChainCommunity.
    """

    @blocking_call_on_reactor_thread
    @inlineCallbacks
    def setUp(self, autoload_discovery=True):
        yield super(TestMultichainStatsEndpoint, self).setUp(autoload_discovery=autoload_discovery)

        self.dispersy = Dispersy(ManualEnpoint(0), self.getStateDir())
        self.dispersy._database.open()
        # Dummy master member: the 20-byte mid is irrelevant for these tests.
        master_member = DummyMember(self.dispersy, 1, "a" * 20)
        self.member = self.dispersy.get_new_member(u"curve25519")

        self.mc_community = MultiChainCommunity(self.dispersy, master_member, self.member)
        # Patch so only our community is visible to the REST endpoint.
        self.dispersy.get_communities = lambda: [self.mc_community]
        self.session.get_dispersy_instance = lambda: self.dispersy

    @deferred(timeout=10)
    def test_get_circuit_no_community(self):
        """
        Testing whether the API returns error 404 if no multichain community is loaded
        """
        self.dispersy.get_communities = lambda: []
        return self.do_request('multichain/statistics', expected_code=404)

    @deferred(timeout=10)
    def test_get_statistics(self):
        """
        Testing whether the API returns the correct statistics
        """
        # Build one fake multichain block; requester is our own member so the
        # endpoint reports it as "self" traffic.
        mock_block = MockObject()
        mock_block.public_key_requester = self.member.public_key
        mock_block.public_key_responder = "deadbeef".decode("HEX")
        mock_block.up = 42
        mock_block.down = 8
        mock_block.total_up_requester = 1024
        mock_block.total_down_requester = 2048
        mock_block.sequence_number_requester = 3
        mock_block.previous_hash_requester = "cafebabe".decode("HEX")
        mock_block.hash_requester = "b19b00b5".decode("HEX")
        mock_block.signature_requester = "deadbabe".decode("HEX")
        mock_block.total_up_responder = 512
        mock_block.total_down_responder = 256
        mock_block.sequence_number_responder = 15
        mock_block.previous_hash_responder = "cafef00d".decode("HEX")
        mock_block.hash_responder = "baadf00d".decode("HEX")
        mock_block.signature_responder = "deadf00d".decode("HEX")
        self.mc_community.persistence.add_block(mock_block)

        def verify_response(response):
            # The endpoint base64-encodes keys/hashes; compare accordingly.
            response_json = json.loads(response)
            self.assertTrue("statistics" in response_json)
            stats = response_json["statistics"]
            self.assertEqual(stats["self_id"], base64.encodestring(self.member.public_key).strip())
            self.assertEqual(stats["self_total_blocks"], 3)
            self.assertEqual(stats["self_total_up_mb"], 1024)
            self.assertEqual(stats["self_total_down_mb"], 2048)
            self.assertNotEqual(stats["latest_block_insert_time"], "")
            self.assertEqual(stats["latest_block_id"], base64.encodestring("b19b00b5".decode("HEX")).strip())
            self.assertEqual(stats["latest_block_requester_id"],
                             base64.encodestring(self.member.public_key).strip())
            self.assertEqual(stats["latest_block_responder_id"],
                             base64.encodestring("deadbeef".decode("HEX")).strip())
            self.assertEqual(stats["latest_block_up_mb"], "42")
            self.assertEqual(stats["latest_block_down_mb"], "8")

        self.should_check_equality = False
        return self.do_request('multichain/statistics', expected_code=200).addCallback(verify_response)

    @deferred(timeout=10)
    def test_get_statistics_no_data(self):
        """
        Testing whether the API returns the correct statistics
        """
        def verify_response(response):
            # Without any blocks the endpoint returns sentinel/empty values.
            response_json = json.loads(response)
            self.assertTrue("statistics" in response_json)
            stats = response_json["statistics"]
            self.assertEqual(stats["self_id"], base64.encodestring(self.member.public_key).strip())
            self.assertEqual(stats["self_total_blocks"], -1)
            self.assertEqual(stats["self_total_up_mb"], 0)
            self.assertEqual(stats["self_total_down_mb"], 0)
            self.assertEqual(stats["latest_block_insert_time"], "")
            self.assertEqual(stats["latest_block_id"], "")
            self.assertEqual(stats["latest_block_requester_id"], "")
            self.assertEqual(stats["latest_block_responder_id"], "")
            self.assertEqual(stats["latest_block_up_mb"], "")
            self.assertEqual(stats["latest_block_down_mb"], "")

        self.should_check_equality = False
        return self.do_request('multichain/statistics', expected_code=200).addCallback(verify_response)
class TriblerLaunchMany(Thread): def __init__(self): """ Called only once (unless we have multiple Sessions) by MainThread """ Thread.__init__(self) self.setDaemon(True) self.setName("Network" + self.getName()) self.initComplete = False self.registered = False self.dispersy = None self.database_thread = None def register(self, session, sesslock): if not self.registered: self.registered = True self.session = session self.sesslock = sesslock self.downloads = {} config = session.sessconfig # Should be safe at startup self.upnp_ports = [] # Orig self.sessdoneflag = Event() self.rawserver = RawServer(self.sessdoneflag, config['timeout_check_interval'], config['timeout'], ipv6_enable=config['ipv6_enabled'], failfunc=self.rawserver_fatalerrorfunc, errorfunc=self.rawserver_nonfatalerrorfunc) self.rawserver.add_task(self.rawserver_keepalive, 1) self.listen_port = config['minport'] self.shutdownstarttime = None self.multihandler = MultiHandler(self.rawserver, self.sessdoneflag) # SWIFTPROC swift_exists = config['swiftproc'] and (os.path.exists(config['swiftpath']) or os.path.exists(config['swiftpath'] + '.exe')) if swift_exists: from Tribler.Core.Swift.SwiftProcessMgr import SwiftProcessMgr self.spm = SwiftProcessMgr(config['swiftpath'], config['swiftcmdlistenport'], config['swiftdlsperproc'], self.session.get_swift_tunnel_listen_port(), self.sesslock) try: self.swift_process = self.spm.get_or_create_sp(self.session.get_swift_working_dir(), self.session.get_torrent_collecting_dir(), self.session.get_swift_tunnel_listen_port(), self.session.get_swift_tunnel_httpgw_listen_port(), self.session.get_swift_tunnel_cmdgw_listen_port()) self.upnp_ports.append((self.session.get_swift_tunnel_listen_port(), 'UDP')) except OSError: # could not find/run swift print >> sys.stderr, "lmc: could not start a swift process" else: self.spm = None self.swift_process = None # Dispersy self.session.dispersy_member = None if config['dispersy']: from Tribler.dispersy.callback import Callback from 
Tribler.dispersy.dispersy import Dispersy from Tribler.dispersy.endpoint import RawserverEndpoint, TunnelEndpoint from Tribler.dispersy.community import HardKilledCommunity # set communication endpoint if config['dispersy-tunnel-over-swift'] and self.swift_process: endpoint = TunnelEndpoint(self.swift_process) else: endpoint = RawserverEndpoint(self.rawserver, config['dispersy_port']) callback = Callback("Dispersy") # WARNING NAME SIGNIFICANT working_directory = unicode(config['state_dir']) self.dispersy = Dispersy(callback, endpoint, working_directory) # TODO: see if we can postpone dispersy.start to improve GUI responsiveness. # However, for now we must start self.dispersy.callback before running # try_register(nocachedb, self.database_thread)! self.dispersy.start() print >> sys.stderr, "lmc: Dispersy is listening on port", self.dispersy.wan_address[1], "using", endpoint self.upnp_ports.append((self.dispersy.wan_address[1], 'UDP')) self.dispersy.callback.call(self.dispersy.define_auto_load, args=(HardKilledCommunity,), kargs={'load': True}) # notify dispersy finished loading self.session.uch.notify(NTFY_DISPERSY, NTFY_STARTED, None) from Tribler.Core.permid import read_keypair from Tribler.dispersy.crypto import ec_to_public_bin, ec_to_private_bin keypair = read_keypair(self.session.get_permid_keypair_filename()) self.session.dispersy_member = callback.call(self.dispersy.get_member, (ec_to_public_bin(keypair), ec_to_private_bin(keypair))) self.database_thread = callback else: class FakeCallback(): def __init__(self): from Tribler.Utilities.TimedTaskQueue import TimedTaskQueue self.queue = TimedTaskQueue("FakeCallback") def register(self, call, args=(), kargs=None, delay=0.0, priority=0, id_=u"", callback=None, callback_args=(), callback_kargs=None, include_id=False): def do_task(): if kargs: call(*args, **kargs) else: call(*args) if callback: if callback_kargs: callback(*callback_args, **callback_kargs) else: callback(*callback_args) self.queue.add_task(do_task, 
t=delay) def shutdown(self, immediately=False): self.queue.shutdown(immediately) self.database_thread = FakeCallback() if config['megacache']: import Tribler.Core.CacheDB.cachedb as cachedb from Tribler.Core.CacheDB.SqliteCacheDBHandler import PeerDBHandler, TorrentDBHandler, MyPreferenceDBHandler, VoteCastDBHandler, ChannelCastDBHandler, NetworkBuzzDBHandler, UserEventLogDBHandler from Tribler.Category.Category import Category from Tribler.Core.Tag.Extraction import TermExtraction from Tribler.Core.CacheDB.sqlitecachedb import try_register if DEBUG: print >> sys.stderr, 'tlm: Reading Session state from', config['state_dir'] nocachedb = cachedb.init(config, self.rawserver_fatalerrorfunc) try_register(nocachedb, self.database_thread) self.cat = Category.getInstance(config['install_dir']) self.term = TermExtraction.getInstance(config['install_dir']) self.peer_db = PeerDBHandler.getInstance() self.peer_db.registerConnectionUpdater(self.session) self.torrent_db = TorrentDBHandler.getInstance() self.torrent_db.register(os.path.abspath(config['torrent_collecting_dir'])) self.mypref_db = MyPreferenceDBHandler.getInstance() self.votecast_db = VoteCastDBHandler.getInstance() self.votecast_db.registerSession(self.session) self.channelcast_db = ChannelCastDBHandler.getInstance() self.channelcast_db.registerSession(self.session) self.nb_db = NetworkBuzzDBHandler.getInstance() self.ue_db = UserEventLogDBHandler.getInstance() if self.dispersy: self.dispersy.database.attach_commit_callback(self.channelcast_db._db.commitNow) else: config['torrent_checking'] = 0 self.rtorrent_handler = None if config['torrent_collecting']: from Tribler.Core.RemoteTorrentHandler import RemoteTorrentHandler self.rtorrent_handler = RemoteTorrentHandler() def init(self): config = self.session.sessconfig # Should be safe at startup self.mainline_dht = None if config['mainline_dht']: from Tribler.Core.DecentralizedTracking import mainlineDHT try: self.mainline_dht = mainlineDHT.init(('127.0.0.1', 
config['mainline_dht_port']), config['state_dir'], config['swiftdhtport']) self.upnp_ports.append((config['mainline_dht_port'], 'UDP')) except: print_exc() self.ltmgr = None if config['libtorrent']: from Tribler.Core.Libtorrent.LibtorrentMgr import LibtorrentMgr self.ltmgr = LibtorrentMgr(self.session, ignore_singleton=self.session.ignore_singleton) # add task for tracker checking self.torrent_checking = None if config['torrent_checking']: if config['mainline_dht']: # Create torrent-liveliness checker based on DHT from Tribler.Core.DecentralizedTracking.mainlineDHTChecker import mainlineDHTChecker c = mainlineDHTChecker.getInstance() c.register(self.mainline_dht) try: from Tribler.TrackerChecking.TorrentChecking import TorrentChecking self.torrent_checking_period = config['torrent_checking_period'] self.torrent_checking = TorrentChecking.getInstance(self.torrent_checking_period) self.run_torrent_check() except: print_exc if self.rtorrent_handler: self.rtorrent_handler.register(self.dispersy, self.database_thread, self.session, int(config['torrent_collecting_max_torrents'])) self.initComplete = True def add(self, tdef, dscfg, pstate=None, initialdlstatus=None, commit=True, setupDelay=0, hidden=False): """ Called by any thread """ d = None self.sesslock.acquire() try: if not isinstance(tdef, TorrentDefNoMetainfo) and not tdef.is_finalized(): raise ValueError("TorrentDef not finalized") infohash = tdef.get_infohash() # Check if running or saved on disk if infohash in self.downloads: raise DuplicateDownloadException() from Tribler.Core.Libtorrent.LibtorrentDownloadImpl import LibtorrentDownloadImpl d = LibtorrentDownloadImpl(self.session, tdef) if pstate is None and not tdef.get_live(): # not already resuming pstate = self.load_download_pstate_noexc(infohash) if pstate is not None: if DEBUG: print >> sys.stderr, "tlm: add: pstate is", dlstatus_strings[pstate['dlstate']['status']], pstate['dlstate']['progress'] # Store in list of Downloads, always. 
self.downloads[infohash] = d d.setup(dscfg, pstate, initialdlstatus, self.network_engine_wrapper_created_callback, self.network_vod_event_callback, wrapperDelay=setupDelay) finally: self.sesslock.release() if d and not hidden and self.session.get_megacache(): def write_my_pref(): torrent_id = self.torrent_db.getTorrentID(infohash) data = {'destination_path': d.get_dest_dir()} self.mypref_db.addMyPreference(torrent_id, data, commit=commit) if isinstance(tdef, TorrentDefNoMetainfo): self.torrent_db.addInfohash(tdef.get_infohash(), commit=commit) self.torrent_db.updateTorrent(tdef.get_infohash(), name=tdef.get_name().encode('utf_8'), commit=commit) write_my_pref() elif self.rtorrent_handler: self.rtorrent_handler.save_torrent(tdef, write_my_pref) else: self.torrent_db.addExternalTorrent(tdef, source='', extra_info={'status': 'good'}, commit=commit) write_my_pref() return d def network_engine_wrapper_created_callback(self, d, pstate): """ Called by network thread """ try: if pstate is None: # Checkpoint at startup (infohash, pstate) = d.network_checkpoint() self.save_download_pstate(infohash, pstate) except: print_exc() def remove(self, d, removecontent=False, removestate=True, hidden=False): """ Called by any thread """ self.sesslock.acquire() try: d.stop_remove(removestate=removestate, removecontent=removecontent) infohash = d.get_def().get_infohash() if infohash in self.downloads: del self.downloads[infohash] finally: self.sesslock.release() if not hidden: self.remove_id(infohash) def remove_id(self, hash): # this is a bit tricky, as we do not know if this "id" is a roothash or infohash # however a restart will re-add the preference to mypreference if we remove the wrong one def do_db(torrent_db, mypref_db, hash): torrent_id = self.torrent_db.getTorrentID(hash) if torrent_id: self.mypref_db.updateDestDir(torrent_id, "") torrent_id = self.torrent_db.getTorrentIDRoot(hash) if torrent_id: self.mypref_db.updateDestDir(torrent_id, "") if self.session.get_megacache(): 
self.database_thread.register(do_db, args=(self.torrent_db, self.mypref_db, hash), priority=1024) def get_downloads(self): """ Called by any thread """ self.sesslock.acquire() try: return self.downloads.values() # copy, is mutable finally: self.sesslock.release() def get_download(self, hash): """ Called by any thread """ self.sesslock.acquire() try: return self.downloads.get(hash, None) finally: self.sesslock.release() def download_exists(self, infohash): self.sesslock.acquire() try: return infohash in self.downloads finally: self.sesslock.release() def rawserver_fatalerrorfunc(self, e): """ Called by network thread """ if DEBUG: print >> sys.stderr, "tlm: RawServer fatal error func called", e print_exc() def rawserver_nonfatalerrorfunc(self, e): """ Called by network thread """ if DEBUG: print >> sys.stderr, "tlm: RawServer non fatal error func called", e print_exc() # Could log this somewhere, or phase it out def _run(self): """ Called only once by network thread """ try: try: self.start_upnp() self.multihandler.listen_forever() except: print_exc() finally: self.stop_upnp() self.rawserver.shutdown() def rawserver_keepalive(self): """ Hack to prevent rawserver sleeping in select() for a long time, not processing any tasks on its queue at startup time Called by network thread """ self.rawserver.add_task(self.rawserver_keepalive, 1) # # State retrieval # def set_download_states_callback(self, usercallback, getpeerlist, when=0.0): """ Called by any thread """ self.sesslock.acquire() try: # Even if the list of Downloads changes in the mean time this is # no problem. For removals, dllist will still hold a pointer to the # Download, and additions are no problem (just won't be included # in list of states returned via callback. # dllist = self.downloads.values() finally: self.sesslock.release() for d in dllist: # Arno, 2012-05-23: At Niels' request to get total transferred # stats. Causes MOREINFO message to be sent from swift proc # for every initiated dl. 
# 2012-07-31: Turn MOREINFO on/off on demand for efficiency. # 2013-04-17: Libtorrent now uses set_moreinfo_stats as well. d.set_moreinfo_stats(True in getpeerlist or d.get_def().get_id() in getpeerlist) network_set_download_states_callback_lambda = lambda: self.network_set_download_states_callback(usercallback) self.rawserver.add_task(network_set_download_states_callback_lambda, when) def network_set_download_states_callback(self, usercallback): """ Called by network thread """ self.sesslock.acquire() try: # Even if the list of Downloads changes in the mean time this is # no problem. For removals, dllist will still hold a pointer to the # Download, and additions are no problem (just won't be included # in list of states returned via callback. # dllist = self.downloads.values() finally: self.sesslock.release() dslist = [] for d in dllist: try: ds = d.network_get_state(None, False, sessioncalling=True) dslist.append(ds) except: # Niels, 2012-10-18: If Swift connection is crashing, it will raise an exception # We're catching it here to continue building the downloadstates print_exc() # Invoke the usercallback function via a new thread. # After the callback is invoked, the return values will be passed to # the returncallback for post-callback processing. 
self.session.uch.perform_getstate_usercallback(usercallback, dslist, self.sesscb_set_download_states_returncallback) def sesscb_set_download_states_returncallback(self, usercallback, when, newgetpeerlist): """ Called by SessionCallbackThread """ if when > 0.0: # reschedule self.set_download_states_callback(usercallback, newgetpeerlist, when=when) # # Persistence methods # def load_checkpoint(self, initialdlstatus=None, initialdlstatus_dict={}): """ Called by any thread """ if not self.initComplete: network_load_checkpoint_callback_lambda = lambda: self.load_checkpoint(initialdlstatus, initialdlstatus_dict) self.rawserver.add_task(network_load_checkpoint_callback_lambda, 1.0) else: self.sesslock.acquire() filelist = [] try: dir = self.session.get_downloads_pstate_dir() filelist = os.listdir(dir) filelist = [os.path.join(dir, filename) for filename in filelist if filename.endswith('.pickle')] finally: self.sesslock.release() for i, filename in enumerate(filelist): shouldCommit = i + 1 == len(filelist) self.resume_download(filename, initialdlstatus, initialdlstatus_dict, commit=shouldCommit, setupDelay=i * 0.1) def load_download_pstate_noexc(self, infohash): """ Called by any thread, assume sesslock already held """ try: dir = self.session.get_downloads_pstate_dir() basename = binascii.hexlify(infohash) + '.pickle' filename = os.path.join(dir, basename) return self.load_download_pstate(filename) except Exception as e: # TODO: remove saved checkpoint? 
# self.rawserver_nonfatalerrorfunc(e) return None def resume_download(self, filename, initialdlstatus=None, initialdlstatus_dict={}, commit=True, setupDelay=0): tdef = sdef = dscfg = pstate = None try: pstate = self.load_download_pstate(filename) # SWIFTPROC if SwiftDef.is_swift_url(pstate['metainfo']): sdef = SwiftDef.load_from_url(pstate['metainfo']) elif 'infohash' in pstate['metainfo']: tdef = TorrentDefNoMetainfo(pstate['metainfo']['infohash'], pstate['metainfo']['name']) else: tdef = TorrentDef.load_from_dict(pstate['metainfo']) dlconfig = pstate['dlconfig'] if isinstance(dlconfig['saveas'], tuple): dlconfig['saveas'] = dlconfig['saveas'][-1] if sdef and 'name' in dlconfig and isinstance(dlconfig['name'], basestring): sdef.set_name(dlconfig['name']) if sdef and sdef.get_tracker().startswith("127.0.0.1:"): current_port = int(sdef.get_tracker().split(":")[1]) if current_port != self.session.get_swift_dht_listen_port(): print >> sys.stderr, "Modified SwiftDef to new tracker port" sdef.set_tracker("127.0.0.1:%d" % self.session.get_swift_dht_listen_port()) dscfg = DownloadStartupConfig(dlconfig) except: print_exc() # pstate is invalid or non-existing _, file = os.path.split(filename) infohash = binascii.unhexlify(file[:-7]) torrent = self.torrent_db.getTorrent(infohash, keys=['name', 'torrent_file_name', 'swift_torrent_hash'], include_mypref=False) torrentfile = None if torrent: torrent_dir = self.session.get_torrent_collecting_dir() if torrent['swift_torrent_hash']: sdef = SwiftDef(torrent['swift_torrent_hash']) save_name = sdef.get_roothash_as_hex() torrentfile = os.path.join(torrent_dir, save_name) if torrentfile and os.path.isfile(torrentfile): # normal torrentfile is not present, see if readable torrent is there save_name = get_readable_torrent_name(infohash, torrent['name']) torrentfile = os.path.join(torrent_dir, save_name) if torrentfile and os.path.isfile(torrentfile): tdef = TorrentDef.load(torrentfile) defaultDLConfig = 
DefaultDownloadStartupConfig.getInstance() dscfg = defaultDLConfig.copy() if self.mypref_db != None: preferences = self.mypref_db.getMyPrefStatsInfohash(infohash) if preferences: if os.path.isdir(preferences[2]) or preferences[2] == '': dscfg.set_dest_dir(preferences[2]) if DEBUG: print >> sys.stderr, "tlm: load_checkpoint: pstate is", dlstatus_strings[pstate['dlstate']['status']], pstate['dlstate']['progress'] if pstate['engineresumedata'] is None: print >> sys.stderr, "tlm: load_checkpoint: resumedata None" else: print >> sys.stderr, "tlm: load_checkpoint: resumedata len", len(pstate['engineresumedata']) if (tdef or sdef) and dscfg: if dscfg.get_dest_dir() != '': # removed torrent ignoring try: if not self.download_exists((tdef or sdef).get_id()): if tdef: initialdlstatus = initialdlstatus_dict.get(tdef.get_id(), initialdlstatus) self.add(tdef, dscfg, pstate, initialdlstatus, commit=commit, setupDelay=setupDelay) else: initialdlstatus = initialdlstatus_dict.get(sdef.get_id(), initialdlstatus) self.swift_add(sdef, dscfg, pstate, initialdlstatus) else: print >> sys.stderr, "tlm: not resuming checkpoint because download has already been added" except Exception as e: self.rawserver_nonfatalerrorfunc(e) else: print >> sys.stderr, "tlm: removing checkpoint", filename, "destdir is", dscfg.get_dest_dir() os.remove(filename) else: print >> sys.stderr, "tlm: could not resume checkpoint", filename, tdef, dscfg def checkpoint(self, stop=False, checkpoint=True, gracetime=2.0): """ Called by any thread, assume sesslock already held """ # Even if the list of Downloads changes in the mean time this is # no problem. For removals, dllist will still hold a pointer to the # Download, and additions are no problem (just won't be included # in list of states returned via callback. 
# dllist = self.downloads.values() if DEBUG or stop: print >> sys.stderr, "tlm: checkpointing", len(dllist), "stopping", stop network_checkpoint_callback_lambda = lambda: self.network_checkpoint_callback(dllist, stop, checkpoint, gracetime) self.rawserver.add_task(network_checkpoint_callback_lambda, 0.0) # TODO: checkpoint overlayapps / friendship msg handler def network_checkpoint_callback(self, dllist, stop, checkpoint, gracetime): """ Called by network thread """ if checkpoint: for d in dllist: try: # Tell all downloads to stop, and save their persistent state # in a infohash -> pstate dict which is then passed to the user # for storage. # if stop: (infohash, pstate) = d.network_stop(False, False) else: (infohash, pstate) = d.network_checkpoint() if DEBUG: print >> sys.stderr, "tlm: network checkpointing:", d.get_def().get_name(), pstate self.save_download_pstate(infohash, pstate) except Exception as e: self.rawserver_nonfatalerrorfunc(e) if stop: # Some grace time for early shutdown tasks if self.shutdownstarttime is not None: now = timemod.time() diff = now - self.shutdownstarttime if diff < gracetime: print >> sys.stderr, "tlm: shutdown: delaying for early shutdown tasks", gracetime - diff delay = gracetime - diff network_shutdown_callback_lambda = lambda: self.network_shutdown() self.rawserver.add_task(network_shutdown_callback_lambda, delay) return self.network_shutdown() def early_shutdown(self): """ Called as soon as Session shutdown is initiated. Used to start shutdown tasks that takes some time and that can run in parallel to checkpointing, etc. 
""" print >> sys.stderr, "tlm: early_shutdown" # Note: sesslock not held self.shutdownstarttime = timemod.time() if self.rtorrent_handler: self.rtorrent_handler.shutdown() self.rtorrent_handler.delInstance() if self.torrent_checking: self.torrent_checking.shutdown() self.torrent_checking.delInstance() if self.dispersy: print >> sys.stderr, "lmc: Dispersy shutdown", "[%d]" % id(self.dispersy) self.dispersy.stop(666.666) else: self.database_thread.shutdown(True) if self.session.get_megacache(): self.peer_db.delInstance() self.torrent_db.delInstance() self.mypref_db.delInstance() self.votecast_db.delInstance() self.channelcast_db.delInstance() self.nb_db.delInstance() self.ue_db.delInstance() self.cat.delInstance() self.term.delInstance() from Tribler.Core.CacheDB.sqlitecachedb import unregister unregister() # SWIFTPROC if self.spm is not None: self.spm.early_shutdown() if self.mainline_dht: from Tribler.Core.DecentralizedTracking import mainlineDHT mainlineDHT.deinit(self.mainline_dht) def network_shutdown(self): try: print >> sys.stderr, "tlm: network_shutdown" # Arno, 2012-07-04: Obsolete, each thread must close the DBHandler # it uses in its own shutdown procedure. There is no global close # of all per-thread cursors/connections. 
# # cachedb.done() # SWIFTPROC if self.spm is not None: self.spm.network_shutdown() ts = enumerate_threads() print >> sys.stderr, "tlm: Number of threads still running", len(ts) for t in ts: print >> sys.stderr, "tlm: Thread still running", t.getName(), "daemon", t.isDaemon(), "instance:", t except: print_exc() # Stop network thread self.sessdoneflag.set() # Arno, 2010-08-09: Stop Session pool threads only after gracetime self.session.uch.shutdown() # Shutdown libtorrent session after checkpoints have been made if self.ltmgr: self.ltmgr.shutdown() self.ltmgr.delInstance() def save_download_pstate(self, infohash, pstate): """ Called by network thread """ basename = binascii.hexlify(infohash) + '.pickle' filename = os.path.join(self.session.get_downloads_pstate_dir(), basename) if DEBUG: print >> sys.stderr, "tlm: network checkpointing: to file", filename f = open(filename, "wb") pickle.dump(pstate, f) f.close() def load_download_pstate(self, filename): """ Called by any thread """ f = open(filename, "rb") pstate = pickle.load(f) f.close() return pstate def run(self): if prctlimported: prctl.set_name("Tribler" + currentThread().getName()) if not self.initComplete: self.init() if PROFILE: fname = "profile-%s" % self.getName() import cProfile cProfile.runctx("self._run()", globals(), locals(), filename=fname) import pstats print >> sys.stderr, "profile: data for %s" % self.getName() pstats.Stats(fname, stream=sys.stderr).sort_stats("cumulative").print_stats(20) else: self._run() def start_upnp(self): if self.ltmgr: self.set_activity(NTFY_ACT_UPNP) for port, protocol in self.upnp_ports: if DEBUG: print >> sys.stderr, "tlm: adding upnp mapping for %d %s" % (port, protocol) self.ltmgr.add_mapping(port, protocol) def stop_upnp(self): if self.ltmgr: self.ltmgr.delete_mappings() # Events from core meant for API user # def dialback_reachable_callback(self): """ Called by overlay+network thread """ self.session.uch.notify(NTFY_REACHABLE, NTFY_INSERT, None, '') def 
set_activity(self, type, str='', arg2=None): """ Called by overlay + network thread """ # print >>sys.stderr,"tlm: set_activity",type,str,arg2 self.session.uch.notify(NTFY_ACTIVITIES, NTFY_INSERT, type, str, arg2) def network_vod_event_callback(self, videoinfo, event, params): """ Called by network thread """ if DEBUG: print >> sys.stderr, "tlm: network_vod_event_callback: event %s, params %s" % (event, params) # Call Session threadpool to call user's callback try: videoinfo['usercallback'](event, params) except: print_exc() def update_torrent_checking_period(self): # dynamically change the interval: update at least once per day if self.rtorrent_handler: ntorrents = self.rtorrent_handler.num_torrents if ntorrents > 0: self.torrent_checking_period = min(max(86400 / ntorrents, 30), 300) # print >> sys.stderr, "torrent_checking_period", self.torrent_checking_period def run_torrent_check(self): """ Called by network thread """ self.update_torrent_checking_period() self.rawserver.add_task(self.run_torrent_check, self.torrent_checking_period) try: self.torrent_checking.setInterval(self.torrent_checking_period) except Exception as e: print_exc() self.rawserver_nonfatalerrorfunc(e) # SWIFTPROC def swift_add(self, sdef, dscfg, pstate=None, initialdlstatus=None, hidden=False): """ Called by any thread """ d = None self.sesslock.acquire() try: if self.spm is None: raise OperationNotEnabledByConfigurationException() roothash = sdef.get_roothash() # Check if running or saved on disk if roothash in self.downloads: raise DuplicateDownloadException() from Tribler.Core.Swift.SwiftDownloadImpl import SwiftDownloadImpl d = SwiftDownloadImpl(self.session, sdef) # Store in list of Downloads, always. 
self.downloads[roothash] = d d.setup(dscfg, pstate, initialdlstatus, None, self.network_vod_event_callback) finally: self.sesslock.release() def do_db(torrent_db, mypref_db, roothash, sdef, d): torrent_id = torrent_db.addOrGetTorrentIDRoot(roothash, sdef.get_name()) # TODO: if user renamed the dest_path for single-file-torrent dest_path = d.get_dest_dir() data = {'destination_path': dest_path} mypref_db.addMyPreference(torrent_id, data) if d and not hidden and self.session.get_megacache(): self.database_thread.register(do_db, args=(self.torrent_db, self.mypref_db, roothash, sdef, d)) return d def swift_remove(self, d, removecontent=False, removestate=True, hidden=False): """ Called by any thread """ self.sesslock.acquire() try: # SWIFTPROC: remove before stop_remove, to ensure that content # removal works (for torrents, stopping is delegate to network # so all this code happens fast before actual removal. For swift not. roothash = d.get_def().get_roothash() if roothash in self.downloads: del self.downloads[roothash] d.stop_remove(True, removestate=removestate, removecontent=removecontent) finally: self.sesslock.release() def do_db(torrent_db, my_prefdb, roothash): torrent_id = self.torrent_db.getTorrentIDRoot(roothash) if torrent_id: self.mypref_db.updateDestDir(torrent_id, "") if not hidden and self.session.get_megacache(): self.database_thread.register(do_db, args=(self.torrent_db, self.mypref_db, roothash), priority=1024)
class DispersyExperimentScriptClient(ExperimentClient):
    """Experiment client that drives a Dispersy community through a scenario file.

    Subclasses are expected to set ``scenario_file`` (read from
    ``$EXPERIMENT_DIR``) and a ``community_class`` attribute (used by
    :meth:`online`), and may override :meth:`registerCallbacks` to register
    extra scenario actions.  All statistics are appended to ``statistics.log``
    in this client's own output directory as lines of the form
    ``<timestamp> <my_id> <name> <json-or-text>``.

    NOTE(review): ``msg``, ``ExperimentClient``, ``ScenarioRunner``,
    ``call_on_dispersy_thread``, ``deferToThread`` and ``reactor`` come from
    the surrounding module's imports (not visible here); ``msg`` is presumably
    the experiment logger — confirm against the module header.
    """

    # Name of the scenario file, relative to $EXPERIMENT_DIR; must be set by subclasses.
    scenario_file = None

    def __init__(self, vars):
        """Initialize experiment state; Dispersy itself is created later by start_dispersy()."""
        ExperimentClient.__init__(self, vars)
        self._dispersy = None
        self._community = None
        self._database_file = u"dispersy.db"
        # None until stop_dispersy() completes (or an exception sets it to 1);
        # stop() polls this before stopping the reactor.
        self._dispersy_exit_status = None
        # True once we joined the community; later online() calls load instead of join.
        self._is_joined = False
        # When strict, any Dispersy exception aborts the experiment (see start_dispersy()).
        self._strict = True
        self.community_args = []
        self.community_kwargs = {}
        self._stats_file = None
        # When True, _do_log() clears all its prev_* snapshots on its next iteration.
        self._reset_statistics = True

    def startExperiment(self):
        """Set up the scenario runner, the output directory and the stats log, then run the scenario."""
        msg("Starting dummy scenario experiment")
        scenario_file_path = path.join(environ['EXPERIMENT_DIR'], self.scenario_file)
        self.scenario_runner = ScenarioRunner(scenario_file_path, int(self.my_id))

        # TODO(emilon): Auto-register this stuff
        # NOTE(review): set_community_args/set_community_kwargs/str2tuple are defined
        # below but not registered here — confirm whether scenarios need them.
        self.scenario_runner.register(self.echo)
        self.scenario_runner.register(self.online)
        self.scenario_runner.register(self.offline)
        self.scenario_runner.register(self.set_community_kwarg)
        self.scenario_runner.register(self.set_database_file)
        self.scenario_runner.register(self.use_memory_database)
        self.scenario_runner.register(self.set_ignore_exceptions)
        self.scenario_runner.register(self.start_dispersy)
        self.scenario_runner.register(self.stop_dispersy)
        self.scenario_runner.register(self.stop)
        self.scenario_runner.register(self.set_master_member)
        self.scenario_runner.register(self.reset_dispersy_statistics, 'reset_dispersy_statistics')
        self.scenario_runner.register(self.annotate)
        self.scenario_runner.register(self.peertype)

        # TODO(emilon): Move this to the right place
        # TODO(emilon): Do we want to have the .dbs in the output dirs or should they be dumped to /tmp?
        # Each client works inside its own $OUTPUT_DIR/<my_id> directory.
        my_dir = path.join(environ['OUTPUT_DIR'], self.my_id)
        makedirs(my_dir)
        chdir(my_dir)
        self._stats_file = open("statistics.log", 'w')

        # TODO(emilon): Fix me or kill me
        # Best effort: a missing/pre-existing symlink is not fatal.
        try:
            symlink(path.join(environ['PROJECT_DIR'], 'tribler', 'bootstraptribler.txt'), 'bootstraptribler.txt')
        except OSError:
            pass

        self.registerCallbacks()
        self.scenario_runner.run()

    def registerCallbacks(self):
        """Hook for subclasses to register additional scenario actions. Default: none."""
        pass

    #
    # Actions
    #

    def echo(self, *argv):
        """Scenario action: log the given arguments verbatim (useful for scenario debugging)."""
        msg("%s ECHO" % self.my_id, ' '.join(argv))

    def set_community_args(self, args):
        """Set positional args passed to the community constructor.

        Example: '1292333014,12923340000'
        """
        self.community_args = args.split(',')

    def set_community_kwargs(self, kwargs):
        """Set keyword args passed to the community constructor from a comma-separated list.

        Example: 'startingtimestamp=1292333014,endingtimestamp=12923340000'
        """
        for karg in kwargs.split(","):
            if "=" in karg:
                # Split on the first '=' only, so values may themselves contain '='.
                key, value = karg.split("=", 1)
                self.community_kwargs[key.strip()] = value.strip()

    def set_community_kwarg(self, key, value):
        """Scenario action: set a single community constructor keyword argument."""
        self.community_kwargs[key] = value

    def set_database_file(self, filename):
        """Scenario action: use *filename* for the Dispersy database (default 'dispersy.db')."""
        self._database_file = unicode(filename)

    def use_memory_database(self):
        """Scenario action: keep the Dispersy database in memory instead of on disk."""
        self._database_file = u':memory:'

    def set_ignore_exceptions(self, boolean):
        """Scenario action: disable strict mode when *boolean* parses as true (see str2bool)."""
        self._strict = not self.str2bool(boolean)

    def start_dispersy(self):
        """Scenario action: create and start the Dispersy instance and its members.

        The endpoint listens on port 12000 + my_id.  In strict mode an
        exception handler is attached that logs the failure, records exit
        status 1 and schedules the experiment to stop.
        """
        msg("Starting dispersy")
        # We need to import the stuff _AFTER_ configuring the logging stuff.
        from Tribler.dispersy.callback import Callback
        from Tribler.dispersy.dispersy import Dispersy
        from Tribler.dispersy.endpoint import StandaloneEndpoint
        self._dispersy = Dispersy(Callback("Dispersy"), StandaloneEndpoint(int(self.my_id) + 12000, '0.0.0.0'), u'.', self._database_file)
        self._dispersy.statistics.enable_debug_statistics(True)

        if self._strict:
            def exception_handler(exception, fatal):
                # Any exception on the Dispersy thread aborts the whole experiment.
                msg("An exception occurred. Quitting because we are running with --strict enabled.")
                print "Exception was:"
                # Re-raise so print_exc() can render the full traceback.
                try:
                    raise exception
                except:
                    from traceback import print_exc
                    print_exc()
                # Set Dispersy's exit status to error
                self._dispersy_exit_status = 1
                # Stop the experiment
                reactor.callLater(1, self.stop)
                return True
            self._dispersy.callback.attach_exception_handler(exception_handler)

        self._dispersy.start()

        # low (NID_sect233k1) isn't actually that low, switching to 160bits as this is comparable to rsa 1024
        # http://www.nsa.gov/business/programs/elliptic_curve.shtml
        # speed difference when signing/verifying 100 items
        # NID_sect233k1 signing took 0.171 verify took 0.35 totals 0.521
        # NID_secp160k1 signing took 0.04 verify took 0.04 totals 0.08
        self._my_member = self._dispersy.callback.call(self._dispersy.get_new_member, (u"NID_secp160k1",))
        self._master_member = self._dispersy.callback.call(self._dispersy.get_member, (self.master_key,))
        # _do_log is a generator; the callback re-schedules it each time it yields.
        self._dispersy.callback.register(self._do_log)
        msg("Finished starting dispersy")

    def stop_dispersy(self):
        """Scenario action: stop Dispersy on a worker thread and record its exit status."""
        def onDispersyStopped(result):
            self._dispersy_exit_status = result
        d = deferToThread(self._dispersy.stop)
        d.addCallback(onDispersyStopped)

    def stop(self, retry=3):
        """Stop the reactor, polling up to *retry* times (1s apart) for Dispersy's exit status."""
        retry = int(retry)
        if self._dispersy_exit_status is None and retry:
            reactor.callLater(1, self.stop, retry - 1)
        else:
            msg("Dispersy exit status was:", self._dispersy_exit_status)
            reactor.callLater(0, reactor.stop)

    def set_master_member(self, pub_key):
        """Scenario action: set the community master member's public key (hex encoded)."""
        self.master_key = pub_key.decode("HEX")

    @call_on_dispersy_thread
    def online(self):
        """Scenario action: join (first time) or re-load the community. No-op when already online."""
        msg("Trying to go online")
        if self._community is None:
            msg("online")

            if self._is_joined:
                # Already joined earlier; just re-load the community from the database.
                self._community = self.community_class.load_community(self._dispersy, self._master_member, *self.community_args, **self.community_kwargs)
            else:
                msg("join community %s as %s", self._master_member.mid.encode("HEX"), self._my_member.mid.encode("HEX"))
                self._community = self.community_class.join_community(self._dispersy, self._master_member, self._my_member, *self.community_args, **self.community_kwargs)
                # Prevent Dispersy from auto-loading it behind our back; we control on/offline.
                self._community.auto_load = False
                self._is_joined = True

            self.print_on_change('community-kwargs', {}, self.community_kwargs)
            self.print_on_change('community-env', {}, {'pid':getpid()})
        else:
            msg("online (we are already online)")

    @call_on_dispersy_thread
    def offline(self):
        """Scenario action: unload all communities. No-op when already offline."""
        if self._community is None:
            msg("offline (we are already offline)")
        else:
            msg("offline")
            for community in self._dispersy.get_communities():
                community.unload_community()
            self._community = None

    @call_on_dispersy_thread
    def reset_dispersy_statistics(self):
        """Scenario action: reset Dispersy's counters and _do_log's snapshots."""
        self._reset_statistics = True
        self._dispersy._statistics.reset()

    def annotate(self, message):
        """Scenario action: write a free-form annotation line to the stats log."""
        self._stats_file.write('%f %s %s %s\n' % (time(), self.my_id, "annotate", message))

    def peertype(self, peertype):
        """Scenario action: record this peer's type (e.g. seeder/leecher) in the stats log."""
        self._stats_file.write('%f %s %s %s\n' % (time(), self.my_id, "peertype", peertype))

    #
    # Aux. functions
    #

    def str2bool(self, v):
        """Parse a scenario string as a boolean ('yes'/'true'/'t'/'1', case-insensitive)."""
        return v.lower() in ("yes", "true", "t", "1")

    def str2tuple(self, v):
        """Parse a scenario scalar: '<a>t<b>' -> (int, int) tuple, 'x.y' -> float, else int."""
        if len(v) > 1 and v[1] == "t":
            return (int(v[0]), int(v[2:]))
        if len(v) > 1 and v[1] == ".":
            return float(v)
        return int(v)

    def print_on_change(self, name, prev_dict, cur_dict):
        """Write *cur_dict*'s changed entries (vs *prev_dict*) to the stats log as JSON.

        Returns the normalized current dict when something changed (caller
        stores it as the new snapshot), otherwise returns *prev_dict*
        unchanged.  Keys/values that are not primitive (or Iterable, for
        values) are stringified so json.dumps cannot fail on them.
        """
        new_values = {}
        changed_values = {}

        if cur_dict:
            for key, value in cur_dict.iteritems():
                if not isinstance(key, (basestring, int, long, float)):
                    key = str(key)
                if not isinstance(value, (basestring, int, long, float, Iterable)):
                    value = str(value)
                new_values[key] = value
                if prev_dict.get(key, None) != value:
                    changed_values[key] = value

        if changed_values:
            self._stats_file.write('%f %s %s %s\n' % (time(), self.my_id, name, json.dumps(changed_values)))
            self._stats_file.flush()
            return new_values
        return prev_dict

    def _do_log(self):
        """Periodic statistics logger, run as a Dispersy callback generator.

        Every iteration it snapshots Dispersy's statistics and logs only the
        deltas via print_on_change(); ``yield 1.0`` asks the callback to run
        it again in one second.  When _reset_statistics is set, all previous
        snapshots are discarded so the next pass logs everything afresh.
        """
        from Tribler.dispersy.candidate import CANDIDATE_STUMBLE_LIFETIME
        # cid -> last_stumble timestamp -> set of member mids seen stumbling then.
        stumbled_candidates = defaultdict(lambda:defaultdict(set))

        while True:
            if self._reset_statistics:
                prev_statistics = {}
                prev_total_received = {}
                prev_total_dropped = {}
                prev_total_delayed = {}
                prev_total_outgoing = {}
                prev_total_fail = {}
                prev_endpoint_recv = {}
                prev_endpoint_send = {}
                prev_created_messages = {}
                prev_bootstrap_candidates = {}
                self._reset_statistics = False

            self._dispersy.statistics.update()

            communities_dict = []
            for c in self._dispersy.statistics.communities:
                # we add all candidates which have a last_stumble > now - CANDIDATE_STUMBLE_LIFETIME
                now = time()
                for candidate in c._community.candidates.itervalues():
                    if candidate.last_stumble > now - CANDIDATE_STUMBLE_LIFETIME:
                        mid = list(candidate.get_members())[0].mid
                        stumbled_candidates[c.hex_cid][candidate.last_stumble].add(mid)
                nr_stumbled_candidates = sum(len(members) for members in stumbled_candidates[c.hex_cid].values())

                communities_dict.append({'cid': c.hex_cid,
                                         'classification': c.classification,
                                         'global_time': c.global_time,
                                         'sync_bloom_new': c.sync_bloom_new,
                                         'sync_bloom_reuse': c.sync_bloom_reuse,
                                         'sync_bloom_send': c.sync_bloom_send,
                                         'sync_bloom_skip': c.sync_bloom_skip,
                                         'nr_candidates': len(c.candidates) if c.candidates else 0,
                                         'nr_stumbled_candidates': nr_stumbled_candidates})

            statistics_dict = {'conn_type': self._dispersy.statistics.connection_type,
                               'received_count': self._dispersy.statistics.received_count,
                               'success_count': self._dispersy.statistics.success_count,
                               'drop_count': self._dispersy.statistics.drop_count,
                               'delay_count': self._dispersy.statistics.delay_count,
                               'delay_success': self._dispersy.statistics.delay_success,
                               'delay_timeout': self._dispersy.statistics.delay_timeout,
                               'delay_send': self._dispersy.statistics.delay_send,
                               'created_count': self._dispersy.statistics.created_count,
                               'total_up': self._dispersy.statistics.total_up,
                               'total_down': self._dispersy.statistics.total_down,
                               'total_send': self._dispersy.statistics.total_send,
                               'cur_sendqueue': self._dispersy.statistics.cur_sendqueue,
                               'total_candidates_discovered': self._dispersy.statistics.total_candidates_discovered,
                               'walk_attempt': self._dispersy.statistics.walk_attempt,
                               'walk_success': self._dispersy.statistics.walk_success,
                               'walk_bootstrap_attempt': self._dispersy.statistics.walk_bootstrap_attempt,
                               'walk_bootstrap_success': self._dispersy.statistics.walk_bootstrap_success,
                               'walk_reset': self._dispersy.statistics.walk_reset,
                               'walk_invalid_response_identifier': self._dispersy.statistics.walk_invalid_response_identifier,
                               'walk_advice_outgoing_request': self._dispersy.statistics.walk_advice_outgoing_request,
                               'walk_advice_incoming_response': self._dispersy.statistics.walk_advice_incoming_response,
                               'walk_advice_incoming_response_new': self._dispersy.statistics.walk_advice_incoming_response_new,
                               'walk_advice_incoming_request': self._dispersy.statistics.walk_advice_incoming_request,
                               'walk_advice_outgoing_response': self._dispersy.statistics.walk_advice_outgoing_response,
                               'communities': communities_dict}

            # Each call logs only the delta and hands back the snapshot to keep for next round.
            prev_statistics = self.print_on_change("statistics", prev_statistics, statistics_dict)
            prev_total_dropped = self.print_on_change("statistics-dropped-messages", prev_total_dropped, self._dispersy.statistics.drop)
            prev_total_delayed = self.print_on_change("statistics-delayed-messages", prev_total_delayed, self._dispersy.statistics.delay)
            prev_total_received = self.print_on_change("statistics-successful-messages", prev_total_received, self._dispersy.statistics.success)
            prev_total_outgoing = self.print_on_change("statistics-outgoing-messages", prev_total_outgoing, self._dispersy.statistics.outgoing)
            prev_created_messages = self.print_on_change("statistics-created-messages", prev_created_messages, self._dispersy.statistics.created)
            prev_total_fail = self.print_on_change("statistics-walk-fail", prev_total_fail, self._dispersy.statistics.walk_fail)
            prev_endpoint_recv = self.print_on_change("statistics-endpoint-recv", prev_endpoint_recv, self._dispersy.statistics.endpoint_recv)
            prev_endpoint_send = self.print_on_change("statistics-endpoint-send", prev_endpoint_send, self._dispersy.statistics.endpoint_send)
            prev_bootstrap_candidates = self.print_on_change("statistics-bootstrap-candidates", prev_bootstrap_candidates, self._dispersy.statistics.bootstrap_candidates)

            yield 1.0
class TriblerLaunchMany(TaskManager): def __init__(self): """ Called only once (unless we have multiple Sessions) by MainThread """ super(TriblerLaunchMany, self).__init__() self.initComplete = False self.registered = False self.dispersy = None self.ipv8 = None self.state_cb_count = 0 self.previous_active_downloads = [] self.download_states_lc = None self.get_peer_list = [] self._logger = logging.getLogger(self.__class__.__name__) self.downloads = {} self.upnp_ports = [] self.session = None self.session_lock = None self.sessdoneflag = Event() self.shutdownstarttime = None # modules self.torrent_store = None self.metadata_store = None self.rtorrent_handler = None self.tftp_handler = None self.api_manager = None self.watch_folder = None self.version_check_manager = None self.resource_monitor = None self.category = None self.peer_db = None self.torrent_db = None self.mypref_db = None self.votecast_db = None self.channelcast_db = None self.search_manager = None self.channel_manager = None self.video_server = None self.mainline_dht = None self.ltmgr = None self.tracker_manager = None self.torrent_checker = None self.tunnel_community = None self.triblerchain_community = None self.startup_deferred = Deferred() self.credit_mining_manager = None self.market_community = None def register(self, session, session_lock): assert isInIOThread() if not self.registered: self.registered = True self.session = session self.session_lock = session_lock # On Mac, we bundle the root certificate for the SSL validation since Twisted is not using the root # certificates provided by the system trust store. 
if sys.platform == 'darwin': os.environ['SSL_CERT_FILE'] = os.path.join( get_lib_path(), 'root_certs_mac.pem') if self.session.config.get_torrent_store_enabled(): from Tribler.Core.leveldbstore import LevelDbStore self.torrent_store = LevelDbStore( self.session.config.get_torrent_store_dir()) if not self.torrent_store.get_db(): raise RuntimeError( "Torrent store (leveldb) is None which should not normally happen" ) if self.session.config.get_metadata_enabled(): from Tribler.Core.leveldbstore import LevelDbStore self.metadata_store = LevelDbStore( self.session.config.get_metadata_store_dir()) if not self.metadata_store.get_db(): raise RuntimeError( "Metadata store (leveldb) is None which should not normally happen" ) # torrent collecting: RemoteTorrentHandler if self.session.config.get_torrent_collecting_enabled(): from Tribler.Core.RemoteTorrentHandler import RemoteTorrentHandler self.rtorrent_handler = RemoteTorrentHandler(self.session) # TODO(emilon): move this to a megacache component or smth if self.session.config.get_megacache_enabled(): from Tribler.Core.CacheDB.SqliteCacheDBHandler import ( PeerDBHandler, TorrentDBHandler, MyPreferenceDBHandler, VoteCastDBHandler, ChannelCastDBHandler) from Tribler.Core.Category.Category import Category self._logger.debug('tlm: Reading Session state from %s', self.session.config.get_state_dir()) self.category = Category() # create DBHandlers self.peer_db = PeerDBHandler(self.session) self.torrent_db = TorrentDBHandler(self.session) self.mypref_db = MyPreferenceDBHandler(self.session) self.votecast_db = VoteCastDBHandler(self.session) self.channelcast_db = ChannelCastDBHandler(self.session) # initializes DBHandlers self.peer_db.initialize() self.torrent_db.initialize() self.mypref_db.initialize() self.votecast_db.initialize() self.channelcast_db.initialize() from Tribler.Core.Modules.tracker_manager import TrackerManager self.tracker_manager = TrackerManager(self.session) if self.session.config.get_video_server_enabled(): 
self.video_server = VideoServer( self.session.config.get_video_server_port(), self.session) self.video_server.start() # IPv8 if self.session.config.get_ipv8_enabled(): from Tribler.pyipv8.ipv8.configuration import get_default_configuration ipv8_config = get_default_configuration() ipv8_config['port'] = self.session.config.get_dispersy_port() ipv8_config['address'] = self.session.config.get_ipv8_address() ipv8_config['overlays'] = [] ipv8_config['keys'] = [] # We load the keys ourselves if self.session.config.get_ipv8_bootstrap_override(): import Tribler.pyipv8.ipv8.deprecated.community as community_file community_file._DEFAULT_ADDRESSES = [ self.session.config.get_ipv8_bootstrap_override() ] community_file._DNS_ADDRESSES = [] self.ipv8 = IPv8(ipv8_config) self.session.config.set_anon_proxy_settings( 2, ("127.0.0.1", self.session.config. get_tunnel_community_socks5_listen_ports())) # Dispersy self.tftp_handler = None if self.session.config.get_dispersy_enabled(): from Tribler.dispersy.dispersy import Dispersy from Tribler.dispersy.endpoint import MIMEndpoint from Tribler.dispersy.endpoint import IPv8toDispersyAdapter # set communication endpoint if self.session.config.get_ipv8_enabled(): dispersy_endpoint = IPv8toDispersyAdapter( self.ipv8.endpoint) else: dispersy_endpoint = MIMEndpoint( self.session.config.get_dispersy_port()) working_directory = unicode( self.session.config.get_state_dir()) self.dispersy = Dispersy(dispersy_endpoint, working_directory) self.dispersy.statistics.enable_debug_statistics(False) # register TFTP service from Tribler.Core.TFTP.handler import TftpHandler self.tftp_handler = TftpHandler(self.session, dispersy_endpoint, "fffffffd".decode('hex'), block_size=1024) self.tftp_handler.initialize() # Torrent search if self.session.config.get_torrent_search_enabled( ) or self.session.config.get_channel_search_enabled(): self.search_manager = SearchManager(self.session) self.search_manager.initialize() if not self.initComplete: self.init() 
self.session.add_observer(self.on_tribler_started, NTFY_TRIBLER, [NTFY_STARTED]) self.session.notifier.notify(NTFY_TRIBLER, NTFY_STARTED, None) return self.startup_deferred def on_tribler_started(self, subject, changetype, objectID, *args): reactor.callFromThread(self.startup_deferred.callback, None) @blocking_call_on_reactor_thread def load_ipv8_overlays(self): # Discovery Community with open(self.session.config.get_permid_keypair_filename(), 'r') as key_file: content = key_file.read() content = content[31:-30].replace('\n', '').decode("BASE64") peer = Peer(M2CryptoSK(keystring=content)) discovery_community = DiscoveryCommunity(peer, self.ipv8.endpoint, self.ipv8.network) discovery_community.resolve_dns_bootstrap_addresses() self.ipv8.overlays.append(discovery_community) self.ipv8.strategies.append((RandomChurn(discovery_community), -1)) if not self.session.config.get_dispersy_enabled(): self.ipv8.strategies.append((RandomWalk(discovery_community), 20)) # TriblerChain Community if self.session.config.get_trustchain_enabled(): triblerchain_peer = Peer(self.session.trustchain_keypair) from Tribler.community.triblerchain.community import TriblerChainCommunity self.triblerchain_community = TriblerChainCommunity( triblerchain_peer, self.ipv8.endpoint, self.ipv8.network, tribler_session=self.session, working_directory=self.session.config.get_state_dir()) self.ipv8.overlays.append(self.triblerchain_community) self.ipv8.strategies.append( (EdgeWalk(self.triblerchain_community), 20)) # Tunnel Community if self.session.config.get_tunnel_community_enabled(): tunnel_peer = Peer(self.session.trustchain_keypair) from Tribler.community.triblertunnel.community import TriblerTunnelCommunity self.tunnel_community = TriblerTunnelCommunity( tunnel_peer, self.ipv8.endpoint, self.ipv8.network, tribler_session=self.session, dht_provider=MainlineDHTProvider( self.mainline_dht, self.session.config.get_dispersy_port()), triblerchain_community=self.triblerchain_community) 
self.ipv8.overlays.append(self.tunnel_community) self.ipv8.strategies.append( (RandomWalk(self.tunnel_community), 20)) # Market Community if self.session.config.get_market_community_enabled(): wallets = {} try: from Tribler.community.market.wallet.btc_wallet import BitcoinWallet, BitcoinTestnetWallet wallet_type = BitcoinTestnetWallet if self.session.config.get_btc_testnet( ) else BitcoinWallet btc_wallet = wallet_type( os.path.join(self.session.config.get_state_dir(), 'wallet')) wallets[btc_wallet.get_identifier()] = btc_wallet except ImportError: self._logger.error( "Electrum wallet cannot be found, Bitcoin trading not available!" ) mc_wallet = TrustchainWallet(self.triblerchain_community) wallets[mc_wallet.get_identifier()] = mc_wallet if self.session.config.get_dummy_wallets_enabled(): # For debugging purposes, we create dummy wallets dummy_wallet1 = DummyWallet1() wallets[dummy_wallet1.get_identifier()] = dummy_wallet1 dummy_wallet2 = DummyWallet2() wallets[dummy_wallet2.get_identifier()] = dummy_wallet2 from Tribler.community.market.community import MarketCommunity market_peer = Peer(self.session.tradechain_keypair) self.market_community = MarketCommunity( market_peer, self.ipv8.endpoint, self.ipv8.network, tribler_session=self.session, wallets=wallets, working_directory=self.session.config.get_state_dir()) self.ipv8.overlays.append(self.market_community) self.ipv8.strategies.append( (RandomWalk(self.market_community), 20)) @blocking_call_on_reactor_thread def load_dispersy_communities(self): self._logger.info("tribler: Preparing Dispersy communities...") now_time = timemod.time() default_kwargs = {'tribler_session': self.session} # Search Community if self.session.config.get_torrent_search_enabled() and self.dispersy: from Tribler.community.search.community import SearchCommunity self.dispersy.define_auto_load(SearchCommunity, self.session.dispersy_member, load=True, kargs=default_kwargs) # AllChannel Community if 
self.session.config.get_channel_search_enabled() and self.dispersy: from Tribler.community.allchannel.community import AllChannelCommunity self.dispersy.define_auto_load(AllChannelCommunity, self.session.dispersy_member, load=True, kargs=default_kwargs) # Channel Community if self.session.config.get_channel_community_enabled( ) and self.dispersy: from Tribler.community.channel.community import ChannelCommunity self.dispersy.define_auto_load(ChannelCommunity, self.session.dispersy_member, load=True, kargs=default_kwargs) # PreviewChannel Community if self.session.config.get_preview_channel_community_enabled( ) and self.dispersy: from Tribler.community.channel.preview import PreviewChannelCommunity self.dispersy.define_auto_load(PreviewChannelCommunity, self.session.dispersy_member, kargs=default_kwargs) self._logger.info("tribler: communities are ready in %.2f seconds", timemod.time() - now_time) def init(self): if self.dispersy: from Tribler.dispersy.community import HardKilledCommunity self._logger.info("lmc: Starting Dispersy...") self.session.readable_status = STATE_STARTING_DISPERSY now = timemod.time() success = self.dispersy.start(self.session.autoload_discovery) diff = timemod.time() - now if success: self._logger.info( "lmc: Dispersy started successfully in %.2f seconds [port: %d]", diff, self.dispersy.wan_address[1]) else: self._logger.info( "lmc: Dispersy failed to start in %.2f seconds", diff) self.upnp_ports.append((self.dispersy.wan_address[1], 'UDP')) from Tribler.dispersy.crypto import M2CryptoSK private_key = self.dispersy.crypto.key_to_bin( M2CryptoSK(filename=self.session.config. 
get_permid_keypair_filename())) self.session.dispersy_member = blockingCallFromThread( reactor, self.dispersy.get_member, private_key=private_key) blockingCallFromThread(reactor, self.dispersy.define_auto_load, HardKilledCommunity, self.session.dispersy_member, load=True) if self.session.config.get_megacache_enabled(): self.dispersy.database.attach_commit_callback( self.session.sqlite_db.commit_now) # notify dispersy finished loading self.session.notifier.notify(NTFY_DISPERSY, NTFY_STARTED, None) self.session.readable_status = STATE_LOADING_COMMUNITIES # We should load the mainline DHT before loading the IPv8 overlays since the DHT is used for the tunnel overlay. if self.session.config.get_mainline_dht_enabled(): self.session.readable_status = STATE_START_MAINLINE_DHT from Tribler.Core.DecentralizedTracking import mainlineDHT self.mainline_dht = mainlineDHT.init( ('127.0.0.1', self.session.config.get_mainline_dht_port()), self.session.config.get_state_dir()) self.upnp_ports.append( (self.session.config.get_mainline_dht_port(), 'UDP')) if self.ipv8: self.load_ipv8_overlays() if self.dispersy: self.load_dispersy_communities() tunnel_community_ports = self.session.config.get_tunnel_community_socks5_listen_ports( ) self.session.config.set_anon_proxy_settings( 2, ("127.0.0.1", tunnel_community_ports)) if self.session.config.get_channel_search_enabled( ) and self.session.config.get_dispersy_enabled(): self.session.readable_status = STATE_INITIALIZE_CHANNEL_MGR from Tribler.Core.Modules.channel.channel_manager import ChannelManager self.channel_manager = ChannelManager(self.session) self.channel_manager.initialize() if self.session.config.get_libtorrent_enabled(): self.session.readable_status = STATE_START_LIBTORRENT from Tribler.Core.Libtorrent.LibtorrentMgr import LibtorrentMgr self.ltmgr = LibtorrentMgr(self.session) self.ltmgr.initialize() for port, protocol in self.upnp_ports: self.ltmgr.add_upnp_mapping(port, protocol) # add task for tracker checking if 
self.session.config.get_torrent_checking_enabled(): self.session.readable_status = STATE_START_TORRENT_CHECKER self.torrent_checker = TorrentChecker(self.session) self.torrent_checker.initialize() if self.rtorrent_handler and self.session.config.get_dispersy_enabled( ): self.session.readable_status = STATE_START_REMOTE_TORRENT_HANDLER self.rtorrent_handler.initialize() if self.api_manager: self.session.readable_status = STATE_START_API_ENDPOINTS self.api_manager.root_endpoint.start_endpoints() if self.session.config.get_watch_folder_enabled(): self.session.readable_status = STATE_START_WATCH_FOLDER self.watch_folder = WatchFolder(self.session) self.watch_folder.start() if self.session.config.get_credit_mining_enabled(): self.session.readable_status = STATE_START_CREDIT_MINING from Tribler.Core.CreditMining.CreditMiningManager import CreditMiningManager self.credit_mining_manager = CreditMiningManager(self.session) if self.session.config.get_resource_monitor_enabled(): self.resource_monitor = ResourceMonitor(self.session) self.resource_monitor.start() self.version_check_manager = VersionCheckManager(self.session) self.session.set_download_states_callback(self.sesscb_states_callback) self.initComplete = True def add(self, tdef, dscfg, pstate=None, setupDelay=0, hidden=False, share_mode=False, checkpoint_disabled=False): """ Called by any thread """ d = None with self.session_lock: if not isinstance( tdef, TorrentDefNoMetainfo) and not tdef.is_finalized(): raise ValueError("TorrentDef not finalized") infohash = tdef.get_infohash() # Create the destination directory if it does not exist yet try: if not os.path.isdir(dscfg.get_dest_dir()): os.makedirs(dscfg.get_dest_dir()) except OSError: self._logger.error( "Unable to create the download destination directory.") if dscfg.get_time_added() == 0: dscfg.set_time_added(int(timemod.time())) # Check if running or saved on disk if infohash in self.downloads: self._logger.info( "Torrent already exists in the downloads. 
Infohash:%s", infohash.encode('hex')) from Tribler.Core.Libtorrent.LibtorrentDownloadImpl import LibtorrentDownloadImpl d = LibtorrentDownloadImpl(self.session, tdef) if pstate is None: # not already resuming pstate = self.load_download_pstate_noexc(infohash) if pstate is not None: self._logger.debug("tlm: add: pstate is %s %s", pstate.get('dlstate', 'status'), pstate.get('dlstate', 'progress')) # Store in list of Downloads, always. self.downloads[infohash] = d setup_deferred = d.setup(dscfg, pstate, wrapperDelay=setupDelay, share_mode=share_mode, checkpoint_disabled=checkpoint_disabled) setup_deferred.addCallback(self.on_download_handle_created) if d and not hidden and self.session.config.get_megacache_enabled(): @forceDBThread def write_my_pref(): torrent_id = self.torrent_db.getTorrentID(infohash) data = {'destination_path': d.get_dest_dir()} self.mypref_db.addMyPreference(torrent_id, data) if isinstance(tdef, TorrentDefNoMetainfo): self.torrent_db.addOrGetTorrentID(tdef.get_infohash()) self.torrent_db.updateTorrent(tdef.get_infohash(), name=tdef.get_name_as_unicode()) write_my_pref() elif self.rtorrent_handler: self.rtorrent_handler.save_torrent(tdef, write_my_pref) else: self.torrent_db.addExternalTorrent( tdef, extra_info={'status': 'good'}) write_my_pref() return d def on_download_handle_created(self, download): """ This method is called when the download handle has been created. Immediately checkpoint the download and write the resume data. 
""" return download.checkpoint() def remove(self, d, removecontent=False, removestate=True, hidden=False): """ Called by any thread """ out = None with self.session_lock: out = d.stop_remove(removestate=removestate, removecontent=removecontent) infohash = d.get_def().get_infohash() if infohash in self.downloads: del self.downloads[infohash] if not hidden: self.remove_id(infohash) if self.tunnel_community: self.tunnel_community.on_download_removed(d) return out or succeed(None) def remove_id(self, infohash): @forceDBThread def do_db(): torrent_id = self.torrent_db.getTorrentID(infohash) if torrent_id: self.mypref_db.deletePreference(torrent_id) if self.session.config.get_megacache_enabled(): do_db() def get_downloads(self): """ Called by any thread """ with self.session_lock: return self.downloads.values() # copy, is mutable def get_download(self, infohash): """ Called by any thread """ with self.session_lock: return self.downloads.get(infohash, None) def download_exists(self, infohash): with self.session_lock: return infohash in self.downloads @blocking_call_on_reactor_thread @inlineCallbacks def update_download_hops(self, download, new_hops): """ Update the amount of hops for a specified download. This can be done on runtime. """ infohash = binascii.hexlify(download.tdef.get_infohash()) self._logger.info("Updating the amount of hops of download %s", infohash) yield self.session.remove_download(download) # copy the old download_config and change the hop count dscfg = download.copy() dscfg.set_hops(new_hops) self.session.start_download_from_tdef(download.tdef, dscfg) def update_trackers(self, infohash, trackers): """ Update the trackers for a download. :param infohash: infohash of the torrent that needs to be updated :param trackers: A list of tracker urls. 
""" dl = self.get_download(infohash) old_def = dl.get_def() if dl else None if old_def: old_trackers = old_def.get_trackers_as_single_tuple() new_trackers = list(set(trackers) - set(old_trackers)) all_trackers = list(old_trackers) + new_trackers if new_trackers: # Add new trackers to the download dl.add_trackers(new_trackers) # Create a new TorrentDef if isinstance(old_def, TorrentDefNoMetainfo): new_def = TorrentDefNoMetainfo(old_def.get_infohash(), old_def.get_name(), dl.get_magnet_link()) else: metainfo = old_def.get_metainfo() if len(all_trackers) > 1: metainfo["announce-list"] = [all_trackers] else: metainfo["announce"] = all_trackers[0] new_def = TorrentDef.load_from_dict(metainfo) # Set TorrentDef + checkpoint dl.set_def(new_def) dl.checkpoint() if isinstance(old_def, TorrentDefNoMetainfo): @forceDBThread def update_trackers_db(infohash, new_trackers): torrent_id = self.torrent_db.getTorrentID(infohash) if torrent_id is not None: self.torrent_db.addTorrentTrackerMappingInBatch( torrent_id, new_trackers) self.session.notifier.notify( NTFY_TORRENTS, NTFY_UPDATE, infohash) if self.session.config.get_megacache_enabled(): update_trackers_db(infohash, new_trackers) elif not isinstance( old_def, TorrentDefNoMetainfo) and self.rtorrent_handler: # Update collected torrents self.rtorrent_handler.save_torrent(new_def) # # State retrieval # def stop_download_states_callback(self): """ Stop any download states callback if present. """ if self.is_pending_task_active("download_states_lc"): self.cancel_pending_task("download_states_lc") def set_download_states_callback(self, user_callback, interval=1.0): """ Set the download state callback. Remove any old callback if it's present. 
""" self.stop_download_states_callback() self._logger.debug( "Starting the download state callback with interval %f", interval) self.download_states_lc = self.register_task( "download_states_lc", LoopingCall(self._invoke_states_cb, user_callback)) self.download_states_lc.start(interval) def _invoke_states_cb(self, callback): """ Invoke the download states callback with a list of the download states. """ dslist = [] for d in self.downloads.values(): d.set_moreinfo_stats( True in self.get_peer_list or d.get_def().get_infohash() in self.get_peer_list) ds = d.network_get_state(None) dslist.append(ds) def on_cb_done(new_get_peer_list): self.get_peer_list = new_get_peer_list return deferToThread(callback, dslist).addCallback(on_cb_done) def sesscb_states_callback(self, states_list): """ This method is periodically (every second) called with a list of the download states of the active downloads. """ self.state_cb_count += 1 # Check to see if a download has finished new_active_downloads = [] do_checkpoint = False seeding_download_list = [] for ds in states_list: state = ds.get_status() download = ds.get_download() tdef = download.get_def() safename = tdef.get_name_as_unicode() if state == DLSTATUS_DOWNLOADING: new_active_downloads.append(safename) elif state == DLSTATUS_STOPPED_ON_ERROR: self._logger.error("Error during download: %s", repr(ds.get_error())) if self.download_exists(tdef.get_infohash()): self.get_download(tdef.get_infohash()).stop() self.session.notifier.notify(NTFY_TORRENT, NTFY_ERROR, tdef.get_infohash(), repr(ds.get_error())) elif state == DLSTATUS_SEEDING: seeding_download_list.append({ u'infohash': tdef.get_infohash(), u'download': download }) if safename in self.previous_active_downloads: self.session.notifier.notify(NTFY_TORRENT, NTFY_FINISHED, tdef.get_infohash(), safename) do_checkpoint = True elif download.get_hops() == 0 and download.get_safe_seeding(): # Re-add the download with anonymity enabled hops = 
self.session.config.get_default_number_hops() self.update_download_hops(download, hops) self.previous_active_downloads = new_active_downloads if do_checkpoint: self.session.checkpoint_downloads() if self.state_cb_count % 4 == 0: if self.tunnel_community: self.tunnel_community.monitor_downloads(states_list) if self.credit_mining_manager: self.credit_mining_manager.monitor_downloads(states_list) return [] # # Persistence methods # def load_checkpoint(self): """ Called by any thread """ def do_load_checkpoint(): with self.session_lock: for i, filename in enumerate( iglob( os.path.join( self.session.get_downloads_pstate_dir(), '*.state'))): self.resume_download(filename, setupDelay=i * 0.1) if self.initComplete: do_load_checkpoint() else: self.register_task("load_checkpoint", reactor.callLater(1, do_load_checkpoint)) def load_download_pstate_noexc(self, infohash): """ Called by any thread, assume session_lock already held """ try: basename = binascii.hexlify(infohash) + '.state' filename = os.path.join(self.session.get_downloads_pstate_dir(), basename) if os.path.exists(filename): return self.load_download_pstate(filename) else: self._logger.info("%s not found", basename) except Exception: self._logger.exception("Exception while loading pstate: %s", infohash) def resume_download(self, filename, setupDelay=0): tdef = dscfg = pstate = None try: pstate = self.load_download_pstate(filename) # SWIFTPROC metainfo = pstate.get('state', 'metainfo') if 'infohash' in metainfo: tdef = TorrentDefNoMetainfo(metainfo['infohash'], metainfo['name'], metainfo.get('url', None)) else: tdef = TorrentDef.load_from_dict(metainfo) if pstate.has_option('download_defaults', 'saveas') and \ isinstance(pstate.get('download_defaults', 'saveas'), tuple): pstate.set('download_defaults', 'saveas', pstate.get('download_defaults', 'saveas')[-1]) dscfg = DownloadStartupConfig(pstate) except: # pstate is invalid or non-existing _, file = os.path.split(filename) infohash = binascii.unhexlify(file[:-6]) 
torrent_data = self.torrent_store.get(infohash) if torrent_data: try: tdef = TorrentDef.load_from_memory(torrent_data) defaultDLConfig = DefaultDownloadStartupConfig.getInstance( ) dscfg = defaultDLConfig.copy() if self.mypref_db is not None: dest_dir = self.mypref_db.getMyPrefStatsInfohash( infohash) if dest_dir and os.path.isdir(dest_dir): dscfg.set_dest_dir(dest_dir) except ValueError: self._logger.warning("tlm: torrent data invalid") if pstate is not None: has_resume_data = pstate.get('state', 'engineresumedata') is not None self._logger.debug( "tlm: load_checkpoint: resumedata %s", 'len %s ' % len(pstate.get('state', 'engineresumedata')) if has_resume_data else 'None') if tdef and dscfg: if dscfg.get_dest_dir() != '': # removed torrent ignoring try: if not self.download_exists(tdef.get_infohash()): self.add(tdef, dscfg, pstate, setupDelay=setupDelay) else: self._logger.info( "tlm: not resuming checkpoint because download has already been added" ) except Exception as e: self._logger.exception( "tlm: load check_point: exception while adding download %s", tdef) else: self._logger.info("tlm: removing checkpoint %s destdir is %s", filename, dscfg.get_dest_dir()) os.remove(filename) else: self._logger.info("tlm: could not resume checkpoint %s %s %s", filename, tdef, dscfg) def checkpoint_downloads(self): """ Checkpoints all running downloads in Tribler. Even if the list of Downloads changes in the mean time this is no problem. For removals, dllist will still hold a pointer to the download, and additions are no problem (just won't be included in list of states returned via callback). """ downloads = self.downloads.values() deferred_list = [] self._logger.debug("tlm: checkpointing %s downloads", len(downloads)) for download in downloads: deferred_list.append(download.checkpoint()) return DeferredList(deferred_list) def shutdown_downloads(self): """ Shutdown all downloads in Tribler. 
""" for download in self.downloads.values(): download.stop() def remove_pstate(self, infohash): def do_remove(): if not self.download_exists(infohash): dlpstatedir = self.session.get_downloads_pstate_dir() # Remove checkpoint hexinfohash = binascii.hexlify(infohash) try: basename = hexinfohash + '.state' filename = os.path.join(dlpstatedir, basename) self._logger.debug( "remove pstate: removing dlcheckpoint entry %s", filename) if os.access(filename, os.F_OK): os.remove(filename) except: # Show must go on self._logger.exception("Could not remove state") else: self._logger.warning( "remove pstate: download is back, restarted? Canceling removal! %s", repr(infohash)) reactor.callFromThread(do_remove) @inlineCallbacks def early_shutdown(self): """ Called as soon as Session shutdown is initiated. Used to start shutdown tasks that takes some time and that can run in parallel to checkpointing, etc. :returns a Deferred that will fire once all dependencies acknowledge they have shutdown. """ self._logger.info("tlm: early_shutdown") self.shutdown_task_manager() # Note: session_lock not held self.shutdownstarttime = timemod.time() if self.credit_mining_manager: yield self.credit_mining_manager.shutdown() self.credit_mining_manager = None if self.torrent_checker: yield self.torrent_checker.shutdown() self.torrent_checker = None if self.channel_manager: yield self.channel_manager.shutdown() self.channel_manager = None if self.search_manager: yield self.search_manager.shutdown() self.search_manager = None if self.rtorrent_handler: yield self.rtorrent_handler.shutdown() self.rtorrent_handler = None if self.video_server: yield self.video_server.shutdown_server() self.video_server = None if self.version_check_manager: self.version_check_manager.stop() self.version_check_manager = None if self.resource_monitor: self.resource_monitor.stop() self.resource_monitor = None self.tracker_manager = None if self.tunnel_community and self.triblerchain_community: # We unload these overlays 
manually since the triblerchain has to be unloaded after the tunnel overlay. yield self.ipv8.unload_overlay(self.tunnel_community) yield self.ipv8.unload_overlay(self.triblerchain_community) if self.dispersy: self._logger.info("lmc: Shutting down Dispersy...") now = timemod.time() try: success = yield self.dispersy.stop() except: print_exc() success = False diff = timemod.time() - now if success: self._logger.info( "lmc: Dispersy successfully shutdown in %.2f seconds", diff) else: self._logger.info( "lmc: Dispersy failed to shutdown in %.2f seconds", diff) if self.ipv8: yield self.ipv8.stop(stop_reactor=False) if self.metadata_store is not None: yield self.metadata_store.close() self.metadata_store = None if self.tftp_handler is not None: yield self.tftp_handler.shutdown() self.tftp_handler = None if self.channelcast_db is not None: yield self.channelcast_db.close() self.channelcast_db = None if self.votecast_db is not None: yield self.votecast_db.close() self.votecast_db = None if self.mypref_db is not None: yield self.mypref_db.close() self.mypref_db = None if self.torrent_db is not None: yield self.torrent_db.close() self.torrent_db = None if self.peer_db is not None: yield self.peer_db.close() self.peer_db = None if self.mainline_dht is not None: from Tribler.Core.DecentralizedTracking import mainlineDHT yield mainlineDHT.deinit(self.mainline_dht) self.mainline_dht = None if self.torrent_store is not None: yield self.torrent_store.close() self.torrent_store = None if self.watch_folder is not None: yield self.watch_folder.stop() self.watch_folder = None # We close the API manager as late as possible during shutdown. 
if self.api_manager is not None: yield self.api_manager.stop() self.api_manager = None def network_shutdown(self): try: self._logger.info("tlm: network_shutdown") ts = enumerate_threads() self._logger.info("tlm: Number of threads still running %d", len(ts)) for t in ts: self._logger.info( "tlm: Thread still running=%s, daemon=%s, instance=%s", t.getName(), t.isDaemon(), t) except: print_exc() # Stop network thread self.sessdoneflag.set() # Shutdown libtorrent session after checkpoints have been made if self.ltmgr is not None: self.ltmgr.shutdown() self.ltmgr = None def save_download_pstate(self, infohash, pstate): """ Called by network thread """ self.downloads[infohash].pstate_for_restart = pstate self.register_task("save_pstate %f" % timemod.clock(), self.downloads[infohash].save_resume_data()) def load_download_pstate(self, filename): """ Called by any thread """ pstate = CallbackConfigParser() pstate.read_file(filename) return pstate
def register(self, session, session_lock):
    """
    Bind this launcher to *session* and build every component that the
    session configuration enables (leveldb stores, megacache DB handlers,
    video server, IPv8, Dispersy + TFTP, torrent search).

    Idempotent with respect to construction: the build work runs only on
    the first call (guarded by ``self.registered``).  Always notifies
    NTFY_TRIBLER/NTFY_STARTED and returns ``self.startup_deferred``.
    """
    assert isInIOThread()
    if not self.registered:
        self.registered = True

        self.session = session
        self.session_lock = session_lock
        config = self.session.config

        # On Mac, we bundle the root certificate for the SSL validation since Twisted is not using the root
        # certificates provided by the system trust store.
        if sys.platform == 'darwin':
            os.environ['SSL_CERT_FILE'] = os.path.join(get_lib_path(), 'root_certs_mac.pem')

        if config.get_torrent_store_enabled():
            from Tribler.Core.leveldbstore import LevelDbStore
            self.torrent_store = LevelDbStore(config.get_torrent_store_dir())
            if not self.torrent_store.get_db():
                raise RuntimeError("Torrent store (leveldb) is None which should not normally happen")

        if config.get_metadata_enabled():
            from Tribler.Core.leveldbstore import LevelDbStore
            self.metadata_store = LevelDbStore(config.get_metadata_store_dir())
            if not self.metadata_store.get_db():
                raise RuntimeError("Metadata store (leveldb) is None which should not normally happen")

        # torrent collecting: RemoteTorrentHandler
        if config.get_torrent_collecting_enabled() and config.get_dispersy_enabled():
            from Tribler.Core.RemoteTorrentHandler import RemoteTorrentHandler
            self.rtorrent_handler = RemoteTorrentHandler(self.session)

        # TODO(emilon): move this to a megacache component or smth
        if config.get_megacache_enabled():
            from Tribler.Core.CacheDB.SqliteCacheDBHandler import (PeerDBHandler, TorrentDBHandler,
                                                                   MyPreferenceDBHandler, VoteCastDBHandler,
                                                                   ChannelCastDBHandler)
            from Tribler.Core.Category.Category import Category

            self._logger.debug('tlm: Reading Session state from %s', config.get_state_dir())

            self.category = Category()

            # create DBHandlers
            self.peer_db = PeerDBHandler(self.session)
            self.torrent_db = TorrentDBHandler(self.session)
            self.mypref_db = MyPreferenceDBHandler(self.session)
            self.votecast_db = VoteCastDBHandler(self.session)
            self.channelcast_db = ChannelCastDBHandler(self.session)

            # initializes DBHandlers
            self.peer_db.initialize()
            self.torrent_db.initialize()
            self.mypref_db.initialize()
            self.votecast_db.initialize()
            self.channelcast_db.initialize()

            from Tribler.Core.Modules.tracker_manager import TrackerManager
            self.tracker_manager = TrackerManager(self.session)

        if config.get_video_server_enabled():
            self.video_server = VideoServer(config.get_video_server_port(), self.session)
            self.video_server.start()

        # IPv8
        if config.get_ipv8_enabled():
            from Tribler.pyipv8.ipv8.configuration import get_default_configuration

            ipv8_conf = get_default_configuration()
            ipv8_conf['port'] = config.get_dispersy_port()
            ipv8_conf['address'] = config.get_ipv8_address()
            ipv8_conf['overlays'] = []
            ipv8_conf['keys'] = []  # We load the keys ourselves

            if config.get_ipv8_bootstrap_override():
                import Tribler.pyipv8.ipv8.deprecated.community as community_module
                community_module._DEFAULT_ADDRESSES = [config.get_ipv8_bootstrap_override()]
                community_module._DNS_ADDRESSES = []

            self.ipv8 = IPv8(ipv8_conf, enable_statistics=config.get_ipv8_statistics())

            self.session.config.set_anon_proxy_settings(
                2, ("127.0.0.1", config.get_tunnel_community_socks5_listen_ports()))

        # Dispersy
        self.tftp_handler = None
        if config.get_dispersy_enabled():
            from Tribler.dispersy.dispersy import Dispersy
            from Tribler.dispersy.endpoint import MIMEndpoint
            from Tribler.dispersy.endpoint import IPv8toDispersyAdapter

            # set communication endpoint
            if config.get_ipv8_enabled():
                endpoint = IPv8toDispersyAdapter(self.ipv8.endpoint)
            else:
                endpoint = MIMEndpoint(config.get_dispersy_port())

            state_dir = unicode(config.get_state_dir())
            self.dispersy = Dispersy(endpoint, state_dir)
            self.dispersy.statistics.enable_debug_statistics(False)

            # register TFTP service
            from Tribler.Core.TFTP.handler import TftpHandler
            self.tftp_handler = TftpHandler(self.session, endpoint, "fffffffd".decode('hex'),
                                            block_size=1024)
            self.tftp_handler.initialize()

        # Torrent search
        if config.get_torrent_search_enabled() or config.get_channel_search_enabled():
            self.search_manager = SearchManager(self.session)
            self.search_manager.initialize()

    if not self.initComplete:
        self.init()

    self.session.add_observer(self.on_tribler_started, NTFY_TRIBLER, [NTFY_STARTED])
    self.session.notifier.notify(NTFY_TRIBLER, NTFY_STARTED, None)

    return self.startup_deferred
def setUp(self, annotate=True):
    """Set up a Dispersy instance with a dummy master member and a fresh member for the tests."""
    # Let the parent fixture run first (generator-style, Twisted deferred setup).
    yield super(AbstractTestCommunity, self).setUp(annotate=annotate)

    endpoint = ManualEnpoint(0)
    self.dispersy = Dispersy(endpoint, self.getStateDir())
    self.dispersy._database.open()

    # Fixed 20-byte dummy master member; a new u"curve25519" keypair for our own member.
    self.master_member = DummyMember(self.dispersy, 1, "a" * 20)
    self.member = self.dispersy.get_new_member(u"curve25519")
class TriblerLaunchMany(TaskManager): def __init__(self): """ Called only once (unless we have multiple Sessions) by MainThread """ super(TriblerLaunchMany, self).__init__() self.initComplete = False self.registered = False self.dispersy = None self.ipv8 = None self.ipv8_start_time = 0 self.state_cb_count = 0 self.previous_active_downloads = [] self.download_states_lc = None self.get_peer_list = [] self._logger = logging.getLogger(self.__class__.__name__) self.downloads = {} self.upnp_ports = [] self.session = None self.session_lock = None self.sessdoneflag = Event() self.shutdownstarttime = None # modules self.torrent_store = None self.metadata_store = None self.rtorrent_handler = None self.tftp_handler = None self.api_manager = None self.watch_folder = None self.version_check_manager = None self.resource_monitor = None self.category = None self.peer_db = None self.torrent_db = None self.mypref_db = None self.votecast_db = None self.channelcast_db = None self.search_manager = None self.channel_manager = None self.video_server = None self.mainline_dht = None self.ltmgr = None self.tracker_manager = None self.torrent_checker = None self.tunnel_community = None self.trustchain_community = None self.wallets = {} self.popularity_community = None self.startup_deferred = Deferred() self.credit_mining_manager = None self.market_community = None self.dht_community = None self.payout_manager = None self.mds = None def register(self, session, session_lock): assert isInIOThread() if not self.registered: self.registered = True self.session = session self.session_lock = session_lock # On Mac, we bundle the root certificate for the SSL validation since Twisted is not using the root # certificates provided by the system trust store. 
if sys.platform == 'darwin': os.environ['SSL_CERT_FILE'] = os.path.join(get_lib_path(), 'root_certs_mac.pem') if self.session.config.get_torrent_store_enabled(): from Tribler.Core.leveldbstore import LevelDbStore self.torrent_store = LevelDbStore(self.session.config.get_torrent_store_dir()) if not self.torrent_store.get_db(): raise RuntimeError("Torrent store (leveldb) is None which should not normally happen") if self.session.config.get_metadata_enabled(): from Tribler.Core.leveldbstore import LevelDbStore self.metadata_store = LevelDbStore(self.session.config.get_metadata_store_dir()) if not self.metadata_store.get_db(): raise RuntimeError("Metadata store (leveldb) is None which should not normally happen") # torrent collecting: RemoteTorrentHandler if self.session.config.get_torrent_collecting_enabled() and self.session.config.get_dispersy_enabled(): from Tribler.Core.RemoteTorrentHandler import RemoteTorrentHandler self.rtorrent_handler = RemoteTorrentHandler(self.session) # TODO(emilon): move this to a megacache component or smth if self.session.config.get_megacache_enabled(): from Tribler.Core.CacheDB.SqliteCacheDBHandler import (PeerDBHandler, TorrentDBHandler, MyPreferenceDBHandler, VoteCastDBHandler, ChannelCastDBHandler) from Tribler.Core.Category.Category import Category self._logger.debug('tlm: Reading Session state from %s', self.session.config.get_state_dir()) self.category = Category() # create DBHandlers self.peer_db = PeerDBHandler(self.session) self.torrent_db = TorrentDBHandler(self.session) self.mypref_db = MyPreferenceDBHandler(self.session) self.votecast_db = VoteCastDBHandler(self.session) self.channelcast_db = ChannelCastDBHandler(self.session) # initializes DBHandlers self.peer_db.initialize() self.torrent_db.initialize() self.mypref_db.initialize() self.votecast_db.initialize() self.channelcast_db.initialize() from Tribler.Core.Modules.tracker_manager import TrackerManager self.tracker_manager = TrackerManager(self.session) if 
self.session.config.get_video_server_enabled(): self.video_server = VideoServer(self.session.config.get_video_server_port(), self.session) self.video_server.start() # IPv8 if self.session.config.get_ipv8_enabled(): from Tribler.pyipv8.ipv8.configuration import get_default_configuration ipv8_config = get_default_configuration() ipv8_config['port'] = self.session.config.get_dispersy_port() ipv8_config['address'] = self.session.config.get_ipv8_address() ipv8_config['overlays'] = [] ipv8_config['keys'] = [] # We load the keys ourselves if self.session.config.get_ipv8_bootstrap_override(): import Tribler.pyipv8.ipv8.deprecated.community as community_file community_file._DEFAULT_ADDRESSES = [self.session.config.get_ipv8_bootstrap_override()] community_file._DNS_ADDRESSES = [] self.ipv8 = IPv8(ipv8_config, enable_statistics=self.session.config.get_ipv8_statistics()) self.session.config.set_anon_proxy_settings(2, ("127.0.0.1", self.session. config.get_tunnel_community_socks5_listen_ports())) # Dispersy self.tftp_handler = None if self.session.config.get_dispersy_enabled(): from Tribler.dispersy.dispersy import Dispersy from Tribler.dispersy.endpoint import MIMEndpoint from Tribler.dispersy.endpoint import IPv8toDispersyAdapter # set communication endpoint if self.session.config.get_ipv8_enabled(): dispersy_endpoint = IPv8toDispersyAdapter(self.ipv8.endpoint) else: dispersy_endpoint = MIMEndpoint(self.session.config.get_dispersy_port()) working_directory = unicode(self.session.config.get_state_dir()) self.dispersy = Dispersy(dispersy_endpoint, working_directory) self.dispersy.statistics.enable_debug_statistics(False) # register TFTP service from Tribler.Core.TFTP.handler import TftpHandler self.tftp_handler = TftpHandler(self.session, dispersy_endpoint, "fffffffd".decode('hex'), block_size=1024) self.tftp_handler.initialize() # Torrent search if self.session.config.get_torrent_search_enabled() or self.session.config.get_channel_search_enabled(): self.search_manager = 
SearchManager(self.session) self.search_manager.initialize() if not self.initComplete: self.init() self.session.add_observer(self.on_tribler_started, NTFY_TRIBLER, [NTFY_STARTED]) self.session.notifier.notify(NTFY_TRIBLER, NTFY_STARTED, None) return self.startup_deferred def on_tribler_started(self, subject, changetype, objectID, *args): reactor.callFromThread(self.startup_deferred.callback, None) def load_ipv8_overlays(self): # Discovery Community with open(self.session.config.get_permid_keypair_filename(), 'r') as key_file: content = key_file.read() content = content[31:-30].replace('\n', '').decode("BASE64") peer = Peer(M2CryptoSK(keystring=content)) discovery_community = DiscoveryCommunity(peer, self.ipv8.endpoint, self.ipv8.network) discovery_community.resolve_dns_bootstrap_addresses() self.ipv8.overlays.append(discovery_community) self.ipv8.strategies.append((RandomChurn(discovery_community), -1)) if not self.session.config.get_dispersy_enabled(): self.ipv8.strategies.append((RandomWalk(discovery_community), 20)) if self.session.config.get_testnet(): peer = Peer(self.session.trustchain_testnet_keypair) else: peer = Peer(self.session.trustchain_keypair) # TrustChain Community if self.session.config.get_trustchain_enabled(): from Tribler.pyipv8.ipv8.attestation.trustchain.community import TrustChainCommunity, \ TrustChainTestnetCommunity community_cls = TrustChainTestnetCommunity if self.session.config.get_testnet() else TrustChainCommunity self.trustchain_community = community_cls(peer, self.ipv8.endpoint, self.ipv8.network, working_directory=self.session.config.get_state_dir()) self.ipv8.overlays.append(self.trustchain_community) self.ipv8.strategies.append((EdgeWalk(self.trustchain_community), 20)) tc_wallet = TrustchainWallet(self.trustchain_community) self.wallets[tc_wallet.get_identifier()] = tc_wallet # DHT Community if self.session.config.get_dht_enabled(): from Tribler.pyipv8.ipv8.dht.discovery import DHTDiscoveryCommunity self.dht_community = 
DHTDiscoveryCommunity(peer, self.ipv8.endpoint, self.ipv8.network) self.ipv8.overlays.append(self.dht_community) self.ipv8.strategies.append((RandomWalk(self.dht_community), 20)) # Tunnel Community if self.session.config.get_tunnel_community_enabled(): from Tribler.community.triblertunnel.community import TriblerTunnelCommunity, TriblerTunnelTestnetCommunity community_cls = TriblerTunnelTestnetCommunity if self.session.config.get_testnet() else \ TriblerTunnelCommunity if self.mainline_dht: dht_provider = MainlineDHTProvider(self.mainline_dht, self.session.config.get_dispersy_port()) else: dht_provider = DHTCommunityProvider(self.dht_community, self.session.config.get_dispersy_port()) self.tunnel_community = community_cls(peer, self.ipv8.endpoint, self.ipv8.network, tribler_session=self.session, dht_provider=dht_provider, bandwidth_wallet=self.wallets["MB"]) self.ipv8.overlays.append(self.tunnel_community) self.ipv8.strategies.append((RandomWalk(self.tunnel_community), 20)) # Market Community if self.session.config.get_market_community_enabled() and self.session.config.get_dht_enabled(): from Tribler.community.market.community import MarketCommunity, MarketTestnetCommunity community_cls = MarketTestnetCommunity if self.session.config.get_testnet() else MarketCommunity self.market_community = community_cls(peer, self.ipv8.endpoint, self.ipv8.network, tribler_session=self.session, trustchain=self.trustchain_community, dht=self.dht_community, wallets=self.wallets, working_directory=self.session.config.get_state_dir()) self.ipv8.overlays.append(self.market_community) self.ipv8.strategies.append((RandomWalk(self.market_community), 20)) # Popular Community if self.session.config.get_popularity_community_enabled(): from Tribler.community.popularity.community import PopularityCommunity self.popularity_community = PopularityCommunity(peer, self.ipv8.endpoint, self.ipv8.network, torrent_db=self.session.lm.torrent_db, session=self.session) 
        # NOTE(review): the three statements below are the tail of load_ipv8_overlays(),
        # whose `def` line lies above this chunk — indentation assumed to be the
        # method-body level; confirm against the full file.
        self.ipv8.overlays.append(self.popularity_community)
        self.ipv8.strategies.append((RandomWalk(self.popularity_community), 20))
        self.popularity_community.start()

    def enable_ipv8_statistics(self):
        """
        Enable per-community statistics on the IPv8 endpoint for every loaded
        overlay, if the config flag is set.
        """
        if self.session.config.get_ipv8_statistics():
            for overlay in self.ipv8.overlays:
                self.ipv8.endpoint.enable_community_statistics(overlay.get_prefix(), True)

    def load_dispersy_communities(self):
        """
        Define (and mostly auto-load) the enabled Dispersy communities:
        Search, AllChannel, Channel and PreviewChannel. Each community is only
        registered when its config flag is on AND Dispersy itself is running.
        """
        self._logger.info("tribler: Preparing Dispersy communities...")
        now_time = timemod.time()
        default_kwargs = {'tribler_session': self.session}

        # Search Community
        if self.session.config.get_torrent_search_enabled() and self.dispersy:
            from Tribler.community.search.community import SearchCommunity
            self.dispersy.define_auto_load(SearchCommunity, self.session.dispersy_member, load=True,
                                           kargs=default_kwargs)

        # AllChannel Community
        if self.session.config.get_channel_search_enabled() and self.dispersy:
            from Tribler.community.allchannel.community import AllChannelCommunity
            self.dispersy.define_auto_load(AllChannelCommunity, self.session.dispersy_member, load=True,
                                           kargs=default_kwargs)

        # Channel Community
        if self.session.config.get_channel_community_enabled() and self.dispersy:
            from Tribler.community.channel.community import ChannelCommunity
            self.dispersy.define_auto_load(ChannelCommunity, self.session.dispersy_member, load=True,
                                           kargs=default_kwargs)

        # PreviewChannel Community
        # NOTE(review): unlike the three communities above, this one is defined
        # WITHOUT load=True, so it is only auto-loaded on demand — presumably
        # intentional; confirm.
        if self.session.config.get_preview_channel_community_enabled() and self.dispersy:
            from Tribler.community.channel.preview import PreviewChannelCommunity
            self.dispersy.define_auto_load(PreviewChannelCommunity, self.session.dispersy_member,
                                           kargs=default_kwargs)

        self._logger.info("tribler: communities are ready in %.2f seconds", timemod.time() - now_time)

    def init(self):
        """
        Bring up all enabled subsystems in dependency order: Dispersy, the
        mainline DHT, wallets, IPv8 overlays, Dispersy communities, the channel
        manager, libtorrent, tracker checking, the REST API, watch folder,
        credit mining, resource/version monitors and the chant metadata store.
        Sets self.initComplete = True when done.
        """
        if self.dispersy:
            from Tribler.dispersy.community import HardKilledCommunity

            self._logger.info("lmc: Starting Dispersy...")

            self.session.readable_status = STATE_STARTING_DISPERSY
            now = timemod.time()
            success = self.dispersy.start(self.session.autoload_discovery)

            diff = timemod.time() - now
            if success:
                self._logger.info("lmc: Dispersy started successfully in %.2f seconds [port: %d]",
                                  diff, self.dispersy.wan_address[1])
            else:
                self._logger.info("lmc: Dispersy failed to start in %.2f seconds", diff)

            self.upnp_ports.append((self.dispersy.wan_address[1], 'UDP'))

            # Load the long-lived member identity from the on-disk keypair.
            from Tribler.dispersy.crypto import M2CryptoSK
            private_key = self.dispersy.crypto.key_to_bin(
                M2CryptoSK(filename=self.session.config.get_permid_keypair_filename()))
            # init() does not run on the reactor thread, hence blockingCallFromThread.
            self.session.dispersy_member = blockingCallFromThread(reactor, self.dispersy.get_member,
                                                                  private_key=private_key)

            blockingCallFromThread(reactor, self.dispersy.define_auto_load, HardKilledCommunity,
                                   self.session.dispersy_member, load=True)

            if self.session.config.get_megacache_enabled():
                self.dispersy.database.attach_commit_callback(self.session.sqlite_db.commit_now)

            # notify dispersy finished loading
            self.session.notifier.notify(NTFY_DISPERSY, NTFY_STARTED, None)

            self.session.readable_status = STATE_LOADING_COMMUNITIES

        # We should load the mainline DHT before loading the IPv8 overlays since the DHT is used
        # for the tunnel overlay.
        if self.session.config.get_mainline_dht_enabled():
            self.session.readable_status = STATE_START_MAINLINE_DHT
            from Tribler.Core.DecentralizedTracking import mainlineDHT
            self.mainline_dht = mainlineDHT.init(('127.0.0.1', self.session.config.get_mainline_dht_port()),
                                                 self.session.config.get_state_dir())
            self.upnp_ports.append((self.session.config.get_mainline_dht_port(), 'UDP'))

        # Wallets
        if self.session.config.get_bitcoinlib_enabled():
            try:
                from Tribler.Core.Modules.wallet.btc_wallet import BitcoinWallet, BitcoinTestnetWallet
                wallet_path = os.path.join(self.session.config.get_state_dir(), 'wallet')
                btc_wallet = BitcoinWallet(wallet_path)
                btc_testnet_wallet = BitcoinTestnetWallet(wallet_path)
                self.wallets[btc_wallet.get_identifier()] = btc_wallet
                self.wallets[btc_testnet_wallet.get_identifier()] = btc_testnet_wallet
            except ImportError:
                # bitcoinlib is an optional dependency; degrade gracefully.
                self._logger.error("bitcoinlib library cannot be found, Bitcoin wallet not available!")

        if self.session.config.get_dummy_wallets_enabled():
            # For debugging purposes, we create dummy wallets
            dummy_wallet1 = DummyWallet1()
            self.wallets[dummy_wallet1.get_identifier()] = dummy_wallet1

            dummy_wallet2 = DummyWallet2()
            self.wallets[dummy_wallet2.get_identifier()] = dummy_wallet2

        if self.ipv8:
            self.ipv8_start_time = time.time()
            self.load_ipv8_overlays()
            self.enable_ipv8_statistics()

        if self.dispersy:
            self.load_dispersy_communities()

        # Route anonymous downloads through the tunnel community's SOCKS5 ports.
        tunnel_community_ports = self.session.config.get_tunnel_community_socks5_listen_ports()
        self.session.config.set_anon_proxy_settings(2, ("127.0.0.1", tunnel_community_ports))

        if self.session.config.get_channel_search_enabled() and self.session.config.get_dispersy_enabled():
            self.session.readable_status = STATE_INITIALIZE_CHANNEL_MGR
            from Tribler.Core.Modules.channel.channel_manager import ChannelManager
            self.channel_manager = ChannelManager(self.session)
            self.channel_manager.initialize()

        if self.session.config.get_libtorrent_enabled():
            self.session.readable_status = STATE_START_LIBTORRENT
            from Tribler.Core.Libtorrent.LibtorrentMgr import LibtorrentMgr
            self.ltmgr = LibtorrentMgr(self.session)
            self.ltmgr.initialize()
            # Register every port collected so far (Dispersy, DHT) via UPnP.
            for port, protocol in self.upnp_ports:
                self.ltmgr.add_upnp_mapping(port, protocol)

        # add task for tracker checking
        if self.session.config.get_torrent_checking_enabled():
            self.session.readable_status = STATE_START_TORRENT_CHECKER
            self.torrent_checker = TorrentChecker(self.session)
            self.torrent_checker.initialize()

        if self.rtorrent_handler and self.session.config.get_dispersy_enabled():
            self.session.readable_status = STATE_START_REMOTE_TORRENT_HANDLER
            self.rtorrent_handler.initialize()

        if self.api_manager:
            self.session.readable_status = STATE_START_API_ENDPOINTS
            self.api_manager.root_endpoint.start_endpoints()

        if self.session.config.get_watch_folder_enabled():
            self.session.readable_status = STATE_START_WATCH_FOLDER
            self.watch_folder = WatchFolder(self.session)
            self.watch_folder.start()

        if self.session.config.get_credit_mining_enabled():
            self.session.readable_status = STATE_START_CREDIT_MINING
            from Tribler.Core.CreditMining.CreditMiningManager import CreditMiningManager
            self.credit_mining_manager = CreditMiningManager(self.session)

        if self.session.config.get_resource_monitor_enabled():
            self.resource_monitor = ResourceMonitor(self.session)
            self.resource_monitor.start()

        if self.session.config.get_version_checker_enabled():
            self.version_check_manager = VersionCheckManager(self.session)
            self.version_check_manager.start()

        if self.session.config.get_chant_enabled():
            channels_dir = os.path.join(self.session.config.get_chant_channels_dir())
            database_path = os.path.join(self.session.config.get_state_dir(), 'sqlite', 'metadata.db')
            self.mds = MetadataStore(database_path, channels_dir, self.session.trustchain_keypair)

        self.session.set_download_states_callback(self.sesscb_states_callback)

        if self.session.config.get_ipv8_enabled() and self.session.config.get_trustchain_enabled():
            self.payout_manager = PayoutManager(self.trustchain_community, self.dht_community)

        self.initComplete = True

    def on_channel_download_finished(self, download, channel_id, finished_deferred=None):
        """
        Process the directory of a finished channel download into the metadata
        store, then fire the optional completion deferred with the download.
        """
        if download.get_channel_download():
            channel_dirname = os.path.join(self.session.lm.mds.channels_dir, download.get_def().get_name())
            self.mds.process_channel_dir(channel_dirname, channel_id)
            if finished_deferred:
                finished_deferred.callback(download)

    @db_session
    def update_channel(self, payload):
        """
        We received some channel metadata, possibly over the network.
        Validate the signature, update the local metadata store and start downloading this channel if needed.
        :param payload: The channel metadata, in serialized form.
        :raises InvalidSignatureException: if the payload signature does not verify.
        :returns: the (download, finished_deferred) pair from download_channel when a
                  newer version is fetched, otherwise None.
        """
        if not payload.has_valid_signature():
            raise InvalidSignatureException("The signature of the channel metadata is invalid.")

        channel = self.mds.ChannelMetadata.get_channel_with_id(payload.public_key)
        if channel:
            if float2time(payload.timestamp) > channel.timestamp:
                # Update the channel that is already there.
                self._logger.info("Updating channel metadata %s ts %s->%s",
                                  str(channel.public_key).encode("hex"),
                                  str(channel.timestamp), str(float2time(payload.timestamp)))
                channel.set(**ChannelMetadataPayload.to_dict(payload))
        else:
            # Add new channel object to DB
            channel = self.mds.ChannelMetadata.from_payload(payload)
            channel.subscribed = True

        if channel.version > channel.local_version:
            self._logger.info("Downloading new channel version %s ver %i->%i",
                              str(channel.public_key).encode("hex"),
                              channel.local_version, channel.version)
            #TODO: handle the case where the local version is the same as the new one and is not seeded
            return self.download_channel(channel)

    def download_channel(self, channel):
        """
        Download a channel with a given infohash and title.
        :param channel: The channel metadata ORM object.
        :returns: a (download, finished_deferred) tuple; the deferred fires when
                  the channel torrent finishes downloading.
        """
        finished_deferred = Deferred()

        dcfg = DownloadStartupConfig()
        dcfg.set_dest_dir(self.mds.channels_dir)
        dcfg.set_channel_download(True)
        tdef = TorrentDefNoMetainfo(infohash=str(channel.infohash), name=channel.title)
        download = self.session.start_download_from_tdef(tdef, dcfg)
        channel_id = channel.public_key
        download.finished_callback = lambda dl: self.on_channel_download_finished(dl, channel_id,
                                                                                  finished_deferred)
        return download, finished_deferred

    def updated_my_channel(self, new_torrent_path):
        """
        Notify the core that we updated our channel.
        :param new_torrent_path: path to the new torrent file
        """
        # Start the new download
        tdef = TorrentDef.load(new_torrent_path)
        dcfg = DownloadStartupConfig()
        dcfg.set_dest_dir(self.mds.channels_dir)
        dcfg.set_channel_download(True)
        self.add(tdef, dcfg)

    def add(self, tdef, dscfg, pstate=None, setupDelay=0, hidden=False,
            share_mode=False, checkpoint_disabled=False):
        """ Called by any thread.

        Create a LibtorrentDownloadImpl for the given torrent definition,
        register it in self.downloads and kick off its (possibly delayed) setup.
        Unless hidden, the download is also recorded in the megacache databases.
        :returns: the created download object.
        :raises ValueError: if a metainfo-bearing TorrentDef is not finalized.
        """
        d = None
        with self.session_lock:
            if not isinstance(tdef, TorrentDefNoMetainfo) and not tdef.is_finalized():
                raise ValueError("TorrentDef not finalized")

            infohash = tdef.get_infohash()

            # Create the destination directory if it does not exist yet
            try:
                if not os.path.isdir(dscfg.get_dest_dir()):
                    os.makedirs(dscfg.get_dest_dir())
            except OSError:
                self._logger.error("Unable to create the download destination directory.")

            if dscfg.get_time_added() == 0:
                dscfg.set_time_added(int(timemod.time()))

            # Check if running or saved on disk
            # NOTE(review): a duplicate infohash is only logged here, not rejected;
            # the existing entry in self.downloads is overwritten below — confirm
            # this is intended.
            if infohash in self.downloads:
                self._logger.info("Torrent already exists in the downloads. Infohash:%s",
                                  infohash.encode('hex'))

            from Tribler.Core.Libtorrent.LibtorrentDownloadImpl import LibtorrentDownloadImpl
            d = LibtorrentDownloadImpl(self.session, tdef)

            if pstate is None:  # not already resuming
                pstate = self.load_download_pstate_noexc(infohash)
                if pstate is not None:
                    self._logger.debug("tlm: add: pstate is %s %s",
                                       pstate.get('dlstate', 'status'), pstate.get('dlstate', 'progress'))

            # Store in list of Downloads, always.
            self.downloads[infohash] = d
            setup_deferred = d.setup(dscfg, pstate, wrapperDelay=setupDelay,
                                     share_mode=share_mode, checkpoint_disabled=checkpoint_disabled)
            setup_deferred.addCallback(self.on_download_handle_created)

        # Persist the user's preference for this torrent outside the lock.
        if d and not hidden and self.session.config.get_megacache_enabled():
            @forceDBThread
            def write_my_pref():
                torrent_id = self.torrent_db.getTorrentID(infohash)
                data = {'destination_path': d.get_dest_dir()}
                self.mypref_db.addMyPreference(torrent_id, data)

            if isinstance(tdef, TorrentDefNoMetainfo):
                self.torrent_db.addOrGetTorrentID(tdef.get_infohash())
                self.torrent_db.updateTorrent(tdef.get_infohash(), name=tdef.get_name_as_unicode())
                self.torrent_db._db.commit_now()
                write_my_pref()
            elif self.rtorrent_handler:
                # Store the torrent file first; write_my_pref runs as its callback.
                self.rtorrent_handler.save_torrent(tdef, write_my_pref)
            else:
                self.torrent_db.addExternalTorrent(tdef, extra_info={'status': 'good'})
                write_my_pref()

        return d

    def on_download_handle_created(self, download):
        """
        This method is called when the download handle has been created.
        Immediately checkpoint the download and write the resume data.
        """
        return download.checkpoint()

    def remove(self, d, removecontent=False, removestate=True, hidden=False):
        """ Called by any thread.

        Stop a download, drop it from self.downloads and (unless hidden) from
        the preference database, and notify the tunnel community.
        :returns: the deferred from stop_remove, or an already-fired deferred.
        """
        out = None
        with self.session_lock:
            out = d.stop_remove(removestate=removestate, removecontent=removecontent)
            infohash = d.get_def().get_infohash()
            if infohash in self.downloads:
                del self.downloads[infohash]

        if not hidden:
            self.remove_id(infohash)

        if self.tunnel_community:
            self.tunnel_community.on_download_removed(d)

        return out or succeed(None)

    def remove_id(self, infohash):
        """Remove the user's preference record for this infohash (on the DB thread)."""
        @forceDBThread
        def do_db():
            torrent_id = self.torrent_db.getTorrentID(infohash)
            if torrent_id:
                self.mypref_db.deletePreference(torrent_id)

        if self.session.config.get_megacache_enabled():
            do_db()

    def get_downloads(self):
        """ Called by any thread """
        with self.session_lock:
            return self.downloads.values()  # copy, is mutable

    def get_download(self, infohash):
        """ Called by any thread """
        with self.session_lock:
            return self.downloads.get(infohash, None)

    def download_exists(self, infohash):
        """Return True when a download with this infohash is currently registered."""
        with self.session_lock:
            return infohash in self.downloads

    @inlineCallbacks
    def update_download_hops(self, download, new_hops):
        """
        Update the amount of hops for a specified download. This can be done on runtime.

        Saves the resume data, removes the download, then re-adds it with a
        config that carries the new hop count.
        """
        infohash = binascii.hexlify(download.tdef.get_infohash())
        self._logger.info("Updating the amount of hops of download %s", infohash)
        pstate = download.get_persistent_download_config()
        pstate.set('state', 'engineresumedata', (yield download.save_resume_data()))
        yield self.session.remove_download(download)

        # copy the old download_config and change the hop count
        dscfg = download.copy()
        dscfg.set_hops(new_hops)
        # If the user wants to change the hop count to 0, don't automatically bump this up to 1 anymore
        dscfg.set_safe_seeding(False)

        self.session.start_download_from_tdef(download.tdef, dscfg, pstate=pstate)

    def update_trackers(self, infohash, trackers):
        """
        Update the trackers for a download.
        :param infohash: infohash of the torrent that needs to be updated
        :param trackers: A list of tracker urls.
        """
        dl = self.get_download(infohash)
        old_def = dl.get_def() if dl else None

        if old_def:
            old_trackers = old_def.get_trackers_as_single_tuple()
            new_trackers = list(set(trackers) - set(old_trackers))
            all_trackers = list(old_trackers) + new_trackers

            if new_trackers:
                # Add new trackers to the download
                dl.add_trackers(new_trackers)

                # Create a new TorrentDef
                if isinstance(old_def, TorrentDefNoMetainfo):
                    new_def = TorrentDefNoMetainfo(old_def.get_infohash(), old_def.get_name(),
                                                   dl.get_magnet_link())
                else:
                    metainfo = old_def.get_metainfo()
                    if len(all_trackers) > 1:
                        metainfo["announce-list"] = [all_trackers]
                    else:
                        metainfo["announce"] = all_trackers[0]
                    new_def = TorrentDef.load_from_dict(metainfo)

                # Set TorrentDef + checkpoint
                dl.set_def(new_def)
                dl.checkpoint()

                if isinstance(old_def, TorrentDefNoMetainfo):
                    @forceDBThread
                    def update_trackers_db(infohash, new_trackers):
                        torrent_id = self.torrent_db.getTorrentID(infohash)
                        if torrent_id is not None:
                            self.torrent_db.addTorrentTrackerMappingInBatch(torrent_id, new_trackers)
                            self.session.notifier.notify(NTFY_TORRENTS, NTFY_UPDATE, infohash)

                    if self.session.config.get_megacache_enabled():
                        update_trackers_db(infohash, new_trackers)
                elif not isinstance(old_def, TorrentDefNoMetainfo) and self.rtorrent_handler:
                    # Update collected torrents
                    self.rtorrent_handler.save_torrent(new_def)

    #
    # State retrieval
    #
    def stop_download_states_callback(self):
        """
        Stop any download states callback if present.
        """
        if self.is_pending_task_active("download_states_lc"):
            self.cancel_pending_task("download_states_lc")

    def set_download_states_callback(self, user_callback, interval=1.0):
        """
        Set the download state callback. Remove any old callback if it's present.
        :param user_callback: callable invoked periodically with the list of download states.
        :param interval: polling interval in seconds (default 1.0).
        """
        self.stop_download_states_callback()
        self._logger.debug("Starting the download state callback with interval %f", interval)
        self.download_states_lc = self.register_task("download_states_lc",
                                                     LoopingCall(self._invoke_states_cb, user_callback))
        self.download_states_lc.start(interval)

    def _invoke_states_cb(self, callback):
        """
        Invoke the download states callback with a list of the download states.

        The callback runs in a thread (deferToThread); its return value becomes
        the new self.get_peer_list.
        """
        dslist = []
        for d in self.downloads.values():
            # NOTE(review): `True in self.get_peer_list` appears to act as a
            # wildcard (peer info for all downloads) — confirm against callers.
            d.set_moreinfo_stats(True in self.get_peer_list or
                                 d.get_def().get_infohash() in self.get_peer_list)
            ds = d.network_get_state(None)
            dslist.append(ds)

        def on_cb_done(new_get_peer_list):
            self.get_peer_list = new_get_peer_list

        return deferToThread(callback, dslist).addCallback(on_cb_done)

    def sesscb_states_callback(self, states_list):
        """
        This method is periodically (every second) called with a list of the
        download states of the active downloads.

        Handles finished/errored downloads, safe-seeding re-adds, payout-manager
        peer bookkeeping (every 5th call) and hands the state list to the tunnel
        and credit-mining monitors (every 4th call).
        """
        self.state_cb_count += 1

        # Check to see if a download has finished
        new_active_downloads = []
        do_checkpoint = False
        seeding_download_list = []

        for ds in states_list:
            state = ds.get_status()
            download = ds.get_download()
            tdef = download.get_def()
            safename = tdef.get_name_as_unicode()
            infohash = tdef.get_infohash()

            if state == DLSTATUS_DOWNLOADING:
                new_active_downloads.append(infohash)
            elif state == DLSTATUS_STOPPED_ON_ERROR:
                self._logger.error("Error during download: %s", repr(ds.get_error()))
                if self.download_exists(infohash):
                    self.get_download(infohash).stop()
                    self.session.notifier.notify(NTFY_TORRENT, NTFY_ERROR, infohash, repr(ds.get_error()))
            elif state == DLSTATUS_SEEDING:
                seeding_download_list.append({u'infohash': infohash, u'download': download})

                if infohash in self.previous_active_downloads:
                    # Was downloading last tick, seeding now => it just finished.
                    self.session.notifier.notify(NTFY_TORRENT, NTFY_FINISHED, infohash, safename)
                    do_checkpoint = True
                elif download.get_hops() == 0 and download.get_safe_seeding():
                    # Re-add the download with anonymity enabled
                    hops = self.session.config.get_default_number_hops()
                    self.update_download_hops(download, hops)

            # Check the peers of this download every five seconds and add them to the payout manager when
            # this peer runs a Tribler instance
            if self.state_cb_count % 5 == 0 and download.get_hops() == 0 and self.payout_manager:
                for peer in download.get_peerlist():
                    if peer["extended_version"].startswith('Tribler'):
                        self.payout_manager.update_peer(peer["id"].decode('hex'), infohash, peer["dtotal"])

        self.previous_active_downloads = new_active_downloads
        if do_checkpoint:
            self.session.checkpoint_downloads()

        if self.state_cb_count % 4 == 0:
            if self.tunnel_community:
                self.tunnel_community.monitor_downloads(states_list)
            if self.credit_mining_manager:
                self.credit_mining_manager.monitor_downloads(states_list)

        return []

    #
    # Persistence methods
    #
    def load_checkpoint(self):
        """ Called by any thread.

        Resume every *.state checkpoint from the pstate directory, staggering
        the setups by 0.1s each. If init() has not completed yet, retry in 1s.
        """
        def do_load_checkpoint():
            with self.session_lock:
                for i, filename in enumerate(iglob(os.path.join(
                        self.session.get_downloads_pstate_dir(), '*.state'))):
                    self.resume_download(filename, setupDelay=i * 0.1)

        if self.initComplete:
            do_load_checkpoint()
        else:
            self.register_task("load_checkpoint", reactor.callLater(1, do_load_checkpoint))

    def load_download_pstate_noexc(self, infohash):
        """ Called by any thread, assume session_lock already held.

        Best-effort variant of load_download_pstate: returns the parsed pstate,
        or None when the file is missing or loading raises (logged, swallowed).
        """
        try:
            basename = binascii.hexlify(infohash) + '.state'
            filename = os.path.join(self.session.get_downloads_pstate_dir(), basename)
            if os.path.exists(filename):
                return self.load_download_pstate(filename)
            else:
                self._logger.info("%s not found", basename)
        except Exception:
            self._logger.exception("Exception while loading pstate: %s", infohash)

    def resume_download(self, filename, setupDelay=0):
        """
        Resume a single download from its .state checkpoint file. Falls back to
        the torrent store when the pstate is unreadable; removes checkpoints
        whose destination directory was cleared.
        """
        tdef = dscfg = pstate = None

        try:
            pstate = self.load_download_pstate(filename)

            # SWIFTPROC
            metainfo = pstate.get('state', 'metainfo')
            if 'infohash' in metainfo:
                tdef = TorrentDefNoMetainfo(metainfo['infohash'], metainfo['name'],
                                            metainfo.get('url', None))
            else:
                tdef = TorrentDef.load_from_dict(metainfo)

            # Legacy checkpoints stored 'saveas' as a tuple; keep only the path.
            if pstate.has_option('download_defaults', 'saveas') and \
                    isinstance(pstate.get('download_defaults', 'saveas'), tuple):
                pstate.set('download_defaults', 'saveas',
                           pstate.get('download_defaults', 'saveas')[-1])

            dscfg = DownloadStartupConfig(pstate)

        # NOTE(review): bare except — intentionally catches any parse failure,
        # but also masks unrelated errors; consider `except Exception`.
        except:
            # pstate is invalid or non-existing
            # NOTE(review): `file` shadows the Python 2 builtin of the same name.
            _, file = os.path.split(filename)
            infohash = binascii.unhexlify(file[:-6])  # strip the '.state' suffix

            torrent_data = self.torrent_store.get(infohash)
            if torrent_data:
                try:
                    tdef = TorrentDef.load_from_memory(torrent_data)
                    defaultDLConfig = DefaultDownloadStartupConfig.getInstance()
                    dscfg = defaultDLConfig.copy()

                    if self.mypref_db is not None:
                        dest_dir = self.mypref_db.getMyPrefStatsInfohash(infohash)
                        if dest_dir and os.path.isdir(dest_dir):
                            dscfg.set_dest_dir(dest_dir)
                except ValueError:
                    self._logger.warning("tlm: torrent data invalid")

        if pstate is not None:
            has_resume_data = pstate.get('state', 'engineresumedata') is not None
            self._logger.debug("tlm: load_checkpoint: resumedata %s",
                               'len %s ' % len(pstate.get('state', 'engineresumedata'))
                               if has_resume_data else 'None')

        if tdef and dscfg:
            if dscfg.get_dest_dir() != '':  # removed torrent ignoring
                try:
                    if self.download_exists(tdef.get_infohash()):
                        self._logger.info(
                            "tlm: not resuming checkpoint because download has already been added")
                    elif dscfg.get_credit_mining() and not self.session.config.get_credit_mining_enabled():
                        self._logger.info(
                            "tlm: not resuming checkpoint since token mining is disabled")
                    else:
                        self.add(tdef, dscfg, pstate, setupDelay=setupDelay)
                # NOTE(review): `e` is bound but unused; the logger records the
                # traceback via .exception() anyway.
                except Exception as e:
                    self._logger.exception(
                        "tlm: load check_point: exception while adding download %s", tdef)
            else:
                self._logger.info("tlm: removing checkpoint %s destdir is %s",
                                  filename, dscfg.get_dest_dir())
                os.remove(filename)
        else:
            self._logger.info("tlm: could not resume checkpoint %s %s %s", filename, tdef, dscfg)

    def checkpoint_downloads(self):
        """
        Checkpoints all running downloads in Tribler.
        Even if the list of Downloads changes in the mean time this is no problem.
        For removals, dllist will still hold a pointer to the download, and additions
        are no problem (just won't be included in list of states returned via callback).
        :returns: a DeferredList that fires when every checkpoint completed.
        """
        downloads = self.downloads.values()
        deferred_list = []
        self._logger.debug("tlm: checkpointing %s downloads", len(downloads))
        for download in downloads:
            deferred_list.append(download.checkpoint())

        return DeferredList(deferred_list)

    def shutdown_downloads(self):
        """
        Shutdown all downloads in Tribler.
        """
        for download in self.downloads.values():
            download.stop()

    def remove_pstate(self, infohash):
        """
        Delete the on-disk .state checkpoint for this infohash, unless the
        download has reappeared in the meantime. Runs on the reactor thread.
        """
        def do_remove():
            if not self.download_exists(infohash):
                dlpstatedir = self.session.get_downloads_pstate_dir()

                # Remove checkpoint
                hexinfohash = binascii.hexlify(infohash)
                try:
                    basename = hexinfohash + '.state'
                    filename = os.path.join(dlpstatedir, basename)
                    self._logger.debug("remove pstate: removing dlcheckpoint entry %s", filename)
                    if os.access(filename, os.F_OK):
                        os.remove(filename)
                # NOTE(review): bare except — deliberate best-effort, logged below.
                except:
                    # Show must go on
                    self._logger.exception("Could not remove state")
            else:
                self._logger.warning(
                    "remove pstate: download is back, restarted? Canceling removal! %s",
                    repr(infohash))
        reactor.callFromThread(do_remove)

    @inlineCallbacks
    def early_shutdown(self):
        """ Called as soon as Session shutdown is initiated. Used to start shutdown tasks that takes
        some time and that can run in parallel to checkpointing, etc.
        :returns a Deferred that will fire once all dependencies acknowledge they have shutdown.

        Shutdown order matters here: the tunnel overlay must be unloaded before
        TrustChain, Dispersy before IPv8's endpoint stops, and the API manager
        goes last so status endpoints stay reachable.
        """
        self._logger.info("tlm: early_shutdown")

        self.shutdown_task_manager()

        # Note: session_lock not held
        self.shutdownstarttime = timemod.time()
        if self.credit_mining_manager:
            yield self.credit_mining_manager.shutdown()
        self.credit_mining_manager = None

        if self.torrent_checker:
            yield self.torrent_checker.shutdown()
        self.torrent_checker = None

        if self.channel_manager:
            yield self.channel_manager.shutdown()
        self.channel_manager = None

        if self.search_manager:
            yield self.search_manager.shutdown()
        self.search_manager = None

        if self.rtorrent_handler:
            yield self.rtorrent_handler.shutdown()
        self.rtorrent_handler = None

        if self.video_server:
            yield self.video_server.shutdown_server()
        self.video_server = None

        if self.version_check_manager:
            self.version_check_manager.stop()
        self.version_check_manager = None

        if self.resource_monitor:
            self.resource_monitor.stop()
        self.resource_monitor = None

        self.tracker_manager = None

        if self.tftp_handler is not None:
            yield self.tftp_handler.shutdown()
        self.tftp_handler = None

        if self.tunnel_community and self.trustchain_community:
            # We unload these overlays manually since the TrustChain has to be unloaded
            # after the tunnel overlay.
            tunnel_community = self.tunnel_community
            self.tunnel_community = None
            yield self.ipv8.unload_overlay(tunnel_community)

            trustchain_community = self.trustchain_community
            self.trustchain_community = None
            yield self.ipv8.unload_overlay(trustchain_community)

        if self.dispersy:
            self._logger.info("lmc: Shutting down Dispersy...")
            now = timemod.time()
            try:
                success = yield self.dispersy.stop()
            # NOTE(review): bare except — shutdown must proceed regardless.
            except:
                print_exc()
                success = False

            diff = timemod.time() - now
            if success:
                self._logger.info("lmc: Dispersy successfully shutdown in %.2f seconds", diff)
            else:
                self._logger.info("lmc: Dispersy failed to shutdown in %.2f seconds", diff)

        if self.ipv8:
            yield self.ipv8.stop(stop_reactor=False)

        if self.metadata_store is not None:
            yield self.metadata_store.close()
        self.metadata_store = None

        if self.channelcast_db is not None:
            yield self.channelcast_db.close()
        self.channelcast_db = None

        if self.votecast_db is not None:
            yield self.votecast_db.close()
        self.votecast_db = None

        if self.mypref_db is not None:
            yield self.mypref_db.close()
        self.mypref_db = None

        if self.torrent_db is not None:
            yield self.torrent_db.close()
        self.torrent_db = None

        if self.peer_db is not None:
            yield self.peer_db.close()
        self.peer_db = None

        if self.mainline_dht is not None:
            from Tribler.Core.DecentralizedTracking import mainlineDHT
            yield mainlineDHT.deinit(self.mainline_dht)
        self.mainline_dht = None

        if self.torrent_store is not None:
            yield self.torrent_store.close()
        self.torrent_store = None

        if self.watch_folder is not None:
            yield self.watch_folder.stop()
        self.watch_folder = None

        # We close the API manager as late as possible during shutdown.
        if self.api_manager is not None:
            yield self.api_manager.stop()
        self.api_manager = None

    def network_shutdown(self):
        """
        Final shutdown step: log any threads still alive, release the session
        done-flag and tear down the libtorrent manager (after checkpoints).
        """
        try:
            self._logger.info("tlm: network_shutdown")

            ts = enumerate_threads()
            self._logger.info("tlm: Number of threads still running %d", len(ts))
            for t in ts:
                self._logger.info("tlm: Thread still running=%s, daemon=%s, instance=%s",
                                  t.getName(), t.isDaemon(), t)
        # NOTE(review): bare except — diagnostics only, must not block shutdown.
        except:
            print_exc()

        # Stop network thread
        self.sessdoneflag.set()

        # Shutdown libtorrent session after checkpoints have been made
        if self.ltmgr is not None:
            self.ltmgr.shutdown()
            self.ltmgr = None

    def save_download_pstate(self, infohash, pstate):
        """ Called by network thread.

        Remember the pstate for restart and asynchronously write resume data.
        """
        self.downloads[infohash].pstate_for_restart = pstate
        self.register_anonymous_task("save_pstate", self.downloads[infohash].save_resume_data())

    def load_download_pstate(self, filename):
        """ Called by any thread.

        Parse a .state checkpoint file into a CallbackConfigParser.
        """
        pstate = CallbackConfigParser()
        pstate.read_file(filename)
        return pstate