Code Example #1
File: community.py, Project: csko/Tribler
    def __init__(self, cid, master_public_key):
        super(SimpleDispersyTestCommunity, self).__init__(cid, master_public_key)
        if __debug__: dprint(self._cid.encode("HEX"))

        # ensure that two of the hardcoded members (A, B, or C) have been picked
        cluster = self.get_meta_message(u"last-1-subjective-sync").destination.cluster
        subjective_set = self.get_subjective_set(self._my_member, cluster)
        assert subjective_set
        assert self._my_member.public_key in subjective_set
        def count():
            counter = 0
            for name, public_key in self.hardcoded_member_public_keys.iteritems():
                if public_key in subjective_set:
                    if __debug__: dprint("hardcoded member ", name, " found in my subjective set")
                    counter += 1
            return counter
        # if (1) we are not one of the hardcoded members and (2) we did not yet pick hardcoded
        # members for our subjective set
        if not self._my_member.public_key in self.hardcoded_member_public_keys.values() and count() < 2:
            assert count() == 0
            assert len(self.hardcoded_member_public_keys) == 3
            keys = self.hardcoded_member_public_keys.values()
            shuffle(keys)
            self.create_dispersy_subjective_set(cluster, [self._my_member, self.get_member(keys[0]), self.get_member(keys[1])])
            subjective_set = self.get_subjective_set(self._my_member, cluster)
            assert count() == 2

        self._status = get_status_holder("dispersy-simple-dispersy-test")
        self._status.add_reporter(TUDelftReporter(REPORTER_NAME, 300, self._my_member.public_key))
        self._status.create_and_add_event("__init__^" + self._cid.encode("HEX"), ["last-1-subjective-sync"])
        self._status.create_and_add_event("info^" + self._cid.encode("HEX"), [self._dispersy.info()])
        self._status.create_and_add_event("subjective_set^" + self._cid.encode("HEX"), [(name, public_key in subjective_set) for name, public_key in self.hardcoded_member_public_keys.iteritems()])
        self._status.report_now()
        self._dispersy.callback.register(self._periodically_info, delay=60.0)
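
All of the examples on this page follow the same basic pattern: fetch a named status holder with get_status_holder(), attach one or more reporters, and record events with create_and_add_event(). The sketch below is a minimal, self-contained stand-in for that API, written only from the call shapes visible in these snippets; it is not the real Tribler.Core.Statistics.Status implementation, which handles threading and periodic reporting.

# Minimal stand-in for the status-holder API used throughout this page.
# Only the call shapes (get_status_holder, add_reporter,
# create_and_add_event, report_now) are taken from the snippets;
# everything else is a simplification, not the real Tribler code.
import time

_holders = {}


class _Event(object):
    def __init__(self, name, values):
        self.name = name
        self.values = list(values)
        self.time = time.time()


class _StatusHolder(object):
    def __init__(self, name):
        self.name = name
        self._reporters = []
        self._events = []

    def add_reporter(self, reporter):
        self._reporters.append(reporter)

    def create_and_add_event(self, name, values):
        self._events.append(_Event(name, values))

    def report_now(self):
        # hand all queued events to every reporter, then clear the queue
        for reporter in self._reporters:
            reporter.report(self._events)
        self._events = []


class _PrintReporter(object):
    """Stand-in for TUDelftReporter/NullReporter: just prints the events."""
    def report(self, events):
        for event in events:
            print("%s %s -> %s" % (event.time, event.name, event.values))


def get_status_holder(name):
    # one holder instance per name, mirroring the singleton-like usage above
    return _holders.setdefault(name, _StatusHolder(name))


if __name__ == "__main__":
    status = get_status_holder("dispersy-simple-dispersy-test")
    status.add_reporter(_PrintReporter())
    status.create_and_add_event("client-startup-version", ["6.x"])
    status.report_now()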
Code Example #2
File: community.py, Project: egbertbouman/tribler-g
    def __init__(self, cid, master_public_key):
        super(SimpleDispersyTestCommunity, self).__init__(cid, master_public_key)
        if __debug__: dprint(self._cid.encode("HEX"))

        self._status = get_status_holder("dispersy-simple-dispersy-test")
        self._status.add_reporter(TUDelftReporter(REPORTER_NAME, 300, self._my_member.public_key))
        self._status.create_and_add_event("__init__^" + self._cid.encode("HEX"), ["full-sync", "last-1-sync"])
        self._status.create_and_add_event("info^" + self._cid.encode("HEX"), [self._dispersy.info()])
        self._status.report_now()
        self._dispersy.callback.register(self._periodically_info, delay=60.0)
Code Example #3
    def __init__(self, infohash, storage, picker, backlog, max_rate_period,
                 numpieces, chunksize, measurefunc, snub_time,
                 kickbans_ok, kickfunc, banfunc, bt1dl, scheduler = None):
        self.infohash = infohash
        self.b64_infohash = b64encode(infohash)
        self.storage = storage
        self.picker = picker
        self.backlog = backlog
        self.max_rate_period = max_rate_period
        self.measurefunc = measurefunc
        self.totalmeasure = Measure(max_rate_period*storage.piece_length/storage.request_size)
        self.numpieces = numpieces
        self.chunksize = chunksize
        self.snub_time = snub_time
        self.kickfunc = kickfunc
        self.banfunc = banfunc
        self.disconnectedseeds = {}
        self.downloads = []
        self.perip = {}
        self.gotbaddata = {}
        self.kicked = {}
        self.banned = {}
        self.kickbans_ok = kickbans_ok
        self.kickbans_halted = False
        self.super_seeding = False
        self.endgamemode = False
        self.endgame_queued_pieces = []
        self.all_requests = []
        self.discarded = 0L
        self.download_rate = 0
#        self.download_rate = 25000  # 25K/s test rate
        self.bytes_requested = 0
        self.last_time = clock()
        self.queued_out = {}
        self.requeueing = False
        self.paused = False
        self.scheduler = scheduler
        # ProxyService_
        #
        self.bt1dl = bt1dl
        self.proxydownloader = None
        #
        # _ProxyService

        # hack: we should not import this since it is not part of the
        # core nor should we import here, but otherwise we will get
        # import errors
        #
        # _event_reporter stores events that are logged somewhere...
        # from Tribler.Core.Statistics.StatusReporter import get_reporter_instance
        # self._event_reporter = get_reporter_instance()
        self._event_reporter = get_status_holder("LivingLab")

        # check periodically
        self.scheduler(self.dlr_periodic_check, 1)
Code Example #4
File: Uploader.py, Project: ebcabaybay/swiftarm
    def get_upload_chunk(self):
        if self.choked or not self.buffer:
            return None
        index, begin, length = self.buffer.pop(0)
        if self.config['buffer_reads']:
            if index != self.piecedl:
                if self.piecebuf:
                    self.piecebuf.release()
                self.piecedl = index
                # Merkle
                [self.piecebuf,
                 self.hashlist] = self.storage.get_piece(index, 0, -1)
            try:
                piece = self.piecebuf[begin:begin + length]
                assert len(piece) == length
            except:  # fails if storage.get_piece returns None or if out of range
                self.connection.close()
                return None
            if begin == 0:
                hashlist = self.hashlist
            else:
                hashlist = []
        else:
            if self.piecebuf:
                self.piecebuf.release()
                self.piecedl = None
            [piece, hashlist] = self.storage.get_piece(index, begin, length)
            if piece is None:
                self.connection.close()
                return None
        self.measure.update_rate(len(piece))
        self.totalup.update_rate(len(piece))

        status = get_status_holder("LivingLab")
        s_upload = status.get_or_create_status_element("uploaded", 0)
        s_upload.inc(len(piece))

        # BarterCast counter
        self.connection.total_uploaded += length

        return (index, begin, hashlist, piece)
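
The two Uploader examples (this one and the next) use the holder as a simple byte counter: get_or_create_status_element("uploaded", 0) fetches a named element and inc(len(piece)) adds the bytes just sent. Below is a small, self-contained sketch of that counter pattern; _StatusElement, _CounterHolder and get_value() are simplified stand-ins, and only inc() with an increment argument is taken from the snippet.

# Sketch of the counter pattern from get_upload_chunk above. Stand-in
# classes only; get_value() is an assumed convenience, not Tribler API.
class _StatusElement(object):
    def __init__(self, name, initial_value=0):
        self.name = name
        self.value = initial_value

    def inc(self, amount=1):
        self.value += amount

    def get_value(self):
        return self.value


class _CounterHolder(object):
    def __init__(self):
        self._elements = {}

    def get_or_create_status_element(self, name, initial_value=0):
        # create the element on first use, reuse it afterwards
        if name not in self._elements:
            self._elements[name] = _StatusElement(name, initial_value)
        return self._elements[name]


status = _CounterHolder()
for chunk_length in (16384, 16384, 4096):  # pretend upload chunk sizes
    s_upload = status.get_or_create_status_element("uploaded", 0)
    s_upload.inc(chunk_length)

print(status.get_or_create_status_element("uploaded", 0).get_value())  # 36864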
Code Example #5
File: Uploader.py, Project: egbertbouman/tribler-g
    def get_upload_chunk(self):
        if self.choked or not self.buffer:
            return None
        index, begin, length = self.buffer.pop(0)
        if self.config['buffer_reads']:
            if index != self.piecedl:
                if self.piecebuf:
                    self.piecebuf.release()
                self.piecedl = index
                # Merkle
                [ self.piecebuf, self.hashlist ] = self.storage.get_piece(index, 0, -1)
            try:
                piece = self.piecebuf[begin:begin+length]
                assert len(piece) == length
            except:     # fails if storage.get_piece returns None or if out of range
                self.connection.close()
                return None
            if begin == 0:
                hashlist = self.hashlist
            else:
                hashlist = []
        else:
            if self.piecebuf:
                self.piecebuf.release()
                self.piecedl = None
            [piece, hashlist] = self.storage.get_piece(index, begin, length)
            if piece is None:
                self.connection.close()
                return None
        self.measure.update_rate(len(piece))
        self.totalup.update_rate(len(piece))

        status = get_status_holder("LivingLab")
        s_upload = status.get_or_create_status_element("uploaded",0)
        s_upload.inc(len(piece))

        # BarterCast counter
        self.connection.total_uploaded += length
        
        return (index, begin, hashlist, piece)
Code Example #6
File: Doe.py, Project: egbertbouman/tribler-g
    def send_download_piece(self, piece, proxy_permid):
        """ Send a message to request the piece to the proxy

        TODO: update the line below 
        Called by _request() in ProxyDownloader
        
        @param piece: The piece that will be requested from one of the proxies
        @param proxy_permid: The permid of the proxy that will be requested for the piece
        """
        if DEBUG:
            print >>sys.stderr, "doe: send_request_piece: will send a request for piece", piece, "to", show_permid_short(proxy_permid)
                
        try:
            # Store the proxy identification data and the piece requested to it
            if proxy_permid in self.requested_pieces.keys():
                # The peer is already in the dictionary: a previous request was sent to it
                # Check if the piece was not requested before
                if piece in self.requested_pieces[proxy_permid]:
                    # The piece has already been requested from that proxy. No re-requests in this version
                    if DEBUG:
                        print >> sys.stderr, "doe: send_request_piece: piece", piece, "was already requested to this proxy before"
                    return
                self.requested_pieces[proxy_permid].append(piece)
            else:
                # The peer is not in the dictionary: no previous requests were sent to it
                self.requested_pieces[proxy_permid] = deque([piece])

            # Send the request message to the proxy
            olthread_send_download_piece_lambda = lambda:self.olthread_send_download_piece(piece, proxy_permid)
            self.overlay_bridge.add_task(olthread_send_download_piece_lambda,0)
            
            # ProxyService 90s Test_
            from Tribler.Core.Statistics.Status.Status import get_status_holder
            status = get_status_holder("Proxy90secondsTest")
            status.create_and_add_event("requested-piece-to-proxy", [show_permid_short(proxy_permid), piece])
            # _ProxyService 90s Test
            
        except Exception,e:
            print_exc()
            print >> sys.stderr, "doe: Exception while requesting piece", piece, e
Code Example #7
File: Doe.py, Project: egbertbouman/tribler-g
    def got_piece_data(self, permid, selversion, piece, piece_data):
        """ Find the SingleDownload object for the sending permid and pass the data to it.
        
        @param permid: The permid of the node sending the message
        @param selversion: selected Overlay protocol version
        @param piece: The piece number that is sent 
        @param piece_data: The piece data that is sent
        """
        if DEBUG:
            print >> sys.stderr, "doe: received a PIECE_DATA message from", show_permid_short(permid)

        # Search for the SingleDownload object that has the connection with this peer
        if DEBUG:
            debug_found_connection = False
        
        for single_dl in self.proxydownloader.downloads:
            if permid == single_dl.proxy_permid:
                # If the connection is found, add the piece_list information to the d.have information
                single_dl.received_data[piece] = piece_data
                single_dl.request_finished(piece)

                # ProxyService 90s Test_
                from Tribler.Core.Statistics.Status.Status import get_status_holder
                status = get_status_holder("Proxy90secondsTest")
                status.create_and_add_event("downloaded-piece", [piece, show_permid_short(permid)])
                # _ProxyService 90s Test
                
                if DEBUG:
                    debug_found_connection = True
                break

        if DEBUG:
            if debug_found_connection:
                print >> sys.stderr, "doe: got_piece_data: found a data connection for the received PIECE_DATA"
            else:
                print >> sys.stderr, "doe: got_piece_data: no data connection for the received PIECE_DATA has been found"
Code Example #8
File: Downloader.py, Project: ebcabaybay/swiftarm
    def got_piece(self, index, begin, hashlist, piece):
        """
        Returns True if the piece is complete.
        Note that in this case a -piece- means a chunk!
        """

        if self.bad_performance_counter:
            self.bad_performance_counter -= 1
            if DEBUG:
                print >> sys.stderr, "decreased bad_performance_counter to", self.bad_performance_counter

        length = len(piece)
        #if DEBUG:
        #    print >> sys.stderr, 'Downloader: got piece of length %d' % length
        try:
            self.active_requests.remove((index, begin, length))
        except ValueError:
            self.downloader.discarded += length
            return False
        if self.downloader.endgamemode:
            self.downloader.all_requests.remove((index, begin, length))
            if DEBUG:
                print >> sys.stderr, "Downloader: got_piece: removed one request from all_requests", len(
                    self.downloader.all_requests), "remaining"

        self.last = clock()
        self.last2 = clock()
        self.measure.update_rate(length)
        # Update statistic gatherer
        status = get_status_holder("LivingLab")
        s_download = status.get_or_create_status_element("downloaded", 0)
        s_download.inc(length)

        self.short_term_measure.update_rate(length)
        self.downloader.measurefunc(length)
        if not self.downloader.storage.piece_came_in(index, begin, hashlist,
                                                     piece, self.guard):
            self.downloader.piece_flunked(index)
            return False

        # boudewijn: we need more accurate (if possibly invalid)
        # measurements on current download speed
        self.downloader.picker.got_piece(index, begin, length)

        if self.downloader.storage.do_I_have(index):
            # The piece (actual piece, not chunk) is complete
            self.downloader.picker.complete(index)

            # ProxyService_
            #
            if self.downloader.proxydownloader:
                if DEBUG:
                    print >> sys.stderr, "downloader: got_piece. Searching if piece", index, "was requested by a doe node."
                if index in self.downloader.proxydownloader.proxy.currently_downloading_pieces:
                    # get_piece(index, 0, -1) returns the complete piece data
                    [piece_data, hash_list
                     ] = self.downloader.storage.get_piece(index, 0, -1)
                    self.downloader.proxydownloader.proxy.retrieved_piece(
                        index, piece_data)
            #
            # _ProxyService

        if self.downloader.endgamemode:
            for d in self.downloader.downloads:
                if d is not self:
                    if d.interested:
                        if d.choked:
                            assert not d.active_requests
                            d.fix_download_endgame()
                        else:
                            try:
                                d.active_requests.remove(
                                    (index, begin, length))
                            except ValueError:
                                continue
                            d.connection.send_cancel(index, begin, length)
                            d.fix_download_endgame()
                    else:
                        assert not d.active_requests
        self._request_more()
        self.downloader.check_complete(index)

        # BarterCast counter
        self.connection.total_downloaded += length

        return self.downloader.storage.do_I_have(index)
Code Example #9
            events = [{
                "name": event.get_name(),
                "time": event.get_time(),
                "values": event.get_values()
            } for event in events]
            data = (time(), self.device_id.encode("HEX"), events)
            compressed = compress(encode(data))
            if DEBUG:
                print >> sys.stderr, "TUDelftReporter: posting", len(
                    compressed), "bytes payload"
            self.post(compressed)
        else:
            if DEBUG: print >> sys.stderr, "TUDelftReporter: Nothing to report"


if __debug__:
    if __name__ == "__main__":
        from Tribler.Core.Statistics.Status.Status import get_status_holder

        status = get_status_holder("dispersy-simple-dispersy-test")
        status.add_reporter(
            TUDelftReporter("Periodically flush events to TUDelft", 5,
                            "blabla"))
        status.create_and_add_event("foo", ["foo", "bar"])
        status.create_and_add_event("animals",
                                    ["bunnies", "kitties", "doggies"])
        status.create_and_add_event("numbers", range(255))

        from time import sleep
        sleep(15)
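
The report() method in this TUDelftReporter snippet flattens each event into a plain dict, wraps the batch with a timestamp and the hex device id, serializes and compresses it, and posts the result. The sketch below reproduces only that serialization step: encode() in the original is some Tribler serializer, so repr() stands in for it here, zlib is assumed to match the compress() call, and no HTTP post is attempted.

# Stand-in for the serialization done in report() above. repr() replaces
# the original encode() (assumed to be a Tribler serializer); zlib stands
# in for compress(); nothing is posted anywhere.
import binascii
import time
import zlib


class _FakeEvent(object):
    """Stand-in for the event objects returned by get_events()."""
    def __init__(self, name, values):
        self._name, self._values, self._time = name, values, time.time()

    def get_name(self):
        return self._name

    def get_time(self):
        return self._time

    def get_values(self):
        return self._values


def build_payload(device_id, events):
    # flatten the events exactly like the list comprehension in report()
    flattened = [{"name": e.get_name(),
                  "time": e.get_time(),
                  "values": e.get_values()} for e in events]
    data = (time.time(), binascii.hexlify(device_id), flattened)
    return zlib.compress(repr(data).encode("utf-8"), 9)


payload = build_payload(b"device-id", [_FakeEvent("foo", ["foo", "bar"])])
print("posting %d bytes payload" % len(payload))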
Code Example #10
    def __init__(self, connecter, raw_server, my_id, max_len,
            schedulefunc, keepalive_delay, download_id, 
            measurefunc, config):
        self.raw_server = raw_server
        self.connecter = connecter
        self.my_id = my_id
        self.max_len = max_len
        self.schedulefunc = schedulefunc
        self.keepalive_delay = keepalive_delay
        self.download_id = download_id
        self.measurefunc = measurefunc
        self.config = config
        self.connections = {}
        self.banned = {}
        self.to_connect = set()
        self.trackertime = None
        self.scheduled_request_new_peers = False
        
        self.paused = False
        if self.config['max_connections'] == 0:
            self.max_connections = 100
        else:
            self.max_connections = self.config['max_connections']
        """
        In r529 there was a problem when a single Windows client 
        would connect to our text-based seeder (i.e. btlaunchmany) 
        with no other clients present. Apparently both the seeder 
        and client would connect to each other simultaneously, but 
        not end up with a good connection, halting the client.

        Arno, 2006-03-10: Reappears in ~r890, fixed in r892. It 
        appears to be a problem of writing to a nonblocking socket 
        before it signalled it is ready for writing, although the 
        evidence is inconclusive. 

        Arno: 2006-12-15: Reappears in r2319. There is some weird
        socket problem here. Using Python 2.4.4 doesn't solve it.
        The problem I see here is that as soon as we register
        at the tracker, the single seeder tries to connect to
        us. He succeeds, but after a short while the connection
        appears to be closed by him. We then wind up with no
        connection at all and have to wait until we recontact
        the tracker.

        My workaround is to refuse these initial connections from
        the seeder and wait until I've started connecting to peers
        based on the info I got from the tracker before accepting
        remote connections.
        
        Arno: 2007-02-16: I think I finally found it. The Tribler 
        tracker (BitTornado/BT1/track.py) will do a NAT check
        (BitTornado/BT1/NATCheck) by default, which consists of
        initiating a connection and then closing it after a good 
        BT handshake was received.
        
        The solution now is to make sure we check IP and port to
        identify existing connections. I already added that 2006-12-15,
        so I just removed the restriction on initial connections, 
        which are superfluous.
        """
        self.rerequest = None
        # ProxyService_
        #
        self.proxy = None
        #
        # _ProxyService        

        # hack: we should not import this since it is not part of the
        # core nor should we import here, but otherwise we will get
        # import errors
        #
        # _event_reporter stores events that are logged somewhere...
        # from Tribler.Core.Statistics.StatusReporter import get_reporter_instance
        self._event_reporter = get_status_holder("LivingLab")

        # the addresses that have already been reported
        self._known_addresses = {}

        schedulefunc(self.send_keepalives, keepalive_delay)
        
        # RePEX: added repexer field.
        # Note: perhaps call it observer in the future and make the 
        # download engine more observable?
        self.repexer = None
Code Example #11
File: tribler.py, Project: Anbcorp/tribler
    def __init__(self, params, single_instance_checker, installdir):
        self.params = params
        self.single_instance_checker = single_instance_checker
        self.installdir = self.configure_install_dir(installdir)

        self.state_dir = None
        self.error = None
        self.last_update = 0
        self.ready = False
        self.done = False
        self.frame = None

        self.guiserver = GUITaskQueue.getInstance()
        self.said_start_playback = False
        self.decodeprogress = 0

        self.old_reputation = 0

        # DISPERSY will be set when available
        self.dispersy = None
        # BARTER_COMMUNITY will be set when both Dispersy and the EffortCommunity are available
        self.barter_community = None

        self.seedingmanager = None
        self.i2is = None
        self.torrentfeed = None
        self.webUI = None
        self.utility = None
        self.videoplayer = None

        try:
            bm = wx.Bitmap(os.path.join(self.installdir, 'Tribler', 'Main', 'vwxGUI', 'images', 'splash.png'), wx.BITMAP_TYPE_ANY)
            self.splash = GaugeSplash(bm)
            self.splash.setTicks(10)
            self.splash.Show()

            print >> sys.stderr, 'Client Starting Up.'
            print >> sys.stderr, "Tribler is using", self.installdir, "as working directory"

            self.splash.tick('Starting API')
            s = self.startAPI(self.splash.tick)

            print >> sys.stderr, "Tribler is expecting swift in", self.sconfig.get_swift_path()

            self.dispersy = s.lm.dispersy

            self.utility = Utility(self.installdir, s.get_state_dir())
            self.utility.app = self
            self.utility.session = s
            self.guiUtility = GUIUtility.getInstance(self.utility, self.params, self)
            GUIDBProducer.getInstance(self.dispersy.callback)

            print >> sys.stderr, 'Tribler Version:', self.utility.lang.get('version'), ' Build:', self.utility.lang.get('build')

            self.splash.tick('Loading userdownloadchoice')
            from Tribler.Main.vwxGUI.UserDownloadChoice import UserDownloadChoice
            UserDownloadChoice.get_singleton().set_session_dir(s.get_state_dir())

            self.splash.tick('Initializing Family Filter')
            cat = Category.getInstance()

            state = self.utility.config.Read('family_filter')
            if state in ('1', '0'):
                cat.set_family_filter(state == '1')
            else:
                self.utility.config.Write('family_filter', '1')
                self.utility.config.Flush()

                cat.set_family_filter(True)

            # Create global rate limiter
            self.splash.tick('Setting up ratelimiters')
            self.ratelimiter = UserDefinedMaxAlwaysOtherwiseDividedOverActiveSwarmsRateManager()

            # Counter to suppress some event from occurring
            self.ratestatecallbackcount = 0

            # So we know if we asked for peer details last cycle
            self.lastwantpeers = []

            # boudewijn 01/04/2010: hack to fix the seedupload speed that
            # was never used and defaulted to 0 (unlimited upload)
            maxup = self.utility.config.Read('maxuploadrate', "int")
            if maxup == -1:  # no upload
                self.ratelimiter.set_global_max_speed(UPLOAD, 0.00001)
                self.ratelimiter.set_global_max_seedupload_speed(0.00001)
            else:
                self.ratelimiter.set_global_max_speed(UPLOAD, maxup)
                self.ratelimiter.set_global_max_seedupload_speed(maxup)

            maxdown = self.utility.config.Read('maxdownloadrate', "int")
            self.ratelimiter.set_global_max_speed(DOWNLOAD, maxdown)

            self.seedingmanager = GlobalSeedingManager(self.utility.config.Read)

            # Only allow updates to come in after we defined ratelimiter
            self.prevActiveDownloads = []
            s.set_download_states_callback(self.sesscb_states_callback)

            # Schedule task for checkpointing Session, to avoid hash checks after
            # crashes.
            self.guiserver.add_task(self.guiservthread_checkpoint_timer, SESSION_CHECKPOINT_INTERVAL)

            self.utility.postAppInit(os.path.join(self.installdir, 'Tribler', 'Main', 'vwxGUI', 'images', 'tribler.ico'))

            # Put it here so an error is shown in the startup-error popup
            # Start server for instance2instance communication
            self.i2iconnhandler = InstanceConnectionHandler(self.i2ithread_readlinecallback)
            self.i2is = Instance2InstanceServer(I2I_LISTENPORT, self.i2iconnhandler)
            self.i2is.start()

            # Arno, 2010-01-15: VLC's reading behaviour of doing open-ended
            # Range: GETs causes performance problems in our code. Disable for now.
            # Arno, 2010-01-22: With the addition of a CachingStream the problem
            # is less severe (see VideoPlayer), so keep GET Range enabled.
            #
            # SimpleServer.RANGE_REQUESTS_ENABLED = False

            # Fire up the VideoPlayer, it abstracts away whether we're using
            # an internal or external video player.
            playbackmode = self.utility.config.Read('videoplaybackmode', "int")
            self.videoplayer = VideoPlayer.getInstance(httpport=VIDEOHTTP_LISTENPORT)
            self.videoplayer.register(self.utility, preferredplaybackmode=playbackmode)

            notification_init(self.utility)
            self.guiUtility.register()

            channel_only = os.path.exists(os.path.join(self.installdir, 'joinchannel'))
            if channel_only:
                f = open(os.path.join(self.installdir, 'joinchannel'), 'rb')
                channel_only = f.readline()
                f.close()

            self.frame = MainFrame(None, channel_only, PLAYBACKMODE_INTERNAL in return_feasible_playback_modes(self.utility.getPath()), self.splash.tick)

            # Arno, 2011-06-15: VLC 1.1.10 pops up separate win, don't have two.
            self.frame.videoframe = None
            if PLAYBACKMODE_INTERNAL in return_feasible_playback_modes(self.utility.getPath()):
                vlcwrap = self.videoplayer.get_vlcwrap()

                self.frame.videoframe = VideoDummyFrame(self.frame.videoparentpanel, self.utility, vlcwrap)
                self.videoplayer.set_videoframe(self.frame.videoframe)

            if sys.platform == 'win32':
                wx.CallAfter(self.frame.top_bg.Refresh)
                wx.CallAfter(self.frame.top_bg.Layout)
            else:
                self.frame.top_bg.Layout()

            # Arno, 2007-05-03: wxWidgets 2.8.3.0 and earlier have the MIME-type for .bmp
            # files set to 'image/x-bmp' whereas 'image/bmp' is the official one.
            try:
                bmphand = None
                hands = wx.Image.GetHandlers()
                for hand in hands:
                    # print "Handler",hand.GetExtension(),hand.GetType(),hand.GetMimeType()
                    if hand.GetMimeType() == 'image/x-bmp':
                        bmphand = hand
                        break
                # wx.Image.AddHandler()
                if bmphand is not None:
                    bmphand.SetMimeType('image/bmp')
            except:
                # wx < 2.7 doesn't like wx.Image.GetHandlers()
                print_exc()

            self.splash.Destroy()
            self.frame.Show(True)

            self.torrentfeed = RssParser.getInstance()

            self.webUI = None
            if self.utility.config.Read('use_webui', "boolean"):
                try:
                    from Tribler.Main.webUI.webUI import WebUI
                    self.webUI = WebUI.getInstance(self.guiUtility.library_manager, self.guiUtility.torrentsearch_manager, self.utility.config.Read('webui_port', "int"))
                    self.webUI.start()
                except Exception:
                    print_exc()

            wx.CallAfter(self.PostInit2)

            # 08/02/10 Boudewijn: Working from home though console
            # doesn't allow me to press close.  The statement below
            # gracefully closes Tribler after 120 seconds.
            # wx.CallLater(120*1000, wx.GetApp().Exit)

            status = get_status_holder("LivingLab")
            status.add_reporter(NullReporter("Periodically remove all events", 0))
# status.add_reporter(LivingLabPeriodicReporter("Living lab CS reporter", 300, "Tribler client")) # Report every 5 minutes
# status.add_reporter(LivingLabPeriodicReporter("Living lab CS reporter", 30, "Tribler client")) # Report every 30 seconds - ONLY FOR TESTING

            # report client version
            status.create_and_add_event("client-startup-version", [self.utility.lang.get("version")])
            status.create_and_add_event("client-startup-build", [self.utility.lang.get("build")])
            status.create_and_add_event("client-startup-build-date", [self.utility.lang.get("build_date")])

            self.ready = True

        except Exception as e:
            self.onError(e)
            return False
Code Example #12
    def __init__(self, infohash, metainfo, kvconfig, multihandler, get_extip_func, listenport, videoanalyserpath, vodfileindex, set_error_func, pstate, lmvodeventcallback, lmhashcheckcompletecallback, dlinstance):
        self.dow = None
        self.set_error_func = set_error_func
        self.videoinfo = None
        self.videostatus = None
        self.lmvodeventcallback = lmvodeventcallback
        self.lmhashcheckcompletecallback = lmhashcheckcompletecallback
        self.logmsgs = []
        self._hashcheckfunc = None
        self._getstatsfunc = None
        self.infohash = infohash
        self.b64_infohash = b64encode(infohash)
        self.repexer = None
        # ProxyService_
        #
        self.dlinstance = dlinstance
        #
        # _proxyService
        
        
        try:
            self.dldoneflag = Event()
            self.dlrawserver = multihandler.newRawServer(infohash,self.dldoneflag)
            self.lmvodeventcallback = lmvodeventcallback
    
            if pstate is not None:
                self.hashcheckfrac = pstate['dlstate']['progress']
            else:
                self.hashcheckfrac = 0.0
    
            self.peerid = createPeerID()
            
            # LOGGING
            event_reporter = get_status_holder("LivingLab")
            event_reporter.create_and_add_event("peerid", [self.b64_infohash, b64encode(self.peerid)])
            
            #print >>sys.stderr,"SingleDownload: __init__: My peer ID is",`peerid`
    
            self.dow = BT1Download(self.hashcheckprogressfunc,
                            self.finishedfunc,
                            self.fatalerrorfunc, 
                            self.nonfatalerrorfunc,
                            self.logerrorfunc,
                            self.dldoneflag,
                            kvconfig,
                            metainfo, 
                            infohash,
                            self.peerid,
                            self.dlrawserver,
                            get_extip_func,
                            listenport,
                            videoanalyserpath,
                            self.dlinstance
                            )
        
            file = self.dow.saveAs(self.save_as)
            #if DEBUG:
            #    print >>sys.stderr,"SingleDownload: dow.saveAs returned",file
            
            # Set local filename in vodfileindex
            if vodfileindex is not None:
                # Ric: for SVC the index is a list of indexes
                index = vodfileindex['index']
                if type(index) == ListType:
                    svc = len(index) > 1
                else:
                    svc = False
                
                if svc:
                    outpathindex = self.dow.get_dest(index[0])
                else:
                    if index == -1:
                        index = 0
                    outpathindex = self.dow.get_dest(index)

                vodfileindex['outpath'] = outpathindex
                self.videoinfo = vodfileindex
                if 'live' in metainfo['info']:
                    authparams = metainfo['info']['live']
                else:
                    authparams = None
                if svc:
                    self.videostatus = SVCVideoStatus(metainfo['info']['piece length'],self.dow.files,vodfileindex,authparams)
                else:
                    self.videostatus = VideoStatus(metainfo['info']['piece length'],self.dow.files,vodfileindex,authparams)
                self.videoinfo['status'] = self.videostatus
                self.dow.set_videoinfo(vodfileindex,self.videostatus)

            #if DEBUG:
            #    print >>sys.stderr,"SingleDownload: setting vodfileindex",vodfileindex
            
            # RePEX: Start in RePEX mode
            if kvconfig['initialdlstatus'] == DLSTATUS_REPEXING:
                if pstate is not None and pstate.has_key('dlstate'):
                    swarmcache = pstate['dlstate'].get('swarmcache',{})
                else:
                    swarmcache = {}
                self.repexer = RePEXer(self.infohash, swarmcache)
            else:
                self.repexer = None
            
            if pstate is None:
                resumedata = None
            else:
                # Restarting download
                resumedata=pstate['engineresumedata']
            self._hashcheckfunc = self.dow.initFiles(resumedata=resumedata)

            
        except Exception,e:
            self.fatalerrorfunc(e)
Code Example #13
    def got_piece(self, index, begin, hashlist, piece):
        """
        Returns True if the piece is complete.
        Note that in this case a -piece- means a chunk!
        """

        if self.bad_performance_counter:
            self.bad_performance_counter -= 1
            if DEBUG: print >>sys.stderr, "decreased bad_performance_counter to", self.bad_performance_counter

        length = len(piece)
        #if DEBUG:
        #    print >> sys.stderr, 'Downloader: got piece of length %d' % length
        try:
            self.active_requests.remove((index, begin, length))
        except ValueError:
            self.downloader.discarded += length
            return False
        if self.downloader.endgamemode:
            self.downloader.all_requests.remove((index, begin, length))
            if DEBUG: print >>sys.stderr, "Downloader: got_piece: removed one request from all_requests", len(self.downloader.all_requests), "remaining"

        self.last = clock()
        self.last2 = clock()
        self.measure.update_rate(length)
        # Update statistic gatherer
        status = get_status_holder("LivingLab")
        s_download = status.get_or_create_status_element("downloaded",0)
        s_download.inc(length)
        
        self.short_term_measure.update_rate(length)
        self.downloader.measurefunc(length)
        if not self.downloader.storage.piece_came_in(index, begin, hashlist, piece, self.guard):
            self.downloader.piece_flunked(index)
            return False

        # boudewijn: we need more accurate (if possibly invalid)
        # measurements on current download speed
        self.downloader.picker.got_piece(index, begin, length)

        if self.downloader.storage.do_I_have(index):
            # The piece (actual piece, not chunk) is complete
            self.downloader.picker.complete(index)
            
            # ProxyService_
            #
            if self.downloader.proxydownloader:
                if DEBUG:
                    print >>sys.stderr, "downloader: got_piece. Searching if piece", index, "was requested by a doe node."
                if index in self.downloader.proxydownloader.proxy.currently_downloading_pieces:
                    # get_piece(index, 0, -1) returns the complete piece data
                    [piece_data, hash_list] = self.downloader.storage.get_piece(index, 0, -1)
                    self.downloader.proxydownloader.proxy.retrieved_piece(index, piece_data)
            #
            # _ProxyService

        if self.downloader.endgamemode:
            for d in self.downloader.downloads:
                if d is not self:
                    if d.interested:
                        if d.choked:
                            assert not d.active_requests
                            d.fix_download_endgame()
                        else:
                            try:
                                d.active_requests.remove((index, begin, length))
                            except ValueError:
                                continue
                            d.connection.send_cancel(index, begin, length)
                            d.fix_download_endgame()
                    else:
                        assert not d.active_requests
        self._request_more()
        self.downloader.check_complete(index)
        
        # BarterCast counter
        self.connection.total_downloaded += length
    
        return self.downloader.storage.do_I_have(index)
Code Example #14
    def __init__(self, infohash, metainfo, kvconfig, multihandler, get_extip_func, listenport, videoanalyserpath, vodfileindex, set_error_func, pstate, lmvodeventcallback, lmhashcheckcompletecallback, dlinstance):
        self.dow = None
        self.set_error_func = set_error_func
        self.videoinfo = None
        self.videostatus = None
        self.lmvodeventcallback = lmvodeventcallback
        self.lmhashcheckcompletecallback = lmhashcheckcompletecallback
        self.logmsgs = []
        self._hashcheckfunc = None
        self._getstatsfunc = None
        self.infohash = infohash
        self.b64_infohash = b64encode(infohash)
        self.repexer = None
        # ProxyService_
        #
        self.dlinstance = dlinstance
        #
        # _proxyService
        
        
        try:
            self.dldoneflag = Event()
            self.dlrawserver = multihandler.newRawServer(infohash,self.dldoneflag)
            self.lmvodeventcallback = lmvodeventcallback
    
            if pstate is not None:
                self.hashcheckfrac = pstate['dlstate']['progress']
            else:
                self.hashcheckfrac = 0.0
    
            self.peerid = createPeerID()
            
            # LOGGING
            event_reporter = get_status_holder("LivingLab")
            event_reporter.create_and_add_event("peerid", [self.b64_infohash, b64encode(self.peerid)])
            
            #print >>sys.stderr,"SingleDownload: __init__: My peer ID is",`peerid`
    
            self.dow = BT1Download(self.hashcheckprogressfunc,
                            self.finishedfunc,
                            self.fatalerrorfunc, 
                            self.nonfatalerrorfunc,
                            self.logerrorfunc,
                            self.dldoneflag,
                            kvconfig,
                            metainfo, 
                            infohash,
                            self.peerid,
                            self.dlrawserver,
                            get_extip_func,
                            listenport,
                            videoanalyserpath,
                            self.dlinstance
                            )
        
            file = self.dow.saveAs(self.save_as)
            #if DEBUG:
            #    print >>sys.stderr,"SingleDownload: dow.saveAs returned",file
            
            # Set local filename in vodfileindex
            if vodfileindex is not None:
                # Ric: for SVC the index is a list of indexes
                index = vodfileindex['index']
                if type(index) == ListType:
                    svc = len(index) > 1
                else:
                    svc = False
                
                if svc:
                    outpathindex = self.dow.get_dest(index[0])
                else:
                    if index == -1:
                        index = 0
                    outpathindex = self.dow.get_dest(index)

                vodfileindex['outpath'] = outpathindex
                self.videoinfo = vodfileindex
                if 'live' in metainfo['info']:
                    authparams = metainfo['info']['live']
                else:
                    authparams = None
                if svc:
                    self.videostatus = SVCVideoStatus(metainfo['info']['piece length'],self.dow.files,vodfileindex,authparams)
                else:
                    self.videostatus = VideoStatus(metainfo['info']['piece length'],self.dow.files,vodfileindex,authparams)
                self.videoinfo['status'] = self.videostatus
                self.dow.set_videoinfo(vodfileindex,self.videostatus)

            #if DEBUG:
            #    print >>sys.stderr,"SingleDownload: setting vodfileindex",vodfileindex
            
            # RePEX: Start in RePEX mode
            if kvconfig['initialdlstatus'] == DLSTATUS_REPEXING:
                if pstate is not None and pstate.has_key('dlstate'):
                    swarmcache = pstate['dlstate'].get('swarmcache',{})
                else:
                    swarmcache = {}
                self.repexer = RePEXer(self.infohash, swarmcache)
            else:
                self.repexer = None
            
            if pstate is None:
                resumedata = None
            else:
                # Restarting download
                resumedata=pstate['engineresumedata']
            self._hashcheckfunc = self.dow.initFiles(resumedata=resumedata)

            
        except Exception,e:
            self.fatalerrorfunc(e)
Code Example #15
File: TUDelftReporter.py, Project: Anbcorp/tribler
        LivingLabPeriodicReporter.__init__(self, name, frequency, public_key)
        # note: public_key is set to self.device_id

    def report(self):
        if DEBUG:
            print >> sys.stderr, "TUDelftReporter: report"
        events = self.get_events()
        if events:
            events = [{"name": event.get_name(), "time": event.get_time(), "values": event.get_values()} for event in events]
            data = (time(), self.device_id.encode("HEX"), events)
            compressed = compress(encode(data))
            if DEBUG:
                print >> sys.stderr, "TUDelftReporter: posting", len(compressed), "bytes payload"
            self.post(compressed)
        else:
            if DEBUG:
                print >> sys.stderr, "TUDelftReporter: Nothing to report"

if __debug__:
    if __name__ == "__main__":
        from Tribler.Core.Statistics.Status.Status import get_status_holder

        status = get_status_holder("dispersy-simple-dispersy-test")
        status.add_reporter(TUDelftReporter("Periodically flush events to TUDelft", 5, "blabla"))
        status.create_and_add_event("foo", ["foo", "bar"])
        status.create_and_add_event("animals", ["bunnies", "kitties", "doggies"])
        status.create_and_add_event("numbers", range(255))

        from time import sleep
        sleep(15)
Code Example #16
File: StatusReporter.py, Project: Anaconda84/Anaconda
    def _reporting_thread(self):
        """
        Send the report on a separate thread

        We choose not to use a lock object to protect access to
        self._enable_reporting, self._retry_delay, and
        self._report_deadline because only a single thread will write
        and the other threads will only read these variables. Python
        doesn't cause problems in this case.
        """
        # minimum retry delay. this value will grow exponentially with
        # every failure
        retry_delay = 15

        # the amount of time to sleep before the next report (or until
        # the _thread_event is set)
        timeout = retry_delay

        # a list containing all urlencoded reports that have not yet been
        # sent (most of the time this list will be empty, except when
        # reports could not be delivered)
        reports = []

        # local copy of the self._event when it is being reported
        event = None
        
        if USE_LIVING_LAB_REPORTING:
            # the m18 trial statistics are gathered at the 'living lab'
            session = Session.get_instance()
            living_lab_reporter = LivingLabOnChangeReporter("vod-stats-reporter")
            living_lab_reporter.set_permid(session.get_permid())
            status_holder = get_status_holder("vod-stats")
            status_holder.add_reporter(living_lab_reporter)
            status_element = status_holder.create_status_element("action-list", "A list containing timestamped VOD playback events", initial_value=[])

        else:
            # there are several urls available where reports can be
            # sent. one should be picked randomly each time.
            #
            # when a report is successful it will stay with the same
            # reporter. when a report is unsuccessful (could not
            # connect) it will cycle through reporters.
            report_urls = [[0, 0, "http://reporter1.tribler.org/swarmplayer.py"],
                           [0, 0, "http://reporter2.tribler.org/swarmplayer.py"],
                           [0, 0, "http://reporter3.tribler.org/swarmplayer.py"],
                           [0, 0, "http://reporter4.tribler.org/swarmplayer.py"],
                           [0, 0, "http://reporter5.tribler.org/swarmplayer.py"],
                           [0, 0, "http://reporter6.tribler.org/swarmplayer.py"],
                           [0, 0, "http://reporter7.tribler.org/swarmplayer.py"],
                           [0, 0, "http://reporter8.tribler.org/swarmplayer.py"],
                           [0, 0, "http://reporter9.tribler.org/swarmplayer.py"]]
            shuffle(report_urls)

        while True:
            # sleep in between reports. will send a report immediately
            # when the flush event is set
            self._thread_flush.wait(timeout)
            self._thread_flush.clear()

            # create report
            self._thread_lock.acquire()
            try:
                if self._event:
                    # copy between threads while locked
                    event = self._event

                    self._event = []
                else:
                    # we have nothing to report... sleep
                    timeout = retry_delay
                    event = None

            finally:
                self._thread_lock.release()

            if event:
                # prepend the session-key
                event.insert(0, {"key":"session-key", "timestamp":time(), "event":self._session_key})
                event.insert(0, {"key":"sequence-number", "timestamp":time(), "event":self._sequence_number})
                self._sequence_number += 1

            if USE_LIVING_LAB_REPORTING:
                if event:
                    try:
                        if status_element.set_value(event):
                            # Living lab doesn't support dynamic reporting.
                            # We use 60 seconds by default
                            timeout = 60
                        else:
                            # something went wrong...
                            retry_delay *= 2
                            timeout = retry_delay
                    except:
                        # error contacting server
                        print_exc(file=sys.stderr)
                        retry_delay *= 2
                        timeout = retry_delay

            else:
                # add new report
                if event:
                    if len(event) < 10:
                        # uncompressed
                        report = {"version":"3",
                                  "created":time(),
                                  "event":event}
                    else:
                        # compress
                        report = {"version":"4",
                                  "created":time(),
                                  "event":urllib.quote(zlib.compress(repr(event), 9))}

                    reports.append(urllib.urlencode(report))

                if not reports:
                    timeout = retry_delay
                    continue

                reporter = report_urls[0]

                if DEBUG: print >> sys.stderr, time.asctime(),'-', "EventStatusReporter: attempting to report,", len(reports[0]), "bytes to", reporter[2]
                try:
                    sock = urllib.urlopen(reporter[2], reports[0])
                    result = sock.read()
                    sock.close()

                    # all ok? then remove the report
                    del reports[0]

                    # increase the 'good-report' counter, no need to re-order
                    reporter[1] += 1
                except:
                    # error contacting server
                    print_exc(file=sys.stderr)
                    retry_delay *= 2

                    # increase the 'bad-report' counter and order by failures
                    reporter[0] += 1
                    report_urls.sort(lambda x, y:cmp(x[0], y[0]))
                    continue

                if result.isdigit():
                    result = int(result)
                    if result == 0:
                        # remote server is not recording, so don't bother
                        # sending events
                        if DEBUG: print >> sys.stderr, time.asctime(),'-', "EventStatusReporter: received -zero- from the HTTP server. Reporting disabled"
                        self._thread_lock.acquire()
                        self._enable_reporting = False
                        self._thread_lock.release()

                        # close thread
                        return

                    else:
                        # I choose not to reset the retry_delay because
                        # swarmplayer sessions tend to be short, and if
                        # there are connection failures I want as few
                        # retries as possible
                        if DEBUG: print >> sys.stderr, time.asctime(),'-', "EventStatusReporter: report successful. Next report in", result, "seconds"
                        timeout = result
                else:
                    self._thread_lock.acquire()
                    self._enable_reporting = False
                    self._thread_lock.release()

                    # close thread
                    return
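
The non-LivingLab branch of _reporting_thread above implements a simple delivery policy: queued reports are posted to the first URL in report_urls; on success the report is dropped and that URL's success counter grows, on failure the retry delay doubles and the URL list is re-sorted by failure count so the least-failing endpoint is tried next. Below is a compact sketch of that policy; post() is a hypothetical callable standing in for the urllib call, and the example.org URLs are placeholders, not the real reporter addresses.

# Sketch of the retry/rotation policy in _reporting_thread above.
# post() is a hypothetical stand-in for urllib.urlopen; the URLs are
# placeholders, not the real tribler.org reporters.
from random import shuffle


def deliver(reports, report_urls, post, retry_delay=15):
    """Try to deliver queued reports; return the timeout before the next attempt."""
    while reports:
        reporter = report_urls[0]            # [failures, successes, url]
        if post(reporter[2], reports[0]):
            del reports[0]                   # delivered, drop it
            reporter[1] += 1                 # good-report counter
        else:
            reporter[0] += 1                 # bad-report counter
            report_urls.sort(key=lambda r: r[0])   # least-failing URL first
            retry_delay *= 2                 # exponential backoff
            return retry_delay
    return retry_delay


report_urls = [[0, 0, "http://reporter%d.example.org/" % i] for i in range(1, 4)]
shuffle(report_urls)
timeout = deliver(["payload"], report_urls, post=lambda url, body: False)
print("next attempt in %d seconds" % timeout)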
Code Example #17
File: Downloader.py, Project: ebcabaybay/swiftarm
    def __init__(self,
                 infohash,
                 storage,
                 picker,
                 backlog,
                 max_rate_period,
                 numpieces,
                 chunksize,
                 measurefunc,
                 snub_time,
                 kickbans_ok,
                 kickfunc,
                 banfunc,
                 bt1dl,
                 scheduler=None):
        self.infohash = infohash
        self.b64_infohash = b64encode(infohash)
        self.storage = storage
        self.picker = picker
        self.backlog = backlog
        self.max_rate_period = max_rate_period
        self.measurefunc = measurefunc
        self.totalmeasure = Measure(max_rate_period * storage.piece_length /
                                    storage.request_size)
        self.numpieces = numpieces
        self.chunksize = chunksize
        self.snub_time = snub_time
        self.kickfunc = kickfunc
        self.banfunc = banfunc
        self.disconnectedseeds = {}
        self.downloads = []
        self.perip = {}
        self.gotbaddata = {}
        self.kicked = {}
        self.banned = {}
        self.kickbans_ok = kickbans_ok
        self.kickbans_halted = False
        self.super_seeding = False
        self.endgamemode = False
        self.endgame_queued_pieces = []
        self.all_requests = []
        self.discarded = 0L
        self.download_rate = 0
        #        self.download_rate = 25000  # 25K/s test rate
        self.bytes_requested = 0
        self.last_time = clock()
        self.queued_out = {}
        self.requeueing = False
        self.paused = False
        self.scheduler = scheduler
        # ProxyService_
        #
        self.bt1dl = bt1dl
        self.proxydownloader = None
        #
        # _ProxyService

        # hack: we should not import this since it is not part of the
        # core nor should we import here, but otherwise we will get
        # import errors
        #
        # _event_reporter stores events that are logged somewhere...
        # from Tribler.Core.Statistics.StatusReporter import get_reporter_instance
        # self._event_reporter = get_reporter_instance()
        self._event_reporter = get_status_holder("LivingLab")

        # check periodically
        self.scheduler(self.dlr_periodic_check, 1)
Code Example #18
    def OnInitBase(self):
        """ To be wrapped in a OnInit() method that returns True/False """

        # Normal startup
        # Read config
        state_dir = Session.get_default_state_dir('.'+self.appname)

        self.utility = UtilityStub(self.installdir,state_dir)
        self.utility.app = self
        print >>sys.stderr,self.utility.lang.get('build')
        self.iconpath = os.path.join(self.installdir,LIBRARYNAME,'Images',self.appname+'Icon.ico')
        self.logopath = os.path.join(self.installdir,LIBRARYNAME,'Images',self.appname+'Logo.png')


        # Start server for instance2instance communication
        self.i2is = Instance2InstanceServer(self.i2iport,self,timeout=(24.0*3600.0))


        # The playerconfig contains all config parameters that are not
        # saved by checkpointing the Session or its Downloads.
        self.load_playerconfig(state_dir)

        # Install systray icon
        # Note: setting this makes the program not exit when the videoFrame
        # is being closed.
        self.tbicon = PlayerTaskBarIcon(self,self.iconpath)

        # Start Tribler Session
        cfgfilename = Session.get_default_config_filename(state_dir)

        if DEBUG:
            print >>sys.stderr,"main: Session config",cfgfilename
        try:
            self.sconfig = SessionStartupConfig.load(cfgfilename)

            print >>sys.stderr,"main: Session saved port",self.sconfig.get_listen_port(),cfgfilename
        except:
            print_exc()
            self.sconfig = SessionStartupConfig()
            self.sconfig.set_install_dir(self.installdir)
            self.sconfig.set_state_dir(state_dir)
            self.sconfig.set_listen_port(self.sport)
            self.configure_session()

        self.s = Session(self.sconfig)
        self.s.set_download_states_callback(self.sesscb_states_callback)

        # self.reporter = Reporter( self.sconfig )

        if RATELIMITADSL:
            self.ratelimiter = UserDefinedMaxAlwaysOtherwiseEquallyDividedRateManager()
            self.ratelimiter.set_global_max_speed(DOWNLOAD,DOWNLOADSPEED)
            self.ratelimiter.set_global_max_speed(UPLOAD,90)


        # Arno: For extra robustness, ignore any errors related to restarting
        try:
            # Load all other downloads in cache, but in STOPPED state
            self.s.load_checkpoint(initialdlstatus=DLSTATUS_STOPPED)
        except:
            print_exc()

        # Start remote control
        self.i2is.start()

        # report client version
        # from Tribler.Core.Statistics.StatusReporter import get_reporter_instance
        reporter = get_status_holder("LivingLab")
        reporter.create_and_add_event("client-startup-version", [self.utility.lang.get("version")])
        reporter.create_and_add_event("client-startup-build", [self.utility.lang.get("build")])
        reporter.create_and_add_event("client-startup-build-date", [self.utility.lang.get("build_date")])
Code Example #19
File: Encrypter.py, Project: ebcabaybay/swiftarm
    def __init__(self, connecter, raw_server, my_id, max_len, schedulefunc,
                 keepalive_delay, download_id, measurefunc, config):
        self.raw_server = raw_server
        self.connecter = connecter
        self.my_id = my_id
        self.max_len = max_len
        self.schedulefunc = schedulefunc
        self.keepalive_delay = keepalive_delay
        self.download_id = download_id
        self.measurefunc = measurefunc
        self.config = config
        self.connections = {}
        self.banned = {}
        self.to_connect = set()
        self.trackertime = None
        self.scheduled_request_new_peers = False

        self.paused = False
        if self.config['max_connections'] == 0:
            self.max_connections = 100
        else:
            self.max_connections = self.config['max_connections']
        """
        In r529 there was a problem when a single Windows client 
        would connect to our text-based seeder (i.e. btlaunchmany) 
        with no other clients present. Apparently both the seeder 
        and client would connect to each other simultaneously, but 
        not end up with a good connection, halting the client.

        Arno, 2006-03-10: Reappears in ~r890, fixed in r892. It 
        appears to be a problem of writing to a nonblocking socket 
        before it signalled it is ready for writing, although the 
        evidence is inconclusive. 

        Arno: 2006-12-15: Reappears in r2319. There is some weird
        socket problem here. Using Python 2.4.4 doesn't solve it.
        The problem I see here is that as soon as we register
        at the tracker, the single seeder tries to connect to
        us. He succeeds, but after a short while the connection
        appears to be closed by him. We then wind up with no
        connection at all and have to wait until we recontact
        the tracker.

        My workaround is to refuse these initial connections from
        the seeder and wait until I've started connecting to peers
        based on the info I got from the tracker before accepting
        remote connections.
        
        Arno: 2007-02-16: I think I finally found it. The Tribler 
        tracker (BitTornado/BT1/track.py) will do a NAT check
        (BitTornado/BT1/NATCheck) by default, which consists of
        initiating a connection and then closing it after a good 
        BT handshake was received.
        
        The solution now is to make sure we check IP and port to
        identify existing connections. I already added that 2006-12-15,
        so I just removed the restriction on initial connections, 
        which are superfluous.
        """
        self.rerequest = None
        # ProxyService_
        #
        self.proxy = None
        #
        # _ProxyService

        # hack: we should not import this since it is not part of the
        # core nor should we import here, but otherwise we will get
        # import errors
        #
        # _event_reporter stores events that are logged somewhere...
        # from Tribler.Core.Statistics.StatusReporter import get_reporter_instance
        self._event_reporter = get_status_holder("LivingLab")

        # the addresses that have already been reported
        self._known_addresses = {}

        schedulefunc(self.send_keepalives, keepalive_delay)

        # RePEX: added repexer field.
        # Note: perhaps call it observer in the future and make the
        # download engine more observable?
        self.repexer = None