Example #1
    def init_session(self):
        scfg = SessionStartupConfig()
        scfg.set_state_dir(tempfile.mkdtemp())
        scfg.set_listen_port(self._port)
        scfg.set_overlay(False)
        scfg.set_megacache(False)
        scfg.set_upnp_mode(simpledefs.UPNPMODE_DISABLED)
        scfg.set_dialback(False)
        scfg.set_social_networking(False)
        scfg.set_buddycast(False)
        scfg.set_crawler(False)
        scfg.set_internal_tracker(False)

        self._session = Session(scfg)
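
This headless-session setup recurs below (Examples #7, #14, #17, #20 and #22). A minimal sketch of how such a session might be driven and torn down; the SessionStartupConfig module path and the session.shutdown() teardown call are assumptions, not shown on this page:

    import shutil
    import tempfile
    import time

    from BaseLib.Core.Session import Session
    from BaseLib.Core.SessionConfig import SessionStartupConfig  # assumed path
    from BaseLib.Core import simpledefs

    def run_headless_session(port, seconds=60):
        scfg = SessionStartupConfig()
        state_dir = tempfile.mkdtemp()   # throwaway state dir, as above
        scfg.set_state_dir(state_dir)
        scfg.set_listen_port(port)
        scfg.set_overlay(False)          # disable overlay/social features,
        scfg.set_megacache(False)        # exactly as the examples here do
        scfg.set_upnp_mode(simpledefs.UPNPMODE_DISABLED)
        session = Session(scfg)
        try:
            time.sleep(seconds)          # start downloads here instead
        finally:
            session.shutdown()           # assumed teardown API
            shutil.rmtree(state_dir, ignore_errors=True)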
Example #2
 def getRankedPeers(self, iplist):
     if self.own_ip_info is None:
         # Probably we got a local IP address before, so try now to get the right one from the session (created AFTER this policy!)
         from BaseLib.Core.Session import Session
         self.ip_addr = Session.get_instance().get_external_ip()
         self.own_ip_info = gi_city.record_by_addr(self.ip_addr)
         self._logger.info("ip info %s for ip addr %s " % (self.own_ip_info, self.ip_addr))
     #return RankingPolicy.getRankedPeers(self, iplist)
     res = dict()
     for ip in iplist:
         other = gi_city.record_by_addr(ip)
         if self._compareFiled(self.own_ip_info, other, 'city'):
             res[ip] = 1000
         elif self._compareFiled(self.own_ip_info, other, 'region') \
         or self._compareFiled(self.own_ip_info, other, 'region_name'):
             res[ip] = 500
         elif self._compareFiled(self.own_ip_info, other, 'country_name'):
             res[ip] = 250
         else:
             res[ip] = 0
      
     if self._logger.isEnabledFor(logging.DEBUG):
         self._logger.debug("Ranked iplist %s" % str(res)) 
         
     return res
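
The ranking code above (repeated as Example #3 below) calls a `_compareFiled` helper that this page does not show. A minimal sketch of what such a field comparison might look like, assuming the dictionaries returned by pygeoip's record_by_addr() and treating missing records or fields as non-matching:

    def _compareFiled(self, own, other, field):
        # own/other are dicts from gi_city.record_by_addr(), or None when
        # the lookup failed; absent data never counts as a match
        if own is None or other is None:
            return False
        a = own.get(field)
        b = other.get(field)
        return a is not None and a == b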
Example #3
    def getRankedPeers(self, iplist):
        if self.own_ip_info is None:
            # Probably we got a local IP address before, so try now to get the right one from the session (created AFTER this policy!)
            from BaseLib.Core.Session import Session
            self.ip_addr = Session.get_instance().get_external_ip()
            self.own_ip_info = gi_city.record_by_addr(self.ip_addr)
            self._logger.info("ip info %s for ip addr %s " %
                              (self.own_ip_info, self.ip_addr))
        #return RankingPolicy.getRankedPeers(self, iplist)
        res = dict()
        for ip in iplist:
            other = gi_city.record_by_addr(ip)
            if self._compareFiled(self.own_ip_info, other, 'city'):
                res[ip] = 1000
            elif self._compareFiled(self.own_ip_info, other, 'region') \
            or self._compareFiled(self.own_ip_info, other, 'region_name'):
                res[ip] = 500
            elif self._compareFiled(self.own_ip_info, other, 'country_name'):
                res[ip] = 250
            else:
                res[ip] = 0

        if self._logger.isEnabledFor(logging.DEBUG):
            self._logger.debug("Ranked iplist %s" % str(res))

        return res
Example #4
 def __init__(self):
     from BaseLib.Core.Session import Session # Circular import fix
     self.session = Session.get_instance()
     self.peerdb = RePEXLogDB.getInstance()
     self.downloads = {} # infohash -> Download 
     self.swarmcaches = {} # Download -> [SwarmCache]
     self.repexers = {} # Download -> [repexer]
     # register as global observer
     RePEXer.attach_observer(self)
Example #5
 def __init__(self):
     from BaseLib.Core.Session import Session  # Circular import fix
     self.session = Session.get_instance()
     self.peerdb = RePEXLogDB.getInstance()
     self.downloads = {}  # infohash -> Download
     self.swarmcaches = {}  # Download -> [SwarmCache]
     self.repexers = {}  # Download -> [repexer]
     # register as global observer
     RePEXer.attach_observer(self)
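
Examples #4 and #5 register the new object as a global observer on RePEXer, but attach_observer itself is not shown on this page. A hedged sketch of the hook this implies, assuming RePEXer keeps a class-level observer list (the repex_done callback name is hypothetical):

    class RePEXer(object):
        _observers = []  # shared by all RePEXer instances

        @classmethod
        def attach_observer(cls, observer):
            # every repex result is fanned out to all registered observers
            cls._observers.append(observer)

        @classmethod
        def _notify_observers(cls, *args):
            for observer in cls._observers:
                observer.repex_done(*args)  # hypothetical callback name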
Example #6
 def __init__(self):
     # always use getInstance() to create this object
     # ARNOCOMMENT: why isn't the lock used on this read?!
     if self.__single is not None:
         raise RuntimeError("RePEXScheduler is singleton")
     from BaseLib.Core.Session import Session # Circular import fix
     self.session = Session.get_instance()
     self.lock = RLock()
     self.active = False
     self.current_repex = None # infohash
     self.downloads = {} # infohash -> Download; in order to stop Downloads that are done repexing
     self.last_attempts = {} # infohash -> ts; in order to prevent starvation when a certain download
Example #7
 def init_session(self):
     scfg = SessionStartupConfig()
     scfg.set_state_dir(tempfile.mkdtemp())
     scfg.set_listen_port(self._port)
     scfg.set_overlay(False)
     scfg.set_megacache(False)
     scfg.set_upnp_mode(simpledefs.UPNPMODE_DISABLED)
     scfg.set_dialback(False)
     scfg.set_social_networking(False)
     scfg.set_buddycast(False)
     scfg.set_crawler(False)
     scfg.set_internal_tracker(False)
     
     self._session = Session(scfg)
Example #8
 def __init__(self):
     # always use getInstance() to create this object
     # ARNOCOMMENT: why isn't the lock used on this read?!
      if self.__single is not None:
          raise RuntimeError("RePEXScheduler is singleton")
     from BaseLib.Core.Session import Session  # Circular import fix
     self.session = Session.get_instance()
     self.lock = RLock()
     self.active = False
     self.current_repex = None  # infohash
      self.downloads = {}  # infohash -> Download; in order to stop Downloads that are done repexing
      self.last_attempts = {}  # infohash -> ts; in order to prevent starvation when a certain download
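
The `__single` check and the ARNOCOMMENT in Examples #6 and #8 refer to the codebase's getInstance() convention, which this page does not show. A minimal sketch of that pattern, with the read also done under the lock as the comment suggests it should be (names assumed):

    from threading import RLock

    class RePEXScheduler(object):
        __single = None
        __lock = RLock()

        def __init__(self):
            # mirrors the check above: direct construction of a second
            # instance is an error
            if RePEXScheduler.__single is not None:
                raise RuntimeError("RePEXScheduler is singleton")

        @classmethod
        def getInstance(cls):
            # take the lock for the read as well, addressing the
            # ARNOCOMMENT above
            cls.__lock.acquire()
            try:
                if cls.__single is None:
                    cls.__single = cls()
                return cls.__single
            finally:
                cls.__lock.release()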
Example #9
def get_reporter_instance():
    """
    A helper function that returns the right event reporter based on
    some configuration options.
    """
    session = Session.get_instance()

    if session.get_overlay():
        # hack: we should not import this since it is not part of
        # the core nor should we import here, but otherwise we
        # will get import errors
        #
        # note: the name VideoPlaybackDBHandler is a legacy name from
        # when this reporter was solely used to report video-playback
        # statistics.
        from BaseLib.Core.CacheDB.SqliteVideoPlaybackStatsCacheDB import VideoPlaybackDBHandler
        return VideoPlaybackDBHandler.get_instance()
    else:
        return EventStatusReporter.get_instance()
Example #10
def get_reporter_instance():
    """
    A helper function that returns the right event reporter based on
    some configuration options.
    """
    session = Session.get_instance()

    if session.get_overlay():
        # hack: we should not import this since it is not part of
        # the core nor should we import here, but otherwise we
        # will get import errors
        #
        # note: the name VideoPlaybackDBHandler is a legacy name from
        # when this reporter was solely used to report video-playback
        # statistics.
        from BaseLib.Core.CacheDB.SqliteVideoPlaybackStatsCacheDB import VideoPlaybackDBHandler
        return VideoPlaybackDBHandler.get_instance()
    else:
        return EventStatusReporter.get_instance()
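
Both branches return an object with the same add_event(key, message) interface, which Examples #15 and #16 below use. A brief usage sketch (the piece-size value is illustrative only):

    import base64
    import os

    reporter = get_reporter_instance()

    # a random per-playback key, generated the same way as in Example #15
    playback_key = base64.b64encode(os.urandom(20))
    reporter.add_event(playback_key, "play-init")
    reporter.add_event(playback_key, "piece-size:%d" % 32768)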
Example #11
 def __init__(self, *args, **kargs):
     # always use getInstance() to create this object
     # ARNOCOMMENT: why isn't the lock used on this read?!
      if self.__single is not None:
          raise RuntimeError("RePEXLogDB is singleton")
     #SQLiteCacheDBBase.__init__(self, *args, **kargs)
     
     from BaseLib.Core.Session import Session # Circular import fix
     state_dir = Session.get_instance().sessconfig['state_dir']
     self.db = os.path.join(state_dir, self.PEERDB_FILE)
     if not os.path.exists(self.db):
         self.version = self.PEERDB_VERSION
         self.history = []
     else:
         import cPickle as pickle
         f = open(self.db,'rb')
          self.version, self.history = pickle.load(f)
         f.close()
Example #12
    def __init__(self, *args, **kargs):
        # always use getInstance() to create this object
        # ARNOCOMMENT: why isn't the lock used on this read?!
        if self.__single is not None:
            raise RuntimeError("RePEXLogDB is singleton")
        #SQLiteCacheDBBase.__init__(self, *args, **kargs)

        from BaseLib.Core.Session import Session  # Circular import fix
        state_dir = Session.get_instance().sessconfig['state_dir']
        self.db = os.path.join(state_dir, self.PEERDB_FILE)
        if not os.path.exists(self.db):
            self.version = self.PEERDB_VERSION
            self.history = []
        else:
            import cPickle as pickle
            f = open(self.db, 'rb')
            self.version, self.history = pickle.load(f)
            f.close()
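
Examples #11 and #12 show only the load half of the pickled peer log. A hedged sketch of the matching save half, assuming the same (version, history) tuple layout; the method name is hypothetical:

    import cPickle as pickle

    def save(self):
        # persist the same (version, history) tuple that __init__ loads
        f = open(self.db, 'wb')
        try:
            pickle.dump((self.version, self.history), f, pickle.HIGHEST_PROTOCOL)
        finally:
            f.close()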
Example #13
    def _reporting_thread(self):
        """
        Send the report on a separate thread

        We choose not to use a lock object to protect access to
        self._enable_reporting, self._retry_delay, and
        self._report_deadline because only a single thread will write
        and the other threads will only read there variables. Python
        doesn't cause problems in this case.
        """
        # minimum retry delay. this value will grow exponentially with
        # every failure
        retry_delay = 15

        # the amount of time to sleep before the next report (or until
        # the _thread_event is set)
        timeout = retry_delay

        # a list containing all urlencoded reports that have not yet been
        # sent (most of the time this list will be empty, except when
        # reports could not be delivered)
        reports = []

        # local copy of the self._event when it is being reported
        event = None
        
        if USE_LIVING_LAB_REPORTING:
            # the m18 trial statistics are gathered at the 'living lab'
            session = Session.get_instance()
            living_lab_reporter = LivingLabOnChangeReporter("vod-stats-reporter")
            living_lab_reporter.set_permid(session.get_permid())
            status_holder = get_status_holder("vod-stats")
            status_holder.add_reporter(living_lab_reporter)
            status_element = status_holder.create_status_element("action-list", "A list containing timestamped VOD playback events", initial_value=[])

        else:
            # there are several urls available where reports can be
            # sent. one should be picked randomly each time.
            #
            # when a report is successful it will stay with the same
            # reporter. when a report is unsuccessful (could not
            # connect) it will cycle through reporters.
            report_urls = [[0, 0, "http://reporter1.tribler.org/swarmplayer.py"],
                           [0, 0, "http://reporter2.tribler.org/swarmplayer.py"],
                           [0, 0, "http://reporter3.tribler.org/swarmplayer.py"],
                           [0, 0, "http://reporter4.tribler.org/swarmplayer.py"],
                           [0, 0, "http://reporter5.tribler.org/swarmplayer.py"],
                           [0, 0, "http://reporter6.tribler.org/swarmplayer.py"],
                           [0, 0, "http://reporter7.tribler.org/swarmplayer.py"],
                           [0, 0, "http://reporter8.tribler.org/swarmplayer.py"],
                           [0, 0, "http://reporter9.tribler.org/swarmplayer.py"]]
            shuffle(report_urls)

        while True:
            # sleep in between reports. will send a report immediately
            # when the flush event is set
            self._thread_flush.wait(timeout)
            self._thread_flush.clear()

            # create report
            self._thread_lock.acquire()
            try:
                if self._event:
                    # copy between threads while locked
                    event = self._event

                    self._event = []
                else:
                    # we have nothing to report... sleep
                    timeout = retry_delay
                    event = None

            finally:
                self._thread_lock.release()

            if event:
                # prepend the session-key
                event.insert(0, {"key":"session-key", "timestamp":time(), "event":self._session_key})
                event.insert(0, {"key":"sequence-number", "timestamp":time(), "event":self._sequence_number})
                self._sequence_number += 1

            if USE_LIVING_LAB_REPORTING:
                if event:
                    try:
                        if status_element.set_value(event):
                            # Living lab doesn't support dynamic reporting.
                            # We use 60 seconds by default
                            timeout = 60
                        else:
                            # something went wrong...
                            retry_delay *= 2
                            timeout = retry_delay
                    except:
                        # error contacting server
                        print_exc(file=sys.stderr)
                        retry_delay *= 2
                        timeout = retry_delay

            else:
                # add new report
                if event:
                    if len(event) < 10:
                        # uncompressed
                        report = {"version":"3",
                                  "created":time(),
                                  "event":event}
                    else:
                        # compress
                        report = {"version":"4",
                                  "created":time(),
                                  "event":urllib.quote(zlib.compress(repr(event), 9))}

                    reports.append(urllib.urlencode(report))

                if not reports:
                    timeout = retry_delay
                    continue

                reporter = report_urls[0]

                if DEBUG: print >> sys.stderr, "EventStatusReporter: attempting to report,", len(reports[0]), "bytes to", reporter[2]
                try:
                    sock = urllib.urlopen(reporter[2], reports[0])
                    result = sock.read()
                    sock.close()

                    # all ok? then remove the report
                    del reports[0]

                    # increase the 'good-report' counter, no need to re-order
                    reporter[1] += 1
                except:
                    # error contacting server
                    print_exc(file=sys.stderr)
                    retry_delay *= 2

                    # increase the 'bad-report' counter and order by failures
                    reporter[0] += 1
                    report_urls.sort(lambda x, y:cmp(x[0], y[0]))
                    continue

                if result.isdigit():
                    result = int(result)
                    if result == 0:
                        # remote server is not recording, so don't bother
                        # sending events
                        if DEBUG: print >> sys.stderr, "EventStatusReporter: received -zero- from the HTTP server. Reporting disabled"
                        self._thread_lock.acquire()
                        self._enable_reporting = False
                        self._thread_lock.release()

                        # close thread
                        return

                    else:
                        # I choose not to reset the retry_delay because
                        # swarmplayer sessions tend to be short. And if
                        # there are connection failures I want as few
                        # retries as possible
                        if DEBUG: print >> sys.stderr, "EventStatusReporter: report successful. Next report in", result, "seconds"
                        timeout = result
                else:
                    self._thread_lock.acquire()
                    self._enable_reporting = False
                    self._thread_lock.release()

                    # close thread
                    return
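
The retry handling above doubles retry_delay on every failure and, as the comment near the end explains, deliberately never resets it. The same policy in isolation, as a minimal sketch; the one-hour cap is an added assumption, not in the original:

    def next_timeout(retry_delay, succeeded, server_interval=None):
        # on success the server dictates the next report interval;
        # on failure the delay grows exponentially and is never reset
        if succeeded:
            return retry_delay, (server_interval or 60)
        retry_delay = min(retry_delay * 2, 3600)  # cap is an assumption
        return retry_delay, retry_delay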
Example #14
class SupporterServer(object):
    def __init__(self, options):
        self._directory = options.directory
        self._port = options.port
        self._torrent = options.torrent
        self._max_dl_rate = options.dlrate
        self._max_ul_rate = options.ulrate
        self._min_peer = options.min_peer
        self._max_peer = options.max_peer
        self._choke_objects = []  # list of all chokers...

    def init_session(self):
        scfg = SessionStartupConfig()
        scfg.set_state_dir(tempfile.mkdtemp())
        scfg.set_listen_port(self._port)
        scfg.set_overlay(False)
        scfg.set_megacache(False)
        scfg.set_upnp_mode(simpledefs.UPNPMODE_DISABLED)
        scfg.set_dialback(False)
        scfg.set_social_networking(False)
        scfg.set_buddycast(False)
        scfg.set_crawler(False)
        scfg.set_internal_tracker(False)

        self._session = Session(scfg)

    def start_torrents(self):
        torrents = []
        if os.path.isdir(self._torrent):
            # walk the dir and start all torrents in there
            torrents = files_list(
                self._torrent,
                [constants.TORRENT_DOWNLOAD_EXT, constants.TORRENT_VOD_EXT])
        else:
            torrents.append(self._torrent)

        for torrent in torrents:
            self.start_torrent(torrent)

    def start_torrent(self, torrent):
        tdef = TorrentDef.load(torrent)

        if not os.access(self._directory, os.F_OK):
            os.makedirs(self._directory)

        dscfg = DownloadStartupConfig()
        dscfg.set_dest_dir(self._directory)
        dscfg.set_video_events([
            simpledefs.VODEVENT_START, simpledefs.VODEVENT_PAUSE,
            simpledefs.VODEVENT_RESUME
        ])
        dscfg.set_max_speed(simpledefs.DOWNLOAD, self._max_dl_rate)
        dscfg.set_max_speed(simpledefs.UPLOAD, self._max_ul_rate)
        dscfg.set_peer_type("S")
        #dscfg.set_video_event_callback(self.video_callback) # supporter should not play the files !

        d = self._session.start_download(tdef, dscfg)
        d.set_state_callback(self.state_callback)

        time.sleep(1)  # give the download some time to fully initialize
        d.sd.dow.choker.set_supporter_server(True)

        self._tracker_url = tdef.get_tracker()[:tdef.get_tracker().find("announce")]
        self._id = d.sd.peerid
        self._choke_objects.append(d.sd.dow.choker)

    def init_communicator(self):
        self._communicator = HTTPCommunicator({
            'url': self._tracker_url,
            'id': self._id,
            'port': self._port,
            'min_peer': self._min_peer,
            'max_peer': self._max_peer
        })
        self._communicator.send_registration()

    def run_forever(self):
        self.init_session()
        self.start_torrents()
        self.init_communicator()

        xmlrpc_server_handler = SupporterXMLRPCServer(self)
        xmlrpc_server = SimpleXMLRPCServer.SimpleXMLRPCServer(
            ("0.0.0.0", self._port + 1))
        xmlrpc_server.register_instance(xmlrpc_server_handler)
        xmlrpc_server.serve_forever()

    def state_callback(self, ds):
        return (0, False)

    def video_callback(self, d, event, params):
        pass

    def push_supportee_list_to_choker(self, supportee_list):
        for choker in self._choke_objects:
            choker.receive_supportee_list(supportee_list)
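
A hedged sketch of how SupporterServer might be launched from the command line; the option names are assumptions inferred from the attributes read in __init__:

    import optparse

    def parse_options():
        parser = optparse.OptionParser()
        parser.add_option("--directory", default="./data")
        parser.add_option("--port", type="int", default=7000)
        parser.add_option("--torrent", default="./torrents")
        parser.add_option("--dlrate", type="int", default=0)   # 0 = unlimited
        parser.add_option("--ulrate", type="int", default=0)
        parser.add_option("--min_peer", type="int", default=5)
        parser.add_option("--max_peer", type="int", default=50)
        options, _ = parser.parse_args()
        return options

    if __name__ == "__main__":
        SupporterServer(parse_options()).run_forever()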
Example #15
    def __init__(self,bt1download,videostatus,videoinfo,videoanalyserpath,vodeventfunc):

        # dirty hack to get the Tribler Session
        from BaseLib.Core.Session import Session
        session = Session.get_instance()

        if session.get_overlay():
            # see comment in else section on importing...
            from BaseLib.Core.CacheDB.SqliteVideoPlaybackStatsCacheDB import VideoPlaybackDBHandler
            self._playback_stats = VideoPlaybackDBHandler.get_instance()
        else:
            # hack: we should not import this since it is not part of
            # the core nor should we import here, but otherwise we
            # will get import errors
            from BaseLib.Player.Reporter import VideoPlaybackReporter
            self._playback_stats = VideoPlaybackReporter.get_instance()
            
        # add an event to indicate that the user wants playback to
        # start
        def set_nat(nat):
            self._playback_stats.add_event(self._playback_key, "nat:%s" % nat)
        self._playback_key = base64.b64encode(os.urandom(20))
        self._playback_stats.add_event(self._playback_key, "play-init")
        self._playback_stats.add_event(self._playback_key, "piece-size:%d" % videostatus.piecelen)
        self._playback_stats.add_event(self._playback_key, "num-pieces:%d" % videostatus.movie_numpieces)
        self._playback_stats.add_event(self._playback_key, "bitrate:%d" % videostatus.bitrate)
        self._playback_stats.add_event(self._playback_key, "nat:%s" % session.get_nat_type(callback=set_nat))


        self._complete = False
        self.videoinfo = videoinfo
        self.bt1download = bt1download
        self.piecepicker = bt1download.picker
        self.rawserver = bt1download.rawserver
        self.storagewrapper = bt1download.storagewrapper
        self.fileselector = bt1download.fileselector

        self.vodeventfunc = vodeventfunc
        self.videostatus = vs = videostatus
        
        # Add quotes around path, as that's what os.popen() wants on win32
        if sys.platform == "win32" and videoanalyserpath is not None and videoanalyserpath.find(' ') != -1:
            self.video_analyser_path='"'+videoanalyserpath+'"'
        else:
            self.video_analyser_path=videoanalyserpath

        # counter for the sustainable() call. Every X calls the
        # buffer-percentage is updated.
        self.sustainable_counter = sys.maxint

        # boudewijn: because we now update the downloadrate for each
        # received chunk instead of each piece we do not need to
        # average the measurement over a 'long' period of time. Also,
        # we only update the downloadrate for pieces that are in the
        # high priority range giving us a better estimation on how
        # likely the pieces will be available on time.
        self.overall_rate = Measure(10)
        self.high_range_rate = Measure(2)

        # boudewijn: increase the initial minimum buffer size
        vs.increase_high_range()

        # buffer: a link to the piecepicker buffer
        self.has = self.piecepicker.has

        # number of pieces in buffer
        self.pieces_in_buffer = 0

        self.data_ready = Condition()
        
        # Arno: Call FFMPEG only if the torrent did not provide the 
        # bitrate and video dimensions. This is because FFMPEG
        # sometimes hangs e.g. Ivaylo's Xvid Finland AVI, for unknown 
        # reasons
        
        # Arno: 2007-01-06: Since we use VideoLan player, videodimensions not important
        if vs.bitrate_set:
            self.doing_ffmpeg_analysis = False
            self.doing_bitrate_est = False
            self.videodim = None #self.movieselector.videodim
        else:
            self.doing_ffmpeg_analysis = True
            self.doing_bitrate_est = True
            self.videodim = None

        self.player_opened_with_width_height = False
        self.ffmpeg_est_bitrate = None
        
        # number of packets required to preparse the video
        # I say we need 128 KB to sniff size and bitrate
        
        # Arno: 2007-01-04: Changed to 1MB. It appears ffplay works better with some
        # decent prebuffering. We should replace this with a timing-based thing.
        
        if not self.doing_bitrate_est:
            prebufsecs = self.PREBUF_SEC_VOD

            # assumes first piece is whole (first_piecelen == piecelen)
            piecesneeded = vs.time_to_pieces( prebufsecs )
            bytesneeded = piecesneeded * vs.piecelen
        else:
            # Arno, 2007-01-08: for very high bitrate files e.g. 
            # 850 kilobyte/s (500 MB for 10 min 20 secs) this is too small
            # and we'll have packet loss because we start too soon.
            bytesneeded = 1024 * 1024
            piecesneeded = 1 + int(ceil((bytesneeded - vs.piecelen) / float(vs.piecelen)))

        self.max_prebuf_packets = min(vs.movie_numpieces, piecesneeded)

        if self.doing_ffmpeg_analysis and DEBUG:
            print >>sys.stderr,"vod: trans: Want",self.max_prebuf_packets,"pieces for FFMPEG analysis, piecesize",vs.piecelen

        if DEBUG:
            print >>sys.stderr,"vod: trans: Want",self.max_prebuf_packets,"pieces for prebuffering"

        self.nreceived = 0
        
        if DEBUG:
            print >>sys.stderr,"vod: trans: Setting MIME type to",self.videoinfo['mimetype']
        
        self.set_mimetype(self.videoinfo['mimetype'])

        # some statistics
        self.stat_playedpieces = 0 # number of pieces played successfully
        self.stat_latepieces = 0 # number of pieces that arrived too late
        self.stat_droppedpieces = 0 # number of pieces dropped
        self.stat_stalltime = 0.0 # total amount of time the video was stalled
        self.stat_prebuffertime = 0.0 # amount of prebuffer time used
        self.stat_pieces = PieceStats() # information about each piece

        # start periodic tasks
        self.curpiece = ""
        self.curpiece_pos = 0
        # The outbuf keeps only the pieces from the base layer. We play if we
        # have at least a piece from the base layer!
        self.outbuf = []
        #self.last_pop = None # time of last pop
        self.reset_bitrate_prediction()

        self.lasttime=0
        # For DownloadState
        self.prebufprogress = 0.0
        self.prebufstart = time.time()
        self.playable = False
        self.usernotified = False
        
        self.outbuflen = None

        # LIVESOURCEAUTH
        self.authenticator = None

        self.refill_rawserv_tasker()
        self.tick_second()

        # link to others (last thing to do)
        self.piecepicker.set_transporter( self )
        #self.start()

        if FAKEPLAYBACK:
            import threading
            
            class FakeReader(threading.Thread):
                def __init__(self,movie):
                    threading.Thread.__init__(self)
                    self.movie = movie
                    
                def run(self):
                    self.movie.start()
                    while not self.movie.done():
                        self.movie.read()
            
            t = FakeReader(self)
            t.start()
Example #16
    def __init__(self,bt1download,videostatus,videoinfo,videoanalyserpath,vodeventfunc):

        # dirty hack to get the Tribler Session
        from BaseLib.Core.Session import Session
        session = Session.get_instance()

        if session.get_overlay():
            # see comment in else section on importing...
            from BaseLib.Core.CacheDB.SqliteVideoPlaybackStatsCacheDB import VideoPlaybackDBHandler
            self._playback_stats = VideoPlaybackDBHandler.get_instance()
        else:
            # hack: we should not import this since it is not part of
            # the core nor should we import here, but otherwise we
            # will get import errors
            from BaseLib.Player.Reporter import VideoPlaybackReporter
            self._playback_stats = VideoPlaybackReporter.get_instance()
            
        # add an event to indicate that the user wants playback to
        # start
        def set_nat(nat):
            self._playback_stats.add_event(self._playback_key, "nat:%s" % nat)
        self._playback_key = base64.b64encode(os.urandom(20))
        self._playback_stats.add_event(self._playback_key, "play-init")
        self._playback_stats.add_event(self._playback_key, "piece-size:%d" % videostatus.piecelen)
        self._playback_stats.add_event(self._playback_key, "num-pieces:%d" % videostatus.movie_numpieces)
        self._playback_stats.add_event(self._playback_key, "bitrate:%d" % videostatus.bitrate)
        self._playback_stats.add_event(self._playback_key, "nat:%s" % session.get_nat_type(callback=set_nat))


        self._complete = False
        self.videoinfo = videoinfo
        self.bt1download = bt1download
        self.piecepicker = bt1download.picker
        self.rawserver = bt1download.rawserver
        self.storagewrapper = bt1download.storagewrapper
        self.fileselector = bt1download.fileselector

        self.vodeventfunc = vodeventfunc
        self.videostatus = vs = videostatus
        
        # Add quotes around path, as that's what os.popen() wants on win32
        if sys.platform == "win32" and videoanalyserpath is not None and videoanalyserpath.find(' ') != -1:
            self.video_analyser_path='"'+videoanalyserpath+'"'
        else:
            self.video_analyser_path=videoanalyserpath

        # counter for the sustainable() call. Every X calls the
        # buffer-percentage is updated.
        self.sustainable_counter = sys.maxint

        # boudewijn: because we now update the downloadrate for each
        # received chunk instead of each piece we do not need to
        # average the measurement over a 'long' period of time. Also,
        # we only update the downloadrate for pieces that are in the
        # high priority range giving us a better estimation on how
        # likely the pieces will be available on time.
        self.overall_rate = Measure(10)
        self.high_range_rate = Measure(2)

        # buffer: a link to the piecepicker buffer
        self.has = self.piecepicker.has

        # number of pieces in buffer
        self.pieces_in_buffer = 0

        self.data_ready = Condition()
        
        # Arno: Call FFMPEG only if the torrent did not provide the 
        # bitrate and video dimensions. This is because FFMPEG
        # sometimes hangs e.g. Ivaylo's Xvid Finland AVI, for unknown 
        # reasons
        
        # Arno: 2007-01-06: Since we use VideoLan player, videodimensions not important
        assert vs.bitrate_set
        self.doing_ffmpeg_analysis = False
        self.doing_bitrate_est = False
        self.videodim = None #self.movieselector.videodim

        self.player_opened_with_width_height = False
        self.ffmpeg_est_bitrate = None
        
        prebufsecs = self.PREBUF_SEC_VOD

        # assumes first piece is whole (first_piecelen == piecelen)
        piecesneeded = vs.time_to_pieces( prebufsecs )
        bytesneeded = piecesneeded * vs.piecelen

        self.max_prebuf_packets = min(vs.movie_numpieces, piecesneeded)

        if self.doing_ffmpeg_analysis and DEBUG:
            print >>sys.stderr,time.asctime(),'-', "vod: trans: Want",self.max_prebuf_packets,"pieces for FFMPEG analysis, piecesize",vs.piecelen

        if DEBUG:
            print >>sys.stderr,time.asctime(),'-', "vod: trans: Want",self.max_prebuf_packets,"pieces for prebuffering"

        self.nreceived = 0
        
        if DEBUG:
            print >>sys.stderr,time.asctime(),'-', "vod: trans: Setting MIME type to",self.videoinfo['mimetype']
        
        self.set_mimetype(self.videoinfo['mimetype'])

        # some statistics
        self.stat_playedpieces = 0 # number of pieces played successfully
        self.stat_latepieces = 0 # number of pieces that arrived too late
        self.stat_droppedpieces = 0 # number of pieces dropped
        self.stat_stalltime = 0.0 # total amount of time the video was stalled
        self.stat_prebuffertime = 0.0 # amount of prebuffer time used
        self.stat_pieces = PieceStats() # information about each piece

        # start periodic tasks
        self.curpiece = ""
        self.curpiece_pos = 0
        # The outbuf keeps only the pieces from the base layer. We play if we
        # have at least a piece from the base layer!
        self.outbuf = []
        #self.last_pop = None # time of last pop
        self.reset_bitrate_prediction()

        self.lasttime=0
        # For DownloadState
        self.prebufprogress = 0.0
        self.prebufstart = time.time()
        self.playable = False
        self.usernotified = False
        
        self.outbuflen = None

        # LIVESOURCEAUTH
        self.authenticator = None

        self.refill_rawserv_tasker()
        self.tick_second()

        # link to others (last thing to do)
        self.piecepicker.set_transporter( self )
        #self.start()

        if FAKEPLAYBACK:
            import threading
            
            class FakeReader(threading.Thread):
                def __init__(self,movie):
                    threading.Thread.__init__(self)
                    self.movie = movie
                    
                def run(self):
                    self.movie.start()
                    while not self.movie.done():
                        self.movie.read()
            
            t = FakeReader(self)
            t.start()
Example #17
    __TORRENT_FILE__ = options.torrent
    __LOGFILE__ = options.logfile or 'vodclient.log'

    scfg = SessionStartupConfig()
    scfg.set_state_dir(tempfile.mkdtemp())
    scfg.set_listen_port(options.port)
    scfg.set_overlay(False)
    scfg.set_megacache(False)
    scfg.set_upnp_mode(simpledefs.UPNPMODE_DISABLED)
    scfg.set_dialback(False)
    scfg.set_social_networking(False)
    scfg.set_buddycast(False)
    scfg.set_crawler(False)
    scfg.set_internal_tracker(False)

    s = Session(scfg)

    tdef = TorrentDef.load(__TORRENT_FILE__)
    # tdef.get_tracker() returns the announce-url; we must omit the "announce" part
    tracker_url = tdef.get_tracker()[:tdef.get_tracker().find("announce")]

    if tdef.get_bitrate() == None:
        print >>sys.stderr, "Provided torrent file has no bitrate information. Exiting."
        sys.exit(1)

    BITRATE = tdef.get_bitrate()
    print >>sys.stderr, "Calculated bitrate is %d" % BITRATE
    client_stats['video_duration'] = int(tdef.get_length() / tdef.get_bitrate())

    if not os.access(options.directory, os.F_OK):
        os.makedirs(options.directory)
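
Note that the announce-trimming slice above (also used in Examples #14, #20 and #22) misbehaves when the tracker URL contains no "announce": str.find() returns -1, so the slice silently drops the URL's last character. A slightly more defensive sketch:

    def tracker_base_url(announce_url):
        # "http://tracker.example.org/announce" -> "http://tracker.example.org/"
        idx = announce_url.find("announce")
        if idx == -1:
            return announce_url  # nothing to strip
        return announce_url[:idx]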
Example #18
 def __init__(self):
     self.file = None
     self.path = os.path.join(Session.get_instance().get_state_dir(), "udppuncture.log")
Example #19
 def download(self):
     from BaseLib.Core.Session import Session
     session = Session.get_instance()
     session.uch.perform_usercallback(self._download)
Example #20
    __TORRENT_FILE__ = options.torrent
    __LOGFILE__ = options.logfile or 'vodclient.log'

    scfg = SessionStartupConfig()
    scfg.set_state_dir(tempfile.mkdtemp())
    scfg.set_listen_port(options.port)
    scfg.set_overlay(False)
    scfg.set_megacache(False)
    scfg.set_upnp_mode(simpledefs.UPNPMODE_DISABLED)
    scfg.set_dialback(False)
    scfg.set_social_networking(False)
    scfg.set_buddycast(False)
    scfg.set_crawler(False)
    scfg.set_internal_tracker(False)

    s = Session(scfg)

    tdef = TorrentDef.load(__TORRENT_FILE__)
    # tdef.get_tracker() returns the announce-url; we must omit the "announce" part
    tracker_url = tdef.get_tracker()[:tdef.get_tracker().find("announce")]

    if tdef.get_bitrate() == None:
        print >> sys.stderr, "Provided torrent file has no bitrate information. Exiting."
        sys.exit(1)

    BITRATE = tdef.get_bitrate()
    print >> sys.stderr, "Calculated bitrate is %d" % BITRATE
    client_stats['video_duration'] = int(tdef.get_length() /
                                         tdef.get_bitrate())

    if not os.access(options.directory, os.F_OK):
        os.makedirs(options.directory)
Example #21
    def updateDB(self, fromver, tover):

        # bring database up to version 2, if necessary
        if fromver < 2:
            sql = """

-- Patch for BuddyCast 4

ALTER TABLE MyPreference ADD COLUMN click_position INTEGER DEFAULT -1;
ALTER TABLE MyPreference ADD COLUMN reranking_strategy INTEGER DEFAULT -1;
ALTER TABLE Preference ADD COLUMN click_position INTEGER DEFAULT -1;
ALTER TABLE Preference ADD COLUMN reranking_strategy INTEGER DEFAULT -1;
CREATE TABLE ClicklogSearch (
                     peer_id INTEGER DEFAULT 0,
                     torrent_id INTEGER DEFAULT 0,
                     term_id INTEGER DEFAULT 0,
                     term_order INTEGER DEFAULT 0
                     );
CREATE INDEX idx_search_term ON ClicklogSearch (term_id);
CREATE INDEX idx_search_torrent ON ClicklogSearch (torrent_id);


CREATE TABLE ClicklogTerm (
                    term_id INTEGER PRIMARY KEY AUTOINCREMENT DEFAULT 0,
                    term VARCHAR(255) NOT NULL,
                    times_seen INTEGER DEFAULT 0 NOT NULL
                    );
CREATE INDEX idx_terms_term ON ClicklogTerm(term);  
    
"""

            self.execute_write(sql, commit=False)

        if fromver < 3:
            sql = """
-- Patch for Local Peer Discovery
            
ALTER TABLE Peer ADD COLUMN is_local integer DEFAULT 0;
"""
            self.execute_write(sql, commit=False)

        if fromver < 4:
            sql = """
-- V2: Patch for VoteCast

DROP TABLE IF EXISTS ModerationCast;
DROP INDEX IF EXISTS moderationcast_idx;

DROP TABLE IF EXISTS Moderators;
DROP INDEX IF EXISTS moderators_idx;

DROP TABLE IF EXISTS VoteCast;
DROP INDEX IF EXISTS votecast_idx;

CREATE TABLE VoteCast (
mod_id text,
voter_id text,
vote integer,
time_stamp integer
);

CREATE INDEX mod_id_idx
on VoteCast 
(mod_id);

CREATE INDEX voter_id_idx
on VoteCast 
(voter_id);

CREATE UNIQUE INDEX votecast_idx
ON VoteCast
(mod_id, voter_id);
            
--- patch for BuddyCast 5 : Creation of Popularity table and relevant stuff

CREATE TABLE Popularity (
                         torrent_id INTEGER,
                         peer_id INTEGER,
                         msg_receive_time NUMERIC,
                         size_calc_age NUMERIC,
                         num_seeders INTEGER DEFAULT 0,
                         num_leechers INTEGER DEFAULT 0,
                         num_of_sources INTEGER DEFAULT 0
                     );

CREATE INDEX Message_receive_time_idx 
  ON Popularity 
   (msg_receive_time);

CREATE INDEX Size_calc_age_idx 
  ON Popularity 
   (size_calc_age);

CREATE INDEX Number_of_seeders_idx 
  ON Popularity 
   (num_seeders);

CREATE INDEX Number_of_leechers_idx 
  ON Popularity 
   (num_leechers);

CREATE UNIQUE INDEX Popularity_idx
  ON Popularity
   (torrent_id, peer_id, msg_receive_time);

-- v4: Patch for ChannelCast, Search

CREATE TABLE ChannelCast (
publisher_id text,
publisher_name text,
infohash text,
torrenthash text,
torrentname text,
time_stamp integer,
signature text
);

CREATE INDEX pub_id_idx
on ChannelCast
(publisher_id);

CREATE INDEX pub_name_idx
on ChannelCast
(publisher_name);

CREATE INDEX infohash_ch_idx
on ChannelCast
(infohash);

----------------------------------------

CREATE TABLE InvertedIndex (
word               text NOT NULL,
torrent_id         integer
);

CREATE INDEX word_idx
on InvertedIndex
(word);

CREATE UNIQUE INDEX invertedindex_idx
on InvertedIndex
(word,torrent_id);

----------------------------------------

-- Set all similarity to zero because we are using a new similarity
-- function and the old values no longer correspond to the new ones
UPDATE Peer SET similarity = 0;
UPDATE Torrent SET relevance = 0;

"""
            self.execute_write(sql, commit=False)
        if fromver < 5:
            sql = """
--------------------------------------
-- Creating Subtitles (future RichMetadata) DB
----------------------------------
CREATE TABLE Metadata (
  metadata_id integer PRIMARY KEY ASC AUTOINCREMENT NOT NULL,
  publisher_id text NOT NULL,
  infohash text NOT NULL,
  description text,
  timestamp integer NOT NULL,
  signature text NOT NULL,
  UNIQUE (publisher_id, infohash),
  FOREIGN KEY (publisher_id, infohash) 
    REFERENCES ChannelCast(publisher_id, infohash) 
    ON DELETE CASCADE -- the fk constraint is not enforced by sqlite
);

CREATE INDEX infohash_md_idx
on Metadata(infohash);

CREATE INDEX pub_md_idx
on Metadata(publisher_id);


CREATE TABLE Subtitles (
  metadata_id_fk integer,
  subtitle_lang text NOT NULL,
  subtitle_location text,
  checksum text NOT NULL,
  UNIQUE (metadata_id_fk,subtitle_lang),
  FOREIGN KEY (metadata_id_fk) 
    REFERENCES Metadata(metadata_id) 
    ON DELETE CASCADE, -- the fk constraint is not enforced by sqlite
  
  -- ISO639-2 uses 3 characters for lang codes
  CONSTRAINT lang_code_length 
    CHECK ( length(subtitle_lang) == 3 ) 
);


CREATE INDEX metadata_sub_idx
on Subtitles(metadata_id_fk);

-- Stores the subtitles that peers have as an integer bitmask
 CREATE TABLE SubtitlesHave (
    metadata_id_fk integer,
    peer_id text NOT NULL,
    have_mask integer NOT NULL,
    received_ts integer NOT NULL, --timestamp indicating when the mask was received
    UNIQUE (metadata_id_fk, peer_id),
    FOREIGN KEY (metadata_id_fk)
      REFERENCES Metadata(metadata_id)
      ON DELETE CASCADE, -- the fk constraint is not enforced by sqlite

    -- 32 bit unsigned integer
    CONSTRAINT have_mask_length
      CHECK (have_mask >= 0 AND have_mask < 4294967296)
);

CREATE INDEX subtitles_have_idx
on SubtitlesHave(metadata_id_fk);

-- this index can boost queries
-- ordered by timestamp on the SubtitlesHave DB
CREATE INDEX subtitles_have_ts
on SubtitlesHave(received_ts);

"""
            self.execute_write(sql, commit=False)

        # updating version stepwise so if this works, we store it
        # regardless of later, potentially failing updates
        self.writeDBVersion(CURRENT_MAIN_DB_VERSION, commit=False)
        self.commit()

        # now start the process of parsing the torrents to insert into the
        # InvertedIndex table.
        if TEST_SQLITECACHEDB_UPGRADE:
            state_dir = "."
        else:
            from BaseLib.Core.Session import Session

            session = Session.get_instance()
            state_dir = session.get_state_dir()
        tmpfilename = os.path.join(state_dir, "upgradingdb.txt")
        if fromver < 4 or os.path.exists(tmpfilename):

            def upgradeTorrents():
                # fetch some un-inserted torrents to put into the InvertedIndex
                sql = """
                SELECT torrent_id, name, torrent_file_name
                FROM Torrent
                WHERE torrent_id NOT IN (SELECT DISTINCT torrent_id FROM InvertedIndex)
                AND torrent_file_name IS NOT NULL
                LIMIT 20"""
                records = self.fetchall(sql)

                if len(records) == 0:
                    # the upgrade is complete, so delete the temp file
                    os.remove(tmpfilename)
                    if DEBUG:
                        print >> sys.stderr, time.asctime(), "-", "DB upgrade: temp-file deleted", tmpfilename
                    return

                for torrent_id, name, torrent_file_name in records:
                    try:
                        abs_filename = os.path.join(session.get_torrent_collecting_dir(), torrent_file_name)
                        if not os.path.exists(abs_filename):
                            raise RuntimeError(".torrent file not found. Use fallback.")
                        torrentdef = TorrentDef.load(abs_filename)
                        torrent_name = torrentdef.get_name_as_unicode()
                        keywords = Set(split_into_keywords(torrent_name))
                        for filename in torrentdef.get_files_as_unicode():
                            keywords.update(split_into_keywords(filename))

                    except:
                        # failure... most likely the .torrent file
                        # is invalid

                        # use keywords from the torrent name
                        # stored in the database
                        torrent_name = dunno2unicode(name)
                        keywords = Set(split_into_keywords(torrent_name))

                    # store the keywords in the InvertedIndex
                    # table in the database
                    if len(keywords) > 0:
                        values = [(keyword, torrent_id) for keyword in keywords]
                        self.executemany(u"INSERT OR REPLACE INTO InvertedIndex VALUES(?, ?)", values, commit=False)
                        if DEBUG:
                            print >> sys.stderr, time.asctime(), "-", "DB upgrade: extending the InvertedIndex table with", len(values), "new keywords for", torrent_name

                # now commit, after parsing the batch of torrents
                self.commit()

                # upgrade not yet complete; come back after 5 sec
                tqueue.add_task(upgradeTorrents, 5)

            # Create an empty file to mark the upgrade as in progress.
            # If this process is terminated before the upgrade completes,
            # the file remains even though fromver >= 4, indicating that
            # the rest of the torrents still need to be inserted into the InvertedIndex!

            # ensure the temp-file is created, if it is not already
            try:
                open(tmpfilename, "w")
                if DEBUG:
                    print >> sys.stderr, time.asctime(), "-", "DB upgrade: temp-file successfully created"
            except:
                if DEBUG:
                    print >> sys.stderr, time.asctime(), "-", "DB upgrade: failed to create temp-file"

            if DEBUG:
                print >> sys.stderr, time.asctime(), "-", "Upgrading DB... inserting into InvertedIndex"
            from BaseLib.Utilities.TimedTaskQueue import TimedTaskQueue
            from sets import Set
            from BaseLib.Core.Search.SearchManager import split_into_keywords
            from BaseLib.Core.TorrentDef import TorrentDef

            # start the upgradation after 10 seconds
            tqueue = TimedTaskQueue("UpgradeDB")
            tqueue.add_task(upgradeTorrents, 10)
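
The method above applies each schema patch stepwise (fromver < 2, < 3, ...) and stamps the new version once before committing, so a partially applied upgrade is never recorded as complete. The same pattern in isolation, as a hedged sketch with hypothetical patch strings:

    PATCHES = {
        2: "ALTER TABLE Peer ADD COLUMN example_col INTEGER DEFAULT 0;",  # hypothetical
        3: "CREATE TABLE Example (id INTEGER PRIMARY KEY);",              # hypothetical
    }

    def upgrade(db, fromver, tover):
        # apply every patch between the current and target versions, in order
        for ver in range(fromver + 1, tover + 1):
            if ver in PATCHES:
                db.execute_write(PATCHES[ver], commit=False)
        # stamp and commit once, mirroring writeDBVersion(..., commit=False)
        # followed by commit() above
        db.writeDBVersion(tover, commit=False)
        db.commit()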
Example #22
class SupporterServer(object):
    def __init__(self, options):
        self._directory = options.directory
        self._port = options.port
        self._torrent = options.torrent
        self._max_dl_rate = options.dlrate
        self._max_ul_rate = options.ulrate
        self._min_peer = options.min_peer
        self._max_peer = options.max_peer
        self._choke_objects = [] # list of all chokers...
        
    def init_session(self):
        scfg = SessionStartupConfig()
        scfg.set_state_dir(tempfile.mkdtemp())
        scfg.set_listen_port(self._port)
        scfg.set_overlay(False)
        scfg.set_megacache(False)
        scfg.set_upnp_mode(simpledefs.UPNPMODE_DISABLED)
        scfg.set_dialback(False)
        scfg.set_social_networking(False)
        scfg.set_buddycast(False)
        scfg.set_crawler(False)
        scfg.set_internal_tracker(False)
        
        self._session = Session(scfg)
        
    def start_torrents(self):
        torrents = []
        if os.path.isdir(self._torrent):
            # walk the dir and start all torrents in there
            torrents = files_list(self._torrent, [constants.TORRENT_DOWNLOAD_EXT, constants.TORRENT_VOD_EXT])
        else:
            torrents.append(self._torrent)
            
        for torrent in torrents:
            self.start_torrent(torrent)
        
    def start_torrent(self, torrent):
        tdef = TorrentDef.load(torrent)
        
        if not os.access(self._directory, os.F_OK):
            os.makedirs(self._directory)
        
        dscfg = DownloadStartupConfig()
        dscfg.set_dest_dir(self._directory)
        dscfg.set_video_events([simpledefs.VODEVENT_START,
                                simpledefs.VODEVENT_PAUSE,
                                simpledefs.VODEVENT_RESUME])
        dscfg.set_max_speed(simpledefs.DOWNLOAD, self._max_dl_rate)
        dscfg.set_max_speed(simpledefs.UPLOAD, self._max_ul_rate)
        dscfg.set_peer_type("S")
        #dscfg.set_video_event_callback(self.video_callback) # supporter should not play the files !
        
        d = self._session.start_download(tdef, dscfg)
        d.set_state_callback(self.state_callback)
        
        time.sleep(1) # give the download some time to fully initialize
        d.sd.dow.choker.set_supporter_server(True)
        
        
        self._tracker_url = tdef.get_tracker()[:tdef.get_tracker().find("announce")]
        self._id = d.sd.peerid
        self._choke_objects.append(d.sd.dow.choker)
        
    def init_communicator(self):
        self._communicator = HTTPCommunicator({'url' : self._tracker_url,
                                               'id' : self._id,
                                               'port' : self._port,
                                               'min_peer' : self._min_peer,
                                               'max_peer' : self._max_peer})
        self._communicator.send_registration()
        
    def run_forever(self):
        self.init_session()
        self.start_torrents()
        self.init_communicator()
        
        xmlrpc_server_handler = SupporterXMLRPCServer(self)
        xmlrpc_server = SimpleXMLRPCServer.SimpleXMLRPCServer(("0.0.0.0", self._port+1))
        xmlrpc_server.register_instance(xmlrpc_server_handler)
        xmlrpc_server.serve_forever()
        
    def state_callback(self, ds):
        return (0, False)
    
    def video_callback(self, d, event, params):
        pass
    
    def push_supportee_list_to_choker(self, supportee_list):
        for choker in self._choke_objects:
            choker.receive_supportee_list(supportee_list)
Example #23
    def _reporting_thread(self):
        """
        Send the report on a separate thread

        We choose not to use a lock object to protect access to
        self._enable_reporting, self._retry_delay, and
        self._report_deadline because only a single thread will write
        and the other threads will only read these variables. Python
        doesn't cause problems in this case.
        """
        # minimum retry delay. this value will grow exponentially with
        # every failure
        retry_delay = 15

        # the amount of time to sleep before the next report (or until
        # the _thread_event is set)
        timeout = retry_delay

        # a list containing all urlencoded reports that have not yet been
        # sent (most of the time this list will be empty, except when
        # reports could not be delivered)
        reports = []

        # local copy of the self._event when it is being reported
        event = None

        if USE_LIVING_LAB_REPORTING:
            # the m18 trial statistics are gathered at the 'living lab'
            session = Session.get_instance()
            living_lab_reporter = LivingLabOnChangeReporter(
                "vod-stats-reporter")
            living_lab_reporter.set_permid(session.get_permid())
            status_holder = get_status_holder("vod-stats")
            status_holder.add_reporter(living_lab_reporter)
            status_element = status_holder.create_status_element(
                "action-list",
                "A list containing timestamped VOD playback events",
                initial_value=[])

        else:
            # there are several urls available where reports can be
            # sent. one should be picked randomly each time.
            #
            # when a report is successful it will stay with the same
            # reporter. when a report is unsuccessful (could not
            # connect) it will cycle through reporters.
            report_urls = [
                [0, 0, "http://reporter1.tribler.org/swarmplayer.py"],
                [0, 0, "http://reporter2.tribler.org/swarmplayer.py"],
                [0, 0, "http://reporter3.tribler.org/swarmplayer.py"],
                [0, 0, "http://reporter4.tribler.org/swarmplayer.py"],
                [0, 0, "http://reporter5.tribler.org/swarmplayer.py"],
                [0, 0, "http://reporter6.tribler.org/swarmplayer.py"],
                [0, 0, "http://reporter7.tribler.org/swarmplayer.py"],
                [0, 0, "http://reporter8.tribler.org/swarmplayer.py"],
                [0, 0, "http://reporter9.tribler.org/swarmplayer.py"]
            ]
            shuffle(report_urls)

        while True:
            # sleep in between reports. will send a report immediately
            # when the flush event is set
            self._thread_flush.wait(timeout)
            self._thread_flush.clear()

            # create report
            self._thread_lock.acquire()
            try:
                if self._event:
                    # copy between threads while locked
                    event = self._event

                    self._event = []
                else:
                    # we have nothing to report... sleep
                    timeout = retry_delay
                    event = None

            finally:
                self._thread_lock.release()

            if event:
                # prepend the session-key
                event.insert(
                    0, {
                        "key": "session-key",
                        "timestamp": time(),
                        "event": self._session_key
                    })
                event.insert(
                    0, {
                        "key": "sequence-number",
                        "timestamp": time(),
                        "event": self._sequence_number
                    })
                self._sequence_number += 1

            if USE_LIVING_LAB_REPORTING:
                if event:
                    try:
                        if status_element.set_value(event):
                            # Living lab doesn't support dynamic reporting.
                            # We use 60 seconds by default
                            timeout = 60
                        else:
                            # something went wrong...
                            retry_delay *= 2
                            timeout = retry_delay
                    except:
                        # error contacting server
                        print_exc(file=sys.stderr)
                        retry_delay *= 2
                        timeout = retry_delay

            else:
                # add new report
                if event:
                    if len(event) < 10:
                        # uncompressed
                        report = {
                            "version": "3",
                            "created": time(),
                            "event": event
                        }
                    else:
                        # compress
                        report = {
                            "version": "4",
                            "created": time(),
                            "event":
                            urllib.quote(zlib.compress(repr(event), 9))
                        }

                    reports.append(urllib.urlencode(report))

                if not reports:
                    timeout = retry_delay
                    continue

                reporter = report_urls[0]

                if DEBUG:
                    print >> sys.stderr, "EventStatusReporter: attempting to report,", len(
                        reports[0]), "bytes to", reporter[2]
                try:
                    sock = urllib.urlopen(reporter[2], reports[0])
                    result = sock.read()
                    sock.close()

                    # all ok? then remove the report
                    del reports[0]

                    # increase the 'good-report' counter, no need to re-order
                    reporter[1] += 1
                except:
                    # error contacting server
                    print_exc(file=sys.stderr)
                    retry_delay *= 2

                    # increase the 'bad-report' counter and order by failures
                    reporter[0] += 1
                    report_urls.sort(lambda x, y: cmp(x[0], y[0]))
                    continue

                if result.isdigit():
                    result = int(result)
                    if result == 0:
                        # remote server is not recording, so don't bother
                        # sending events
                        if DEBUG:
                            print >> sys.stderr, "EventStatusReporter: received -zero- from the HTTP server. Reporting disabled"
                        self._thread_lock.acquire()
                        self._enable_reporting = False
                        self._thread_lock.release()

                        # close thread
                        return

                    else:
                        # I choose not to reset the retry_delay because
                        # swarmplayer sessions tend to be short. And if
                        # there are connection failures I want as few
                        # retries as possible
                        if DEBUG:
                            print >> sys.stderr, "EventStatusReporter: report successful. Next report in", result, "seconds"
                        timeout = result
                else:
                    self._thread_lock.acquire()
                    self._enable_reporting = False
                    self._thread_lock.release()

                    # close thread
                    return