def next_have(self, connection, looser_upload):
    """Choose the next piece whose HAVE should be announced to this peer.

    Returns a piece index to announce, None when nothing should be sent
    yet, or -1 as a signal that the connection should be closed.
    """
    # First peer ever seen: start the grace period and send nothing.
    if self.seed_time is None:
        self.seed_time = clock()
        return None
    # Wait 10 seconds after seeing the first peers,
    # to give them time to grab have lists.
    if clock() < self.seed_time + 10:
        return None
    if not connection.upload.super_seeding:
        return None
    if connection in self.seed_connections:
        # With looser_upload, send a new have even if the previously
        # assigned piece hasn't spread elsewhere yet.
        threshold = 1 if looser_upload else 2
        if self.seed_got_haves[self.seed_connections[connection]] < threshold:
            return None
        if not connection.upload.was_ever_interested:
            # It never downloaded its assigned piece -- probably another
            # stealthed seed; after three strikes signal a close.
            connection.upload.skipped_count += 1
            if connection.upload.skipped_count >= 3:
                return -1
    for tier in self.interests:
        for piece in tier:
            if connection.download.have[piece]:
                continue
            old_level = self.level_in_interests[piece]
            # Bump the piece one interest level so we don't duplicate effort.
            self.level_in_interests[piece] += 1
            if old_level == len(self.interests) - 1:
                self.interests.append([])
            self._shift_over(piece, self.interests[old_level], self.interests[old_level + 1])
            self.seed_got_haves[piece] = 0  # reset the spread counter
            self.seed_connections[connection] = piece
            connection.upload.seed_have_list.append(piece)
            return piece
    return -1  # something screwy; terminate connection
def next_have(self, connection, looser_upload):
    """Super-seeding piece selection: decide which HAVE to show this peer.

    Return values: a piece index, None (nothing to send right now), or
    -1 (terminate the connection).
    """
    if self.seed_time is None:
        # Note when the first peer appeared; give everyone time to
        # exchange have lists before we start steering pieces.
        self.seed_time = clock()
        return None
    if clock() < self.seed_time + 10:
        return None
    if not connection.upload.super_seeding:
        return None
    if connection in self.seed_connections:
        if looser_upload:
            # Send a new have even if it hasn't spread that piece elsewhere.
            wanted_spread = 1
        else:
            wanted_spread = 2
        assigned = self.seed_connections[connection]
        if self.seed_got_haves[assigned] < wanted_spread:
            return None
        if not connection.upload.was_ever_interested:
            # Peer never fetched what we offered -- likely another
            # stealthed seed; close it after repeated refusals.
            connection.upload.skipped_count += 1
            if connection.upload.skipped_count >= 3:
                return -1
    for tier in self.interests:
        for candidate in tier:
            if not connection.download.have[candidate]:
                level = self.level_in_interests[candidate]
                self.level_in_interests[candidate] += 1  # avoid duplicating effort
                if level == len(self.interests) - 1:
                    self.interests.append([])
                self._shift_over(candidate, self.interests[level], self.interests[level + 1])
                self.seed_got_haves[candidate] = 0  # reset spread counter
                self.seed_connections[connection] = candidate
                connection.upload.seed_have_list.append(candidate)
                return candidate
    return -1  # something screwy; terminate connection
def is_snubbed(self):
    """Report whether this peer has been silent longer than snub_time.

    Side effect: an unchoked peer that has gone silent (and is not a
    2fastbt helper/coordinator connection) gets its pending requests
    cancelled and is treated as if it had choked us.
    """
    # 2fastbt_: short-circuit order matters -- helper/coordinator checks
    # only run for unchoked, silent peers.
    if (not self.choked
            and clock() - self.last2 > self.downloader.snub_time
            and not self.connection.connection.is_helper_con()
            and not self.connection.connection.is_coordinator_con()):
        # _2fastbt
        for index, begin, length in self.active_requests:
            self.connection.send_cancel(index, begin, length)
        self.got_choke()  # treat it just like a choke
    return clock() - self.last > self.downloader.snub_time
def num_disconnected_seeds(self):
    """Return how many disconnected seeds are known, after dropping
    entries not seen for more than EXPIRE_TIME seconds."""
    stale = [seed_id for seed_id, last_seen in self.disconnectedseeds.items()
             if clock() - last_seen > EXPIRE_TIME]
    for seed_id in stale:
        # self.picker.seed_disappeared()
        del self.disconnectedseeds[seed_id]
    return len(self.disconnectedseeds)
def num_disconnected_seeds(self):
    """Count known-but-disconnected seeds, expiring stale entries first."""
    doomed = []
    for seed_id, seen_at in self.disconnectedseeds.items():
        # Expire old seeds after so long.
        if clock() - seen_at > EXPIRE_TIME:
            doomed.append(seed_id)
    for seed_id in doomed:
        # self.picker.seed_disappeared()
        del self.disconnectedseeds[seed_id]
    return len(self.disconnectedseeds)
def __init__(self, infohash, storage, picker, backlog, max_rate_period, numpieces, chunksize, measurefunc, snub_time, kickbans_ok, kickfunc, banfunc, scheduler = None, supporter_ips = []): self.supporter_ips = supporter_ips self.infohash = infohash self.b64_infohash = b64encode(infohash) self.storage = storage self.picker = picker self.backlog = backlog self.max_rate_period = max_rate_period self.measurefunc = measurefunc self.totalmeasure = Measure(max_rate_period*storage.piece_length/storage.request_size) self.numpieces = numpieces self.chunksize = chunksize self.snub_time = snub_time self.kickfunc = kickfunc self.banfunc = banfunc self.disconnectedseeds = {} self.downloads = [] self.perip = {} self.gotbaddata = {} self.kicked = {} self.banned = {} self.kickbans_ok = kickbans_ok self.kickbans_halted = False self.super_seeding = False self.endgamemode = False self.endgame_queued_pieces = [] self.all_requests = [] self.discarded = 0L self.download_rate = 0 # self.download_rate = 25000 # 25K/s test rate self.bytes_requested = 0 self.last_time = clock() self.queued_out = {} self.requeueing = False self.paused = False self.scheduler = scheduler # SmoothIT_ self.logger = logging.getLogger("Tribler.Downloader") # _SmoothIT # hack: we should not import this since it is not part of the # core nor should we import here, but otherwise we will get # import errors # # _event_reporter stores events that are logged somewhere... from BaseLib.Core.Statistics.StatusReporter import get_reporter_instance self._event_reporter = get_reporter_instance() # check periodicaly self.scheduler(self.dlr_periodic_check, 1) self.support_required = True
def update(self):
    """Fold a fresh rate sample into the exponentially weighted estimate.

    Re-schedules itself every probing_period; self.value holds the
    smoothed rate in kB/s, clamped at max_rate when that is positive.
    """
    if DEBUG:
        print >>sys.stderr, "RatePredictor:update"
    # Re-arm the periodic task before doing the update work.
    self.raw_server.add_task(self.update, self.probing_period)
    sample = self.rate_measure.get_rate() / 1000.
    now = clock()
    if self.value is None or now - self.timestamp > self.max_period:
        # No usable history (first run, or the gap since the last
        # sample is too large to blend with): adopt the sample as-is.
        self.value = sample
    else:
        # EWMA: alpha weights the new sample against the running value.
        self.value = self.alpha * sample + (1 - self.alpha) * self.value
    if self.max_rate > 0 and self.value > self.max_rate:
        self.value = self.max_rate
    self.timestamp = now
def update(self):
    """Periodic EWMA update of the measured rate (kB/s)."""
    if DEBUG:
        print >> sys.stderr, "RatePredictor:update"
    self.raw_server.add_task(self.update, self.probing_period)
    current_rate = self.rate_measure.get_rate() / 1000.
    t = clock()
    stale = self.value is None or t - self.timestamp > self.max_period
    if stale:
        # First sample, or the previous estimate is too old to blend with.
        self.value = current_rate
    else:
        self.value = self.alpha * current_rate + (1 - self.alpha) * self.value
    # Clamp to the configured ceiling, if one is set.
    if self.max_rate > 0 and self.value > self.max_rate:
        self.value = self.max_rate
    self.timestamp = t
def _round_robin(self):
    # Periodic (every 5s) choker pass. In super-seed mode, push targeted
    # HAVE messages to eligible peers; otherwise rotate the connection
    # list so a different choked-but-interested peer gets priority, then
    # recompute the unchoke set via _rechoke().
    self.schedule(self._round_robin, 5)
    if self.super_seed:
        cons = range(len(self.connections))
        to_close = []
        # How many more uploads we still want to hand out this round.
        count = self.config['min_uploads']-self.last_preferred
        if count > 0: # optimization
            shuffle(cons)
        for c in cons:
            # SelectiveSeeding
            # NOTE(review): here `c` is an INDEX into self.connections,
            # while the loop below passes a connection object to
            # is_conn_eligible -- looks inconsistent; verify which one
            # the seeding manager expects.
            if self.seeding_manager is None or self.seeding_manager.is_conn_eligible(c):
                # looser_upload is (count > 0): the picker relaxes its
                # spread requirement only while we still want uploads.
                i = self.picker.next_have(self.connections[c], count > 0)
                if i is None:
                    continue
                if i < 0:
                    # Picker signalled a bogus / stealth-seed connection.
                    to_close.append(self.connections[c])
                    continue
                self.connections[c].send_have(i)
                count -= 1
            else:
                # Drop non-eligible connections
                to_close.append(self.connections[c])
        for c in to_close:
            c.close()
    if self.last_round_robin + self.round_robin_period < clock():
        self.last_round_robin = clock()
        for i in xrange(1, len(self.connections)):
            c = self.connections[i]
            # SelectiveSeeding
            if self.seeding_manager is None or self.seeding_manager.is_conn_eligible(c):
                u = c.get_upload()
                if u.is_choked() and u.is_interested():
                    # Rotate the list so this peer moves to the front and
                    # gets preferred by the next rechoke.
                    self.connections = self.connections[i:] + self.connections[:i]
                    break
    self._rechoke()
def __init__(self, infohash, storage, picker, backlog, max_rate_period, numpieces, chunksize, measurefunc, snub_time, kickbans_ok, kickfunc, banfunc, scheduler = None):
    """Per-torrent download coordinator.

    infohash        -- torrent infohash; also kept base64-encoded
    storage         -- storage wrapper that incoming chunks are fed to
    picker          -- piece-picking strategy
    backlog         -- max outstanding requests per connection
    max_rate_period -- measurement window for rate estimates
    numpieces       -- number of pieces in the torrent
    chunksize       -- request (chunk) size in bytes
    measurefunc     -- callback fed each downloaded length
    snub_time       -- seconds of silence before a peer counts as snubbed
    kickbans_ok     -- whether kicking/banning misbehaving peers is allowed
    kickfunc/banfunc -- callbacks that kick resp. ban a connection
    scheduler       -- task scheduler used for the periodic check
    """
    self.infohash = infohash
    self.b64_infohash = b64encode(infohash)
    self.storage = storage
    self.picker = picker
    self.backlog = backlog
    self.max_rate_period = max_rate_period
    self.measurefunc = measurefunc
    # Overall download rate, measured in request-sized units.
    self.totalmeasure = Measure(max_rate_period*storage.piece_length/storage.request_size)
    self.numpieces = numpieces
    self.chunksize = chunksize
    self.snub_time = snub_time
    self.kickfunc = kickfunc
    self.banfunc = banfunc
    # id -> last-seen time of seeds we know about but are not connected to.
    self.disconnectedseeds = {}
    # Active per-connection download helpers.
    self.downloads = []
    self.perip = {}
    self.gotbaddata = {}
    self.kicked = {}
    self.banned = {}
    self.kickbans_ok = kickbans_ok
    self.kickbans_halted = False
    self.super_seeding = False
    # Endgame-mode bookkeeping.
    self.endgamemode = False
    self.endgame_queued_pieces = []
    self.all_requests = []
    # Bytes received that we had not (or no longer) requested.
    self.discarded = 0L
    self.download_rate = 0
    # self.download_rate = 25000 # 25K/s test rate
    # Request-budget bookkeeping used by queue_limit().
    self.bytes_requested = 0
    self.last_time = clock()
    self.queued_out = {}
    self.requeueing = False
    self.paused = False
    self.scheduler = scheduler
    # hack: we should not import this since it is not part of the
    # core nor should we import here, but otherwise we will get
    # import errors
    #
    # _event_reporter stores events that are logged somewhere...
    # from BaseLib.Core.Statistics.StatusReporter import get_reporter_instance
    # self._event_reporter = get_reporter_instance()
    self._event_reporter = get_status_holder("LivingLab")
    # check periodically
    self.scheduler(self.dlr_periodic_check, 1)
def __init__(self, config, schedule, picker, seeding_selector, done = lambda: False):
    """Initialise choker state and arm the periodic round-robin task."""
    # Collaborators handed in by the session.
    self.config = config
    self.schedule = schedule
    self.picker = picker
    self.done = done
    # Tunables / bookkeeping.
    self.round_robin_period = config['round_robin_period']
    self.connections = []
    self.last_preferred = 0
    self.last_round_robin = clock()
    # Mode flags.
    self.super_seed = False
    self.paused = False
    # Fire the first round-robin pass in five seconds.
    schedule(self._round_robin, 5)
    # SelectiveSeeding: manager is installed later, if at all.
    self.seeding_manager = None
def queue_limit(self):
    """Return how many chunks may currently be queued, based on the
    configured download rate and the amount already requested."""
    if not self.download_rate:
        return 10e10  # that's a big queue!
    now = clock()
    # Drain the request budget by what the rate allows since last call.
    self.bytes_requested -= (now - self.last_time) * self.download_rate
    self.last_time = now
    if not self.requeueing and self.queued_out and self.bytes_requested < 0:
        # Budget freed up: give queued-out downloads another shot.
        self.requeueing = True
        pending = self.queued_out.keys()
        shuffle(pending)
        self.queued_out = {}
        for d in pending:
            d._request_more()
        self.requeueing = False
    # Never accumulate more than five seconds' worth of credit.
    if -self.bytes_requested > 5 * self.download_rate:
        self.bytes_requested = -5 * self.download_rate
    return max(int(-self.bytes_requested / self.chunksize), 0)
def queue_limit(self):
    """How many chunks may currently be queued.

    Drains the request budget at download_rate since the previous call;
    when credit opens up, queued-out downloads are asked to request more.
    """
    if not self.download_rate:
        return 10e10  # that's a big queue!
    t = clock()
    elapsed = t - self.last_time
    self.bytes_requested -= elapsed * self.download_rate
    self.last_time = t
    if not self.requeueing and self.queued_out and self.bytes_requested < 0:
        self.requeueing = True
        waiting = self.queued_out.keys()
        shuffle(waiting)
        self.queued_out = {}
        for dl in waiting:
            dl._request_more()
        self.requeueing = False
    # Cap accumulated credit at five seconds' worth of traffic.
    if -self.bytes_requested > 5 * self.download_rate:
        self.bytes_requested = -5 * self.download_rate
    ql = max(int(-self.bytes_requested / self.chunksize), 0)
    # if DEBUG:
    #     print >> sys.stderr, 'Downloader: download_rate: %s, bytes_requested: %s, chunk: %s -> queue limit: %d' % \
    #         (self.download_rate, self.bytes_requested, self.chunksize, ql)
    return ql
def __init__(self, config, schedule, picker, seeding_selector, done = lambda: False):
    """Initialise the choker and start its periodic round-robin task."""
    # Session collaborators.
    self.config = config
    self.round_robin_period = config['round_robin_period']
    self.schedule = schedule
    self.picker = picker
    # Connection bookkeeping.
    self.connections = []
    self.last_preferred = 0
    self.last_round_robin = clock()
    self.done = done
    # Mode flags.
    self.super_seed = False
    self.paused = False
    # First choke/unchoke pass in five seconds.
    schedule(self._round_robin, 5)
    # SelectiveSeeding: manager is installed later, if at all.
    self.seeding_manager = None
    # _smoothit
    self.biasedUnchoking = BiasedUnchoking.getInstance()
    self._is_supporter_server = False  # for supporter
    self._supportee_list = []  # for supporter
    # TODO: unify usage of IDs via the StaticConfig
    # SmoothIT_
    self.logger = logging.getLogger("Tribler.Choker")
def __init__(self, config, schedule, picker, seeding_selector, done=lambda: False):
    """Set up choker state; arm the recurring round-robin rechoke."""
    self.config = config
    self.schedule = schedule
    self.picker = picker
    self.done = done
    self.round_robin_period = config["round_robin_period"]
    self.connections = []
    self.last_preferred = 0
    self.last_round_robin = clock()
    self.super_seed = False
    self.paused = False
    schedule(self._round_robin, 5)  # kick off the periodic pass
    # SelectiveSeeding
    self.seeding_manager = None
    # _smoothit: biased unchoking + supporter bookkeeping.
    self.biasedUnchoking = BiasedUnchoking.getInstance()
    self._is_supporter_server = False  # for supporter
    self._supportee_list = []  # for supporter
    # TODO: unify usage of IDs via the StaticConfig
    # SmoothIT_
    self.logger = logging.getLogger("Tribler.Choker")
def add_disconnected_seed(self, id):
    """Remember (or refresh) when seed *id* was last seen before disconnecting."""
    # if not self.disconnectedseeds.has_key(id):
    #     self.picker.seed_disappeared()
    self.disconnectedseeds[id] = clock()
def got_unchoke(self):
    """Handle an UNCHOKE from the peer: request pieces if we want any."""
    if not self.choked:
        return
    self.choked = False
    if self.interested:
        self._request_more(new_unchoke=True)
    # Remember when this connection last made progress.
    self.last2 = clock()
def got_piece(self, index, begin, hashlist, piece):
    """ Returns True if the piece is complete. Note that in this
    case a -piece- means a chunk! index/begin identify the chunk,
    hashlist is handed through to storage, piece is the raw payload.
    """
    # SmoothIT_
    # print >>sys.stderr, "Downloader got piece (%i, %i, %i) from (%s:%s=%s) at %d" % (index, begin, len(piece), self.connection.get_ip(), self.connection.get_port(), self.connection.get_id(), time.time())
    try:
        # Per-chunk arrival statistics for the SmoothIT measurements.
        entry = (index, begin, len(piece), time.time())
        self.block_stats.append(entry)
        # print >>sys.stderr, "Block stats after last piece: %s" % self.block_stats
    except:
        # NOTE(review): bare except keeps stats failures from killing
        # the download path, but it also hides real bugs.
        print >>sys.stderr, "Unexpected error:", sys.exc_info()
    # _SmoothIT
    if self.bad_performance_counter:
        # A successful chunk pays back one earlier performance strike.
        self.bad_performance_counter -= 1
        if DEBUG:
            print >>sys.stderr, "decreased bad_performance_counter to", self.bad_performance_counter
    length = len(piece)
    # if DEBUG:
    #     print >> sys.stderr, 'Downloader: got piece of length %d' % length
    try:
        self.active_requests.remove((index, begin, length))
    except ValueError:
        # We never asked for this chunk (or already cancelled it):
        # count it as discarded and ignore it.
        self.downloader.discarded += length
        return False
    if self.downloader.endgamemode:
        self.downloader.all_requests.remove((index, begin, length))
        if DEBUG:
            print >>sys.stderr, "Downloader: got_piece: removed one request from all_requests", len(self.downloader.all_requests), "remaining"
    self.last = clock()
    self.last2 = clock()
    self.measure.update_rate(length)
    # Update statistic gatherer
    status = Status.get_status_holder("LivingLab")
    s_download = status.get_or_create_status_element("downloaded", 0)
    s_download.inc(length)
    self.short_term_measure.update_rate(length)
    self.downloader.measurefunc(length)
    if not self.downloader.storage.piece_came_in(index, begin, hashlist, piece, self.guard):
        # Storage rejected the data (e.g. hash check failed).
        self.downloader.piece_flunked(index)
        return False
    # boudewijn: we need more accurate (if possibly invalid)
    # measurements on current download speed
    self.downloader.picker.got_piece(index, begin, length)
    if self.downloader.storage.do_I_have(index):
        self.downloader.picker.complete(index)
    if self.downloader.endgamemode:
        # In endgame mode every download may hold a duplicate request
        # for this chunk; cancel it everywhere else.
        for d in self.downloader.downloads:
            if d is not self:
                if d.interested:
                    if d.choked:
                        assert not d.active_requests
                        d.fix_download_endgame()
                    else:
                        try:
                            d.active_requests.remove((index, begin, length))
                        except ValueError:
                            continue
                        d.connection.send_cancel(index, begin, length)
                        d.fix_download_endgame()
                else:
                    assert not d.active_requests
    self._request_more()
    self.downloader.check_complete(index)
    # BarterCast counter
    self.connection.total_downloaded += length
    return self.downloader.storage.do_I_have(index)
def got_piece(self, index, begin, hashlist, piece):
    """ Returns True if the piece is complete. Note that in this
    case a -piece- means a chunk! index/begin identify the chunk,
    hashlist is handed through to storage, piece is the raw payload.
    """
    # SmoothIT_
    # print >>sys.stderr, "Downloader got piece (%i, %i, %i) from (%s:%s=%s) at %d" % (index, begin, len(piece), self.connection.get_ip(), self.connection.get_port(), self.connection.get_id(), time.time())
    try:
        # Per-chunk arrival statistics for the SmoothIT measurements.
        entry = (index, begin, len(piece), time.time())
        self.block_stats.append(entry)
        # print >>sys.stderr, "Block stats after last piece: %s" % self.block_stats
    except:
        # NOTE(review): bare except keeps stats failures from killing
        # the download path, but it also hides real bugs.
        print >>sys.stderr, "Unexpected error:", sys.exc_info()
    # _SmoothIT
    if self.bad_performance_counter:
        # A successful chunk pays back one earlier performance strike.
        self.bad_performance_counter -= 1
        if DEBUG:
            print >>sys.stderr, "decreased bad_performance_counter to", self.bad_performance_counter
    length = len(piece)
    # if DEBUG:
    #     print >> sys.stderr, 'Downloader: got piece of length %d' % length
    try:
        self.active_requests.remove((index, begin, length))
    except ValueError:
        # We never asked for this chunk (or already cancelled it):
        # count it as discarded and ignore it.
        self.downloader.discarded += length
        return False
    if self.downloader.endgamemode:
        self.downloader.all_requests.remove((index, begin, length))
        if DEBUG:
            print >>sys.stderr, "Downloader: got_piece: removed one request from all_requests", len(self.downloader.all_requests), "remaining"
    self.last = clock()
    self.last2 = clock()
    self.measure.update_rate(length)
    # Update statistic gatherer
    status = Status.get_status_holder("LivingLab")
    s_download = status.get_or_create_status_element("downloaded",0)
    s_download.inc(length)
    self.short_term_measure.update_rate(length)
    self.downloader.measurefunc(length)
    if not self.downloader.storage.piece_came_in(index, begin, hashlist, piece, self.guard):
        # Storage rejected the data (e.g. hash check failed).
        self.downloader.piece_flunked(index)
        return False
    # boudewijn: we need more accurate (if possibly invalid)
    # measurements on current download speed
    self.downloader.picker.got_piece(index, begin, length)
    if self.downloader.storage.do_I_have(index):
        self.downloader.picker.complete(index)
    if self.downloader.endgamemode:
        # In endgame mode every download may hold a duplicate request
        # for this chunk; cancel it everywhere else.
        for d in self.downloader.downloads:
            if d is not self:
                if d.interested:
                    if d.choked:
                        assert not d.active_requests
                        d.fix_download_endgame()
                    else:
                        try:
                            d.active_requests.remove((index, begin, length))
                        except ValueError:
                            continue
                        d.connection.send_cancel(index, begin, length)
                        d.fix_download_endgame()
                else:
                    assert not d.active_requests
    self._request_more()
    self.downloader.check_complete(index)
    # BarterCast counter
    self.connection.total_downloaded += length
    return self.downloader.storage.do_I_have(index)
def got_unchoke(self):
    """React to the peer unchoking us; kick off requests when interested."""
    if self.choked:
        # Transition choked -> unchoked.
        self.choked = False
        if self.interested:
            self._request_more(new_unchoke = True)
        # Note when this connection last made progress.
        self.last2 = clock()
def add_disconnected_seed(self, id):
    """Timestamp seed *id* as a recently-seen, now-disconnected seed."""
    # if not self.disconnectedseeds.has_key(id):
    #     self.picker.seed_disappeared()
    now = clock()
    self.disconnectedseeds[id] = now