def dynamicHybridNext(self, haves, wantfunc, complete_first):
    """Pick the next piece via the hybrid strategy, resizing the in-order
    window dynamically from the observed DFS (download-from-server) trend.

    Delegates the actual selection to self.hybridNext with the current
    window size; all other arguments are passed through unchanged.
    """
    now = time.time()
    if not hasattr(self, 'inOrderWindow'):
        # Lazy one-time init: window sized from the share of non-seeder
        # VOD peers (more leechers -> smaller initial in-order window).
        fraction = max(0, 0.25 - 0.5 * self.getPercentageOfNotSeedersVOD())
        self.inOrderWindow = int(fraction * len(haves))
        self.prevDfs = self.streamWatcher.total_dfs * 1000 / self.streamWatcher.total
        self.lastWindowUpdate = now
    # Re-evaluate the window once per prefetch period (plus 5s slack).
    if now - self.lastWindowUpdate > self.streamWatcher.prefetchT + 5:
        currDfs = self.streamWatcher.total_dfs * 1000 / self.streamWatcher.total
        dfsDiff = currDfs - self.prevDfs
        self.prevDfs = currDfs
        self.lastWindowUpdate = time.time()
        if dfsDiff >= 10:
            # Server share rising fast: widen the in-order window.
            self.inOrderWindow = min(len(haves), self.inOrderWindow + 1)
        elif dfsDiff < 1:
            # Server share flat: shrink the window toward rarest-first.
            self.inOrderWindow = max(0, self.inOrderWindow - 1)
        Logger.getLogger().append(
            "WINDOW_UPDATE",
            "%d %d %d" % (currDfs, dfsDiff, self.inOrderWindow))
    return self.hybridNext(self.inOrderWindow, haves, wantfunc, complete_first)
def test_completed(self):
    """Called when the download completes: dump stats to CSV and terminate.

    Exits the process with status 3 after a successful CSV dump, returns
    without exiting when stats2csv declined (too few samples), and exits
    with status 0 after logging a traceback on any error.
    """
    try:
        if self.stats2csv():
            stdout.flush()
            Logger.getLogger().append("STREAMWATCHER", "Exit")
            Logger.getLogger().flush()
            # Hard exit, skipping atexit/finally handlers deliberately.
            os._exit(3)
        return
    except Exception:
        # Fixed: was a bare `except:`, which also swallowed SystemExit
        # and KeyboardInterrupt; only genuine errors should reach here.
        traceback.print_exc()
        stdout.flush()
        os._exit(0)
def stats2csv(self):
    """Resample the collected stats onto the prefetch-period time grid and
    write them as CSV rows [alg, dfs, p2p].

    Rows are padded before and after with gap-sized blocks so that the rows
    of all peers in the same experiment group line up column-wise.

    Returns:
        1 on success, 0 when fewer than half the expected samples exist.
    """
    group_size = int(self.config['group_size'])
    order = int(self.config['order'])
    # Nominal end of playback, derived from total size and stream rate.
    end_time = int(self.toKbytes(self.total) / self.rate)
    Logger.getLogger().append("CSV", "has %d samples" % len(self.stats))
    # Bail out when we collected fewer than half the expected samples.
    if len(self.stats) < (end_time / self.prefetch) * 0.5:
        return 0
    # 'wb' is the csv-module convention on Python 2 (this codebase uses
    # sys.maxint elsewhere, so it targets Python 2).
    with open(self.csvFile, 'wb') as csvfile:  # renamed from `file`: don't shadow the builtin
        writer = csv.writer(csvfile)
        writer.writerow(['alg', 'dfs', 'p2p'])
        # Pre-gap padding rows for peers earlier in the group.
        for _ in range(0, (order - 1) * self.gap):
            writer.writerow([self.config['alg'], 0, 0])
        # Linearly interpolate (dfs, p2p) at each multiple of the prefetch
        # period between the first sample time and end_time.
        t = self.stats[0][0]
        Logger.getLogger().append("CSV", "first sample time = %d" % t)
        statsIndex = 0
        lastIndex = len(self.stats) - 1
        lastStats = self.stats[0]  # fixed: was unbound if the scan never advanced
        while t <= end_time:
            # Advance statsIndex to the first sample after t.  Bounded so a
            # grid point at/past the final sample cannot run off the list
            # (the unbounded scan here raised IndexError in that case).
            while statsIndex < lastIndex and t >= self.stats[statsIndex][0]:
                lastStats = self.stats[statsIndex]
                statsIndex = statsIndex + 1
            factor = float(t - lastStats[0]) / (self.stats[statsIndex][0] - lastStats[0])
            dfs = int(math.ceil(lastStats[1] + factor * (self.stats[statsIndex][1] - lastStats[1])))
            p2p = int(math.ceil(lastStats[2] + factor * (self.stats[statsIndex][2] - lastStats[2])))
            Logger.getLogger().append(
                "CSV", "t=%d factor=%.2f dfs=%d p2p=%d" % (t, factor, dfs, p2p))
            writer.writerow([self.config['alg'], int(dfs), int(p2p)])
            t = t + self.prefetch
        # Close with the final raw sample as the last data row.
        lastStats = self.stats[lastIndex]
        Logger.getLogger().append(
            "CSV", "t=%d dfs=%d p2p=%d" % (t, lastStats[1], lastStats[2]))
        writer.writerow([self.config['alg'], lastStats[1], lastStats[2]])
        # Post-gap padding rows for peers later in the group.
        for _ in range(0, (group_size - order) * self.gap):
            writer.writerow([self.config['alg'], lastStats[1], lastStats[2]])
        csvfile.flush()
    return 1
def __init__(self, numpieces, rarest_first_cutoff=1,
             rarest_first_priority_cutoff=3, priority_step=20):
    """Initialize piece-picker state for a torrent of `numpieces` pieces."""
    # Rarest-first tuning knobs.
    self.rarest_first_cutoff = rarest_first_cutoff
    self.rarest_first_priority_cutoff = rarest_first_priority_cutoff + priority_step
    self.priority_step = priority_step
    self.cutoff = rarest_first_priority_cutoff
    # Per-piece bookkeeping.
    self.numpieces = numpieces
    self.started = []
    self.totalcount = 0
    self.numhaves = [0] * numpieces
    self.priority = [1] * numpieces
    self.removed_partials = {}
    self.crosscount = [numpieces]
    self.crosscount2 = [numpieces]
    self.has = [0] * numpieces
    self.numgot = 0
    self.done = False
    # Seed / connection tracking.
    self.seed_connections = {}
    self.past_ips = {}
    self.seed_time = None
    self.superseed = False
    self.seeds_connected = 0
    self._init_interests()
    #### P2PVODEX start ####
    # VOD extension state; wired up later by the owning session.
    self.streamWatcher = None
    self.connecter = None
    self.storagewrapper = None
    self.vod_seeds_connected = 0
    self.logger = Logger.getLogger()
    self.rate = sys.maxint  # "unknown rate" sentinel (Python 2)
def __init__(self, numpieces, rarest_first_cutoff=1, rarest_first_priority_cutoff=3, priority_step=20):
    """Set up piece-picker state for a torrent with `numpieces` pieces.

    Parameters
    ----------
    numpieces : number of pieces in the torrent
    rarest_first_cutoff : pieces to fetch in-order before rarest-first
    rarest_first_priority_cutoff : base cutoff for priority handling
    priority_step : step added per priority level
    """
    # Rarest-first tuning parameters.
    self.rarest_first_cutoff = rarest_first_cutoff
    self.rarest_first_priority_cutoff = rarest_first_priority_cutoff + priority_step
    self.priority_step = priority_step
    self.cutoff = rarest_first_priority_cutoff
    # Per-piece bookkeeping: counts of haves, priorities, completion flags.
    self.numpieces = numpieces
    self.started = []
    self.totalcount = 0
    self.numhaves = [0] * numpieces
    self.priority = [1] * numpieces
    self.removed_partials = {}
    self.crosscount = [numpieces]
    self.crosscount2 = [numpieces]
    self.has = [0] * numpieces
    self.numgot = 0
    self.done = False
    # Seed/connection tracking.
    self.seed_connections = {}
    self.past_ips = {}
    self.seed_time = None
    self.superseed = False
    self.seeds_connected = 0
    self._init_interests()
    #### P2PVODEX start ####
    # VOD extension state; presumably assigned by the session after
    # construction — streamWatcher/connecter/storagewrapper start as None.
    self.streamWatcher = None
    self.connecter = None
    self.storagewrapper = None
    self.vod_seeds_connected = 0
    self.logger = Logger.getLogger()
    # sys.maxint as an "unknown/unbounded rate" sentinel (Python 2 only).
    self.rate = sys.maxint
def dynamicHybridNext(self, haves, wantfunc, complete_first):
    """Choose the next piece with a hybrid policy whose in-order window is
    resized from the observed DFS (download-from-server) trend, then
    delegate the actual pick to self.hybridNext.
    """
    # One-time lazy initialization on the first call.
    if (not hasattr(self, 'inOrderWindow')):
        # Initial window: scaled by the share of non-seeder VOD peers.
        self.inOrderWindow = int(max(0, 0.25 - 0.5 * self.getPercentageOfNotSeedersVOD()) * len(haves))
        # DFS per-mille of the total traffic at the time of init.
        self.prevDfs = self.streamWatcher.total_dfs*1000 / self.streamWatcher.total
        self.lastWindowUpdate = time.time()
    # Re-evaluate the window once per prefetch period (plus 5s slack).
    if ((time.time() - self.lastWindowUpdate) > self.streamWatcher.prefetchT + 5):
        currDfs = self.streamWatcher.total_dfs*1000 / self.streamWatcher.total
        dfsDiff = currDfs - self.prevDfs
        self.prevDfs = currDfs
        self.lastWindowUpdate = time.time()
        if (dfsDiff >= 10):
            # DFS rising: widen the in-order window (capped at len(haves)).
            self.inOrderWindow = min(len(haves), self.inOrderWindow + 1)
        elif (dfsDiff < 1):
            # DFS flat: shrink the window (floored at 0).
            self.inOrderWindow = max(0, self.inOrderWindow - 1)
        Logger.getLogger().append("WINDOW_UPDATE", "%d %d %d" % (currDfs, dfsDiff,self.inOrderWindow))
    return (self.hybridNext(self.inOrderWindow, haves, wantfunc, complete_first))
def stats2csv(self):
    """Resample self.stats onto the prefetch-period grid and write the
    result as CSV rows [alg, dfs, p2p], padded before/after with
    gap-sized blocks so rows of peers in the same group line up.

    Returns 1 on success, 0 when fewer than half the expected samples
    were collected.
    """
    group_size = int(self.config['group_size'])
    order = int(self.config['order'])
    # Nominal end of playback from total size and stream rate.
    end_time = int(self.toKbytes(self.total) / self.rate)
    Logger.getLogger().append("CSV", "has %d samples" % len(self.stats))
    # Too few samples to be useful: decline.
    if (len(self.stats) < (end_time / self.prefetch) * 0.5):
        return 0
    # NOTE(review): `file` shadows the builtin; 'wb' is the Python 2
    # csv-module convention (the codebase uses sys.maxint elsewhere).
    with open(self.csvFile, 'wb') as file:
        writer = csv.writer(file)
        writer.writerow(['alg','dfs','p2p'])
        #pre-gap padding
        for i in range(0, (order - 1) * self.gap):
            writer.writerow([self.config['alg'],0,0])
        #my data
        t = self.stats[0][0]
        Logger.getLogger().append("CSV", "first sample time = %d" % t)
        statsIndex = 0
        while (t <= end_time) :
            # Advance to the sample pair bracketing t.
            # NOTE(review): this scan is unbounded — if t reaches or
            # passes the final sample's timestamp it indexes past the end
            # of self.stats and raises IndexError; confirm end_time is
            # always covered by the samples.
            while t >= self.stats[statsIndex][0]:
                lastStats = self.stats[statsIndex]
                statsIndex = statsIndex + 1
            # Linear interpolation between lastStats and the next sample.
            factor = float(t - lastStats[0]) / (self.stats[statsIndex][0] - lastStats[0])
            dfs = int(math.ceil(lastStats[1] + factor * (self.stats[statsIndex][1] - lastStats[1])))
            p2p = int(math.ceil(lastStats[2] + factor * (self.stats[statsIndex][2] - lastStats[2])))
            Logger.getLogger().append("CSV", "t=%d factor=%.2f dfs=%d p2p=%d" % (t, factor, dfs, p2p))
            writer.writerow([self.config['alg'],int(dfs),int(p2p)])
            t = t + self.prefetch
        # Close with the final raw sample as the last data row.
        lastStats = self.stats[len(self.stats) - 1]
        Logger.getLogger().append("CSV", "t=%d dfs=%d p2p=%d" % (t, lastStats[1], lastStats[2]))
        writer.writerow([self.config['alg'],lastStats[1],lastStats[2]])
        #post-gap padding
        for i in range(0, (group_size - order) * self.gap):
            writer.writerow([self.config['alg'],lastStats[1],lastStats[2]])
        file.flush()
    return 1
def _rechoke(self, isVODPreferred = False): preferred = [] maxuploads = self.config['max_uploads'] if self.paused: for c in self.connections: c.get_upload().choke() return if maxuploads > 1: for c in self.connections: u = c.get_upload() #HILLEL: d = c.get_download() if (d.get_rate() != u.get_rate()) and (u.get_rate()>(d.get_rate()/4)): u.choke() #------- if not u.is_interested(): continue if self.done(): r = u.get_rate() else: d = c.get_download() r = d.get_rate() if r < 1000 or d.is_snubbed(): continue #### P2PVODEX start #### if self._finishedViewingMovie() and not self.passedEndOfMovieFlag: Logger.getLogger().append("CHOKER","Movie finish point viewing point - %d , total pieces - %d" % (self.picker.getViewingPoint() , self.picker.numpieces)) self.passedEndOfMovieFlag = True if isVODPreferred or self._finishedViewingMovie(): conncetionIndex = 2 preferred.append((int(not u.connection.isVODPeer()), -r ,c)) else: conncetionIndex = 1 preferred.append((-r ,c)) self.last_preferred = len(preferred) preferred.sort() del preferred[maxuploads-1:] preferred = [x[conncetionIndex] for x in preferred] #if len(preferred) > 0: #Logger.getLogger().append("CHOKER","Top of preferred list is - %s" % preferred[0].get_id()) count = len(preferred) hit = False to_unchoke = [] for c in self.connections: u = c.get_upload() if c in preferred: to_unchoke.append(u) else: if count < maxuploads or not hit: to_unchoke.append(u) if u.is_interested(): count += 1 hit = True else: u.choke() #Logger.getLogger().append("CHOKER","Choking - %s" % u.connection.get_id()) for u in to_unchoke: #Logger.getLogger().append("CHOKER","Unchoking - %s" % u.connection.get_id()) u.unchoke()
def _rechoke(self, isVODPreferred=False):
    """Recompute choke/unchoke decisions across all connections.

    Peers are ranked by rate (upload rate when seeding, download rate
    otherwise); VOD peers rank ahead of non-VOD peers once the local
    viewer has finished the movie or when isVODPreferred is set.
    """
    preferred = []
    maxuploads = self.config['max_uploads']
    # While paused, choke everyone and stop.
    if self.paused:
        for c in self.connections:
            c.get_upload().choke()
        return
    if maxuploads > 1:
        for c in self.connections:
            u = c.get_upload()
            #HILLEL:
            d = c.get_download()
            # Reciprocation guard: choke a peer we upload to faster than
            # a quarter of what it gives us.
            if (d.get_rate() != u.get_rate()) and (u.get_rate() > (d.get_rate() / 4)):
                u.choke()
            #-------
            if not u.is_interested():
                continue
            if self.done():
                r = u.get_rate()
            else:
                d = c.get_download()
                r = d.get_rate()
            # Skip slow or snubbed peers.
            if r < 1000 or d.is_snubbed():
                continue
            #### P2PVODEX start ####
            # Log the end-of-movie transition exactly once.
            if self._finishedViewingMovie() and not self.passedEndOfMovieFlag:
                Logger.getLogger().append(
                    "CHOKER",
                    "Movie finish point viewing point - %d , total pieces - %d"
                    % (self.picker.getViewingPoint(), self.picker.numpieces))
                self.passedEndOfMovieFlag = True
            # NOTE(review): `conncetionIndex` is a misspelling of
            # "connectionIndex"; it selects the connection's slot in the
            # sort tuples built below.
            if isVODPreferred or self._finishedViewingMovie():
                # 3-tuples: VOD peers (flag 0) sort ahead of non-VOD (1),
                # then by descending rate; connection sits at index 2.
                conncetionIndex = 2
                preferred.append(
                    (int(not u.connection.isVODPeer()), -r, c))
            else:
                # 2-tuples ranked by descending rate; connection at index 1.
                conncetionIndex = 1
                preferred.append((-r, c))
        self.last_preferred = len(preferred)
        preferred.sort()
        # Keep the top maxuploads-1 entries, leaving one optimistic slot.
        del preferred[maxuploads - 1:]
        preferred = [x[conncetionIndex] for x in preferred]
    #if len(preferred) > 0:
    #Logger.getLogger().append("CHOKER","Top of preferred list is - %s" % preferred[0].get_id())
    count = len(preferred)
    hit = False
    to_unchoke = []
    for c in self.connections:
        u = c.get_upload()
        if c in preferred:
            to_unchoke.append(u)
        else:
            # Optimistic unchoke slots for peers outside the preferred set.
            if count < maxuploads or not hit:
                to_unchoke.append(u)
                if u.is_interested():
                    count += 1
                    hit = True
            else:
                u.choke()
                #Logger.getLogger().append("CHOKER","Choking - %s" % u.connection.get_id())
    for u in to_unchoke:
        #Logger.getLogger().append("CHOKER","Unchoking - %s" % u.connection.get_id())
        u.unchoke()