def update_receiving(self, chunkinfo):
    """Account for a successfully received chunk in the loss-rate counters.

    Depending on ``self.lossratetype``, either every packet of the chunk
    ("packet" mode, adds chunkinfo.packetN) or the chunk as a single unit
    ("chunk" mode, adds 1) is added to ``self.receivedN``.

    chunkinfo -- per-chunk record; only its ``packetN`` attribute is read.
    """
    if self.lossratetype == "packet":
        self.receivedN += chunkinfo.packetN
    elif self.lossratetype == "chunk":
        self.receivedN += 1
    else:
        # fixed typo in the log message: "unknow" -> "unknown"
        log.warn("unknown lossratetype: %s" %(self.lossratetype))
def update_loss(self, chunkinfo):
    """Account for a lost (e.g. timed-out) chunk and refresh the sample.

    The loss is modeled as exactly one packet of the chunk being lost:
    in "packet" mode the remaining ``packetN - 1`` packets still count as
    received; in "chunk" mode the whole chunk counts as lost and nothing
    is added to ``self.receivedN``.  Always increments ``self.lostN`` and
    calls ``self.sample()`` afterwards.
    """
    self.lostN += 1
    #self.lostmaxN += chunkinfo.packetN
    if self.lossratetype == "packet":
        # one packet assumed lost; the rest of the chunk got through
        self.receivedN += chunkinfo.packetN - 1
    elif self.lossratetype == "chunk":
        # whole chunk counts as lost: nothing received (was "+= 0")
        pass
    else:
        # fixed typo in the log message: "unknow" -> "unknown"
        log.warn("unknown lossratetype: %s" %(self.lossratetype))
    self.sample()
def start(self):
    """Drive the NDN event loop until the transfer is stopped.

    Instead of blocking forever in ``self.handle.run(-1)`` -- which would
    keep the parent thread from catching signal interrupts -- the loop
    repeatedly runs the handle with a short timeout so that control
    returns here and the status flag (and pending signals) can be
    checked between iterations.

    Returns ``self.is_all``: whether all content was received by the
    time the loop exits (status switched to STATUS_OFF elsewhere,
    e.g. by ``self.stop()``).
    """
    log.warn("%s begin to request %s" %(self.Id, self.ndn_name))
    self.status = Controller.STATUS_ON
    # kick off the pipeline by expressing the first Interest
    self.first_express_interest()

    while self.status != Controller.STATUS_OFF:
        if self.status == Controller.STATUS_ON:
            # normal operation: pump the event loop for one timeout slice
            self.handle.run(DEFAULT_NDN_RUN_TIMEOUT)
        elif self.status == Controller.STATUS_STEP_ON:
            # single-step mode: run one slice, then pause ourselves
            self.handle.run(DEFAULT_NDN_RUN_TIMEOUT)
            self.status = Controller.STATUS_STEP_OFF
        elif self.status == Controller.STATUS_STEP_OFF:
            # paused: idle until some other actor flips the status
            time.sleep(1)
    return self.is_all
def in_order_content(self, upcallInfo):
    """Handle a Data chunk that arrived in the expected order.

    Writes the chunk's payload to the output file, then flushes any
    previously buffered out-of-order chunks that are now contiguous,
    and finally checks whether the whole content has been received
    (stopping the controller if so).

    NOTE(review): this is Python 2 code (``itervalues`` below); there
    ``keys()`` returns a list copy, so popping entries inside the loop
    is safe.  The break-on-gap logic also assumes
    ``unsatisfied_chunks_keyed_by_name`` iterates in chunk order --
    verify it is an ordered mapping.
    """
    name = str(upcallInfo.Interest.name)
    chunkinfo = self.mydata.unsatisfied_chunks_keyed_by_name.pop(name)
    if not self.fout.closed:
        self.fout.write(chunkinfo.content)
    else:
        log.critical("fails to write content")
    self.mydata.accept_bytes += chunkinfo.data_size
    if not self.cache_data:
        # drop the payload to save memory once it has been written out
        chunkinfo.content = None
    chunkinfo.status = 2 # status 2: satisfied
    self.mydata.satisfied_chunkN += 1
    self.mydata.expected_chunkI += 1
    self.window.update_receiving(chunkinfo)
    log.debug("received in-order Data: %s, out packet: %s" %(name, self.window.packet_on_the_fly))

    # flush the out-of-order contents received (and buffered) before
    for name in self.mydata.unsatisfied_chunks_keyed_by_name.keys():
        chunkinfo = self.mydata.unsatisfied_chunks_keyed_by_name[name]
        if chunkinfo.endT == None:
            # first gap found: everything from here on is still outstanding
            break
        else:
            chunkinfo = self.mydata.unsatisfied_chunks_keyed_by_name.pop(name)
            if not self.fout.closed:
                self.fout.write(chunkinfo.content)
            else:
                log.critical("fails to write content")
            self.mydata.accept_bytes += chunkinfo.data_size
            if not self.cache_data:
                chunkinfo.content = None
            self.mydata.expected_chunkI += 1
            # NOTE(review): satisfied_chunkN is NOT incremented on this
            # path, unlike the in-order path above -- confirm intended.
            chunkinfo.status = 2 # status 2: satisfied
    if self.mydata.final_byte == self.mydata.accept_bytes:
        self.is_all = True
        for chunkinfo in self.mydata.unsatisfied_chunks_keyed_by_name.itervalues():
            log.warn(str(chunkinfo))
        log.warn("------------------------ %s all the contents are received---------------------------" %(self.Id))
        self.stop()
def upcall(self, kind, upcallInfo):
    """NDN upcall entry point for incoming Interests.

    Anything other than an Interest upcall is logged and acknowledged
    with RESULT_OK.  For Interests, a content object is built via
    ``self.prepare()`` and handed to the handle; either way the Interest
    is reported as consumed.
    """
    if kind != pyndn.UPCALL_INTEREST:
        log.warn("get kind: %s" %str(kind))
        return pyndn.RESULT_OK

    co = self.prepare(upcallInfo)
    if co == None:
        log.warn("co == None")
        return pyndn.RESULT_INTEREST_CONSUMED

    result = self.handle.put(co)
    if result < 0:
        log.warn("fail put content: %s, result: %s" %(co.ndn_name, result))
    else:
        log.debug("successfully put content, and returns %s"%(result))
    return pyndn.RESULT_INTEREST_CONSUMED
def do_meet_accident(self, kind, upcallInfo):
    """React to an accident (typically a timeout) for an outstanding Interest.

    Updates the chunk-size estimator and congestion window with the
    loss, then either re-expresses the Interest immediately (if the
    window allows) or marks the chunk to be re-expressed later.
    """
    name = str(upcallInfo.Interest.name)
    pending = self.mydata.unsatisfied_chunks_keyed_by_name
    if name not in pending:
        # two identical Interests can never be in flight at once,
        # so a timeout for an unknown name indicates a bug
        log.error("timeout Interest not in the unsatisfied list, it should not happend: %s!!" %(name))
        return

    chunkinfo = pending[name]
    self.chunkSizeEstimator.update_loss(chunkinfo)
    self.window.update_loss(chunkinfo)

    if kind == 4:
        # kind 4: Interest timed out -- assumed; confirm against pyndn constants
        log.debug("timeout, Interest=%s, out packet: %d" %(upcallInfo.Interest.name, self.window.packet_on_the_fly))
    else:
        log.warn("-----------------strange accident: kind=%s, Interest=%s------------------" %(kind, upcallInfo.Interest.name))

    # window check: the chunk is known-unsatisfied, but may only be
    # re-expressed right away if there is room in the congestion window
    if self.window.packet_on_the_fly < self.window.get_cwnd():
        self.re_express_interest(chunkinfo)
    else:
        chunkinfo.status = 0 # wait for re-expressing
def extend(self, plt):
    """Extension hook (extend line); subclasses are expected to override.

    The base implementation only warns that no override was provided.
    """
    log.warn("wait for override")
def duplicate_content(self, upcallInfo):
    """Note the arrival of a duplicate Data packet; it is only logged."""
    dup_name = upcallInfo.Interest.name
    log.warn("received duplicated Data: %s" %(dup_name))
# tail of get_image_base_url (definition starts before this chunk;
# see the call below) -- presumably returns the full poster URL prefix
    return base_url + poster_size

# Loading API /configuration
image_base_url = get_image_base_url(API_KEY)

log.info('Opening movies files...')
# read csv/pre-movies.csv and write csv/movies.csv with a poster column added
with open('csv/pre-movies.csv', newline='') as original_file, open('csv/movies.csv', 'w', newline='') as final_file:
    reader = csv.reader(original_file, delimiter=',')
    next(reader, None) # skip the headers
    writer = csv.writer(final_file, delimiter=',', quoting=csv.QUOTE_MINIMAL)
    writer.writerow(['_id', 'title', 'genres', 'poster']) # prepare headers
    for row in reader:
        # input columns: id, title, genres
        localID = row[0]
        title = row[1]
        genres = row[2]
        log.info('Local ID: {}'.format(localID))
        poster = get_poster_for_title(title)
        if poster:
            # poster is a relative path; prefix with the API's base URL
            poster = image_base_url + poster
        else:
            log.warn('No poster for ID: {} Title: "{}"'.format(localID, title))
            poster = ''
        writer.writerow([localID, title, genres, poster])