def main():
    """Serve a local file as a segmented, versioned NDN object.

    Usage: <prog> <name> <file>. The object is published under
    <name>/<version(timestamp)>/<seg=i> and served forever.
    """
    # Require both positional arguments: object name and file path.
    if len(sys.argv) <= 2:
        print(f'Usage: {sys.argv[0]} <name> <file>')
        exit(0)
    logging.basicConfig(format='[{asctime}]{levelname}:{message}',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO,
                        style='{')
    app = NDNApp()
    # Version the object name with the current timestamp.
    name = Name.normalize(sys.argv[1])
    name.append(Component.from_version(timestamp()))
    with open(sys.argv[2], 'rb') as src:
        payload = src.read()
    # Ceiling division: number of SEGMENT_SIZE-byte chunks.
    seg_cnt = (len(payload) + SEGMENT_SIZE - 1) // SEGMENT_SIZE
    # Pre-encode every Data packet so Interests can be answered from memory.
    packets = []
    for seg in range(seg_cnt):
        chunk = payload[seg * SEGMENT_SIZE:(seg + 1) * SEGMENT_SIZE]
        packets.append(app.prepare_data(name + [Component.from_segment(seg)],
                                        chunk,
                                        freshness_period=10000,
                                        final_block_id=Component.from_segment(seg_cnt - 1)))
    print(f'Created {seg_cnt} chunks under name {Name.to_str(name)}')

    @app.route(name)
    def on_interest(int_name, _int_param, _app_param):
        # Default to segment 0 when the Interest has no segment component.
        seg_no = 0
        if Component.get_type(int_name[-1]) == Component.TYPE_SEGMENT:
            seg_no = Component.to_number(int_name[-1])
        if seg_no < seg_cnt:
            app.put_raw_packet(packets[seg_no])

    app.run_forever()
def _prepare_data(self, file_path: str, name_at_repo, segment_size: int, freshness_period: int, cpu_count: int):
    """
    Shard file into data packets.

    :param file_path: Local FS path to file to insert
    :param name_at_repo: Name used to store file at repo
    :param segment_size: Payload bytes carried by each segment
    :param freshness_period: Freshness value passed through to packet encoding
    :param cpu_count: Number of worker processes used for encoding
    """
    # Guard clauses: missing or empty files produce no packets.
    if not os.path.exists(file_path):
        logging.error(f'file {file_path} does not exist')
        return 0
    with open(file_path, 'rb') as binary_file:
        content = bytearray(binary_file.read())
    if not content:
        logging.warning("File is empty")
        return 0
    # Ceiling division: total number of segments needed.
    seg_cnt = (len(content) + segment_size - 1) // segment_size
    last_block = Component.from_segment(seg_cnt - 1)
    params = []
    for seq in range(seg_cnt):
        chunk = content[seq * segment_size:(seq + 1) * segment_size]
        params.append([name_at_repo + [Component.from_segment(seq)],
                       chunk,
                       freshness_period,
                       last_block])
    # Use multiple processes to speed up creating TLV packets.
    with multiprocessing.Pool(processes=cpu_count) as pool:
        self.encoded_packets = pool.starmap(_create_packets, params)
def on_interest(self, name: FormalName, _param: InterestParam, _app_param: typing.Optional[BinaryStr]):
    """
    Serve one segment of a git object stored in the repo.

    The Interest name is either <prefix>/<obj> (implies segment 0) or
    <prefix>/<obj>/<seg=N>. Replies with a SyncObject-encoded chunk.
    """
    # Get the object name and segment number from the Interest name.
    if Component.get_type(name[-1]) == Component.TYPE_SEGMENT:
        obj_name = Component.get_value(name[-2])
        seg_no = Component.to_number(name[-1])
    else:
        obj_name = Component.get_value(name[-1])
        seg_no = 0
    # Read the data.
    # Git objects are small so we can read the whole object.
    try:
        obj_type, data = self.repo.read_obj(bytes(obj_name))
    except ValueError:
        logging.warning(f'Requested file {obj_name} does not exist in repo {self.repo.repo_name}')
        return
    # Extract the segment and calculate Name.
    data_name = self.prefix + [Component.from_bytes(obj_name), Component.from_segment(seg_no)]
    start_pos = seg_no * SEGMENTATION_SIZE
    data_seg = data[start_pos:start_pos + SEGMENTATION_SIZE]
    packet_obj = SyncObject()
    packet_obj.obj_type = obj_type.encode()
    packet_obj.obj_data = data_seg
    wire = packet_obj.encode()
    # Segments are 0-indexed, so the final block id is seg_cnt - 1.
    # (The original used the segment *count* here, pointing one past the
    # last valid segment; consumers would fetch one extra, empty segment.)
    seg_cnt = (len(data) + SEGMENTATION_SIZE - 1) // SEGMENTATION_SIZE
    final_block = max(seg_cnt - 1, 0)  # empty object still yields segment 0
    self.app.put_data(data_name, wire, freshness_period=3600000,
                      final_block_id=Component.from_segment(final_block))
    # Log the segment actually served, not the final block id.
    logging.debug(f'Responded {obj_name} segment {seg_no} in repo {self.repo.repo_name}')
def on_interest(name: FormalName, param: InterestParam, _app_param: Optional[BinaryStr]):
    """Interest handler for the video producer.

    Serves three request shapes (distinguished by name components):
    metadata requests, frame requests, and anything else (logged as missing).
    After every request, drains any pending frame Interests that can now be
    satisfied from the frame buffer.
    """
    logging.info(f'>> I: {Name.to_str(name)}, {param}')
    request = Name.to_str(name).split("/")
    print("handle Interest Name", Name.to_str(name))
    if request[-2] == "metadata":
        # Metadata reply: the content is the Interest name extended with the
        # current I-frame number, encoded as text.
        print("handle Meta data")
        # content = json.dumps(list(pred_frame_buffer)).encode()
        # content = str(current_I_frame).encode()
        content = Name.to_str(
            name + [Component.from_number(current_I_frame, 0)]).encode()
        # NOTE(review): no-op self-assignment; confirm it can be removed.
        name = name
        app.put_data(name, content=content, freshness_period=300)
        logging.info("handle to name " + Name.to_str(name))
    elif request[-3] == "frame":
        # Frame request: last name component is the frame number.
        interest_frame_num = int(request[-1])
        if interest_frame_num in frame_buffer_dict:
            # Frame already produced: answer immediately.
            # The appended raw component is a GenericNameComponent (type 0x08,
            # length 2, value 0x0000) — presumably a zero segment marker;
            # TODO confirm against the consumer side.
            content = frame_buffer_dict[interest_frame_num]
            app.put_data(name + [b'\x08\x02\x00\x00'], content=content,
                         freshness_period=2000,
                         final_block_id=Component.from_segment(0))
            print(
                f'handle interest: publish pending interest' + Name.to_str(name)
                + "------------/" + str(interest_frame_num) + "length: ",
                len(content))
        else:
            # Frame not yet produced: park the Interest until it is.
            interest_buffer.append([interest_frame_num, name])
    else:
        print("handle Request missing ", Name.to_str(name))
    # Drain pending Interests whose frame number has since been produced.
    # Runs after every request, regardless of which branch handled it.
    while len(interest_buffer) > 0 and len(
            frame_buffer) > 0 and frame_buffer[-1] >= interest_buffer[0][0]:
        pendingInterest = interest_buffer.popleft()
        pendingFN = pendingInterest[0]
        pendingName = pendingInterest[1]
        if pendingFN in frame_buffer_dict:
            content = frame_buffer_dict[pendingFN]
            app.put_data(pendingName + [b'\x08\x02\x00\x00'], content=content,
                         freshness_period=2000,
                         final_block_id=Component.from_segment(0))
            print(
                f'handle interest: publish pending interest'
                + Name.to_str(pendingName) + "------------/" + str(pendingFN)
                + "length: ", len(content))
def test_number():
    """Check the wire encoding of number-typed name components."""
    # Each typed component encodes the value 13 with its own TLV type byte.
    encodings = [
        (Component.from_segment(13), b'!\x01\r'),
        (Component.from_byte_offset(13), b'\x22\x01\r'),
        (Component.from_sequence_num(13), b'%\x01\r'),
        (Component.from_version(13), b'#\x01\r'),
    ]
    for component, wire in encodings:
        assert component == wire
    # Timestamp components use TLV type 36 and a big-endian value.
    timeval = 15686790223318112
    comp = Component.from_timestamp(timeval)
    assert Component.get_type(comp) == 36
    assert Component.get_value(comp) == b'\x00\x37\xbb\x0d\x76\xed\x4c\x60'
    assert Component.to_number(comp) == timeval
def encode_file_into_packets(self, file_info):
    """Split the file described by *file_info* into encoded NDN packets.

    Returns the list of EncodedPacket objects, one per segment.
    """
    num_segs = file_info.num_segs
    logging.info("Encoding {} into {} packets".format(file_info.filename, file_info.num_segs))
    encoded_packets = []
    with open(file_info.relative_path, 'rb') as infile:
        # Drop empty components produced by leading/trailing slashes.
        parts = [comp for comp in file_info.get_ndn_name().split("/") if comp]
        base_name = Name.normalize(parts)
        logging.info("Segmenting file under {}".format(Name.to_str(base_name)))
        payload = infile.read()
        # Segments are 0-indexed; the last one is num_segs - 1.
        final_block_id = Component.from_segment(num_segs - 1)
        block_size = file_info.block_size
        for seg in range(num_segs):
            seg_name = base_name + [Component.from_segment(seg)]
            packet = EncodedPacket(Name.to_str(seg_name), seg_name,
                                   payload[seg * block_size:(seg + 1) * block_size],
                                   self.freshness_period, final_block_id, self.app)
            logging.info("Encoded packet: {}".format(packet.original_name))
            encoded_packets.append(packet)
    return encoded_packets
def test_number():
    """Round-trip number components: wire encoding, URI text, and parsing."""
    # (component, expected wire bytes, expected URI form) for value 13.
    cases = [
        (Component.from_segment(13), b'!\x01\r', 'seg=13'),
        (Component.from_byte_offset(13), b'\x22\x01\r', 'off=13'),
        (Component.from_sequence_num(13), b'%\x01\r', 'seq=13'),
        (Component.from_version(13), b'#\x01\r', 'v=13'),
    ]
    for component, wire, text in cases:
        assert component == wire
        assert Component.to_str(component) == text
        assert Component.from_str(text) == wire
    # Timestamp components use TLV type 36 and a big-endian value.
    timeval = 15686790223318112
    comp = Component.from_timestamp(timeval)
    assert Component.get_type(comp) == 36
    assert Component.get_value(comp) == b'\x00\x37\xbb\x0d\x76\xed\x4c\x60'
    assert Component.to_number(comp) == timeval
    assert Component.to_str(Component.from_timestamp(timeval)) == 't=15686790223318112'
    assert Component.from_str('t=15686790223318112') == b'$\x08\x007\xbb\rv\xedL`'
def on_interest(inst_name: FormalName, inst_param: InterestParam, app_param: BinaryStr):
    """Answer a segment Interest with a pre-encoded packet.

    The Interest carries JSON ApplicationParameters holding base64-encoded
    session-key material.
    """
    request = json.loads(app_param.tobytes().decode())
    # NOTE(review): decoded but unused below — presumably consumed elsewhere
    # (e.g. for payload decryption); confirm before removing.
    enc_session_key = base64.b64decode(request['enc_session_key'])
    nonce = base64.b64decode(request['nonce'])
    # Default to segment 0 when the name carries no segment component.
    seg_no = 0
    if Component.get_type(inst_name[-1]) == Component.TYPE_SEGMENT:
        seg_no = Component.to_number(inst_name[-1])
    if seg_no < seg_cnt:
        app.put_data(inst_name, packets[seg_no],
                     final_block_id=Component.from_segment(seg_cnt - 1),
                     freshness_period=10000)
async def _retry(seq: int):
    """
    Retry 3 times fetching data of the given sequence number or fail.
    :param seq: block_id of data
    """
    nonlocal app, name, semaphore, is_failed, received_or_fail, final_id
    int_name = name + [Component.from_segment(seq)]
    trial_times = 0
    while True:
        trial_times += 1
        if trial_times > 3:
            # Out of retries: mark the whole fetch failed and wake any waiter.
            semaphore.release()
            is_failed = True
            received_or_fail.set()
            return
        try:
            logging.info('Express Interest: {}'.format(
                Name.to_str(int_name)))
            data_name, meta_info, content, data_bytes = await app.express_interest(
                int_name, need_raw_packet=True, must_be_fresh=False, can_be_prefix=False, lifetime=1000, **kwargs)
            # Save data and update final_id
            logging.info('Received data: {}'.format(
                Name.to_str(data_name)))
            seq_to_data_packet[seq] = (data_name, meta_info, content, data_bytes)
            if meta_info is not None and meta_info.final_block_id is not None:
                final_id = Component.to_number(meta_info.final_block_id)
            # Success: leave the retry loop.
            break
        except InterestNack as e:
            # NACK and timeout both fall through and retry (up to 3 attempts).
            logging.info(f'Nacked with reason={e.reason}')
        except InterestTimeout:
            logging.info(f'Timeout')
    # Success path: free a pipeline slot and signal that data arrived.
    semaphore.release()
    received_or_fail.set()
async def _perform_storage_delete(self, prefix, start_block_id: int, end_block_id: int) -> int:
    """
    Delete data packets between [start_block_id, end_block_id].
    If end_block_id is None, delete all continuous data packets from start_block_id.

    :param prefix: NonStrictName.
    :param start_block_id: int.
    :param end_block_id: int, or None for "until the first gap".
    :return: The number of data items deleted.
    """
    delete_num = 0
    idx = start_block_id
    # Honor the documented end_block_id=None case: the original
    # `range(start_block_id, end_block_id + 1)` raised TypeError on None.
    while end_block_id is None or idx <= end_block_id:
        key = prefix + [Component.from_segment(idx)]
        if self.storage.get_data_packet(key) is None:
            # Assume sequence numbers are continuous: stop at the first gap.
            break
        self.storage.remove_data_packet(key)
        delete_num += 1
        idx += 1
        # Temporarily release control to make the process non-blocking.
        await aio.sleep(0)
    return delete_num
async def segment_fetcher(app: NDNApp, name: NonStrictName, app_param: BinaryStr,
                          timeout=4000, retry_times=3, validator=None, must_be_fresh=True):
    """
    An async-generator to fetch a segmented object. Interests are issued one by one.

    :param app: NDN Application
    :param name: Name prefix of Data
    :param app_param: ApplicationParameters attached to every Interest
    :param timeout: Timeout value, in milliseconds
    :param retry_times: Times for retry
    :param validator: Validator
    :param must_be_fresh: MustBeFresh field of Interest
    :return: Data segments in order.
    """
    async def retry(first):
        # Express one Interest; re-issue on timeout up to retry_times,
        # then propagate the last InterestTimeout.
        nonlocal name
        trial_times = 0
        while True:
            future = app.express_interest(name, app_param=app_param, validator=validator,
                                          can_be_prefix=first, must_be_fresh=must_be_fresh,
                                          lifetime=timeout, signer=None)
            try:
                return await future
            except InterestTimeout:
                trial_times += 1
                if trial_times >= retry_times:
                    raise

    name = Name.normalize(name)
    # First Interest
    name, meta, content = await retry(True)
    # NOTE(review): a seg=0 component is appended unconditionally, so the
    # "not segmented" branch below can never trigger and the to_number(...) == 0
    # test is always true — confirm this deviation from the upstream fetcher
    # (which checks the *received* data name) is intended.
    name += [Component.from_segment(0)]
    # If it's not segmented
    if Component.get_type(name[-1]) != Component.TYPE_SEGMENT:
        yield content
        return
    # If it's segmented
    if Component.to_number(name[-1]) == 0:
        yield content
        if meta.final_block_id == name[-1]:
            return
        seg_no = 1
    else:
        # If it's not segment 0, starting from 0
        seg_no = 0
    # Following Interests: rewrite the trailing segment component each round
    # and stop once the received name matches the advertised final block id.
    while True:
        name[-1] = Component.from_segment(seg_no)
        name, meta, content = await retry(False)
        yield content
        if meta.final_block_id == name[-1]:
            return
        seg_no += 1