def _prepare_data(self, file_path: str, name_at_repo, segment_size: int, freshness_period: int, cpu_count: int):
    """
    Shard a local file into NDN Data packets and cache the encoded wires.

    :param file_path: local filesystem path of the file to insert
    :param name_at_repo: NDN name under which the file is stored at the repo
    :param segment_size: maximum payload bytes per segment
    :param freshness_period: FreshnessPeriod for every generated packet
    :param cpu_count: number of worker processes used for encoding
    """
    if not os.path.exists(file_path):
        logging.error(f'file {file_path} does not exist')
        return 0
    with open(file_path, 'rb') as binary_file:
        content = bytearray(binary_file.read())
    if len(content) == 0:
        logging.warning("File is empty")
        return 0
    # Encode the TLV packets in parallel to speed up sharding.
    seg_cnt = (len(content) + segment_size - 1) // segment_size
    final_block_id = Component.from_segment(seg_cnt - 1)
    packet_params = []
    for seq in range(seg_cnt):
        chunk = content[seq * segment_size:(seq + 1) * segment_size]
        packet_params.append([
            name_at_repo + [Component.from_segment(seq)],
            chunk,
            freshness_period,
            final_block_id,
        ])
    with multiprocessing.Pool(processes=cpu_count) as p:
        self.encoded_packets = p.starmap(_create_packets, packet_params)
def main():
    """Serve a local file as segmented, versioned NDN Data packets."""
    if len(sys.argv) <= 2:
        print(f'Usage: {sys.argv[0]} <name> <file>')
        exit(0)
    logging.basicConfig(format='[{asctime}]{levelname}:{message}',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO,
                        style='{')
    app = NDNApp()
    # Version the served name with the current timestamp.
    name = Name.normalize(sys.argv[1])
    name.append(Component.from_version(timestamp()))
    with open(sys.argv[2], 'rb') as f:
        data = f.read()
    seg_cnt = (len(data) + SEGMENT_SIZE - 1) // SEGMENT_SIZE
    final_block = Component.from_segment(seg_cnt - 1)
    packets = []
    for i in range(seg_cnt):
        payload = data[i * SEGMENT_SIZE:(i + 1) * SEGMENT_SIZE]
        packets.append(app.prepare_data(name + [Component.from_segment(i)],
                                        payload,
                                        freshness_period=10000,
                                        final_block_id=final_block))
    print(f'Created {seg_cnt} chunks under name {Name.to_str(name)}')

    @app.route(name)
    def on_interest(int_name, _int_param, _app_param):
        # Interests without a segment component get segment 0.
        if Component.get_type(int_name[-1]) == Component.TYPE_SEGMENT:
            seg_no = Component.to_number(int_name[-1])
        else:
            seg_no = 0
        if seg_no < seg_cnt:
            app.put_raw_packet(packets[seg_no])

    app.run_forever()
def __init__(self, app: NDNApp, repo: repos.GitRepo, pipeline: RepoSyncPipeline):
    """
    Register the ref-list and push handlers for one git repo.

    :param app: NDN application used to register the route prefixes
    :param repo: git repository this instance serves
    :param pipeline: sync pipeline handling push requests
    :raises ValueError: if the GIT_NDN_PREFIX environment variable is unset
    """
    self.app = app
    self.repo = repo
    self.pipeline = pipeline
    # Fail fast with a clear message instead of a cryptic
    # "unsupported operand ... NoneType + str" when the env var is missing.
    prefix_env = os.getenv("GIT_NDN_PREFIX")
    if prefix_env is None:
        raise ValueError('GIT_NDN_PREFIX environment variable is not set')
    self.prefix = Name.from_str(prefix_env + f'/project/{repo.repo_name}')
    aio.create_task(self.app.register(
        self.prefix + [Component.from_str('ref-list')], self.ref_list))
    aio.create_task(self.app.register(
        self.prefix + [Component.from_str('push')], self.push))
def on_interest(int_name, _int_param, _app_param):
    """Answer a segment Interest from the pre-encoded packet list."""
    # Interests without a trailing segment component default to segment 0.
    seg_no = 0
    if Component.get_type(int_name[-1]) == Component.TYPE_SEGMENT:
        seg_no = Component.to_number(int_name[-1])
    if seg_no < seg_cnt:
        app.put_raw_packet(packets[seg_no])
async def get_last_frame(device_name):
    """
    Get the last frame number via /<device>/1080p/metadata/<timestamp>.

    Retries up to 100 times on NACK / timeout / cancel / validation failure.
    :param device_name: name of the producing device
    :return: last frame number, or None if every attempt failed
    """
    for _attempt in range(100):
        try:
            ts = ndn.utils.timestamp()
            name = Name.from_str('/{}/1080p/metadata/'.format(device_name)) + [
                Component.from_timestamp(ts)
            ]
            print(
                f'Sending Interest {Name.to_str(name)}, {InterestParam(must_be_fresh=False, lifetime=600)}'
            )
            data_name, meta_info, content = await app.express_interest(
                name, must_be_fresh=False, can_be_prefix=True, lifetime=2000)
            print(f'Received Data Name: {Name.to_str(data_name)}')
            ct = bytes(content)
            # The content carries the frame's full name as text; strip the
            # b'...' wrapper produced by str(bytes).
            last_frame_name = Name.from_str(str(ct)[2:-1])
            print("Last Frame number ", Name.to_str(last_frame_name))
            return Component.to_number(last_frame_name[-1])
        except InterestNack as e:
            print(f'Nacked with reason={e.reason}')
        except InterestTimeout:
            print('Timeout')
        except InterestCanceled:
            print('Canceled')
        except ValidationFailure:
            print('Data failed to validate')
    return None
def sha256_tester(typ, uri_prefix):
    """Check digest-component round-tripping for both hex letter cases."""
    hex_text = '%28%ba%d4%b5%27%5b%d3%92%db%b6%70%c7%5c%f0%b6%6f%13%f7%94%2b%21%e8%0f%55%c0%e8%6b%37%47%53%a5%48'
    hex_lower = hex_text.replace('%', '')
    hex_upper = hex_lower.upper()
    comp = Component.from_bytes(bytes.fromhex(hex_upper), typ=typ)
    assert Component.get_type(comp) == typ
    # to_str emits lowercase hex; from_str must accept either case.
    assert Component.to_str(comp) == uri_prefix + hex_lower
    assert Component.from_str(uri_prefix + hex_lower) == comp
    assert Component.from_str(uri_prefix + hex_upper) == comp
def __init__(self, app: NDNApp, groupPrefix: Name, nid: Name, updateCallback: Callable,
             storage: Optional[Storage] = None,
             securityOptions: Optional[SecurityOptions] = None) -> None:
    """Initialize the sync node with derived sync and data prefixes."""
    # Sync Interests go to <group>/sync; this node's data lives under
    # <nid>/<group>/data.
    preSyncPrefix = groupPrefix + [Component.from_str("sync")]
    preDataPrefix = nid + groupPrefix + [Component.from_str("data")]
    super().__init__(app, preSyncPrefix, preDataPrefix, groupPrefix, nid,
                     updateCallback, storage, securityOptions)
def main():
    """Parse CLI args, build the catalog command name, and send it."""
    parser = argparse.ArgumentParser(description='python client.py')
    parser.add_argument('-n', '--node_prefix', required=True,
                        help='Prefix of catalog ("/217B/repo/node/A")')
    parser.add_argument('-c', '--command', default='insert',
                        choices=['insert', 'delete', 'recall'],
                        help='Command Verb')
    parser.add_argument('-d', '--data_name', required=True,
                        help='data name ("/foo/bar/1.txt")')
    parser.add_argument('-s', '--hash', required=True,
                        help='data hash ("1bd109fe")')
    parser.add_argument('-o', '--desired_copies', type=int, default=3,
                        help='desired copies')
    args = parser.parse_args()
    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)
    app = NDNApp(face=None, keychain=KeychainDigest())
    # Command name layout: <node_prefix>/<command>/<encoded datainfo TLV>.
    name = Name.from_str(args.node_prefix)
    name.append(Component.from_str(args.command))
    datainfo = DatainfoTlvModel()
    datainfo.data_name = args.data_name.encode()
    datainfo.hash = args.hash.encode()
    datainfo.desired_copies = args.desired_copies
    name.append(Component.from_bytes(datainfo.encode()))
    try:
        app.run_forever(after_start=send(app, name))
    except FileNotFoundError:
        logging.error('Error: could not connect to NFD.\n')
        return 0
def test_basic(self):
    """Round-trip a model mixing name, uint, bytes and bool fields."""
    class Model(TlvModel):
        name = NameField()
        int_val = UintField(0x03)
        str_val = BytesField(0x02)
        bool_val = BoolField(0x01)

    wire_min = b'\x07\x0c\x08\x04test\x08\x04name\x03\x01\x00'
    wire_full = wire_min + b'\x02\x03str\x01\x00'

    obj = Model()
    obj.name = ['test', Component.from_str('name')]
    obj.int_val = 0
    assert obj.encode() == wire_min

    obj = Model.parse(wire_min)
    assert obj.name == Name.from_str('/test/name')
    assert obj.int_val == 0
    # Absent bool field parses falsy.
    assert not obj.bool_val

    # A name may also be assigned as a plain string.
    obj.name = 'test/name'
    obj.str_val = b'str'
    obj.bool_val = True
    assert obj.encode() == wire_full

    obj = Model.parse(wire_full)
    assert obj.str_val == b'str'
    assert obj.bool_val
async def main():
    """Express one Interest for timestamped random data and print the reply."""
    try:
        name = Name.from_str('/example/testApp/randomData') + [
            Component.from_timestamp(ndn.utils.timestamp())
        ]
        print(
            f'Sending Interest {Name.to_str(name)}, {InterestParam(must_be_fresh=True, lifetime=6000)}'
        )
        data_name, meta_info, content = await app.express_interest(
            name, must_be_fresh=True, can_be_prefix=False, lifetime=6000)
        print(f'Received Data Name: {Name.to_str(data_name)}')
        print(meta_info)
        print(bytes(content) if content else None)
    except InterestNack as e:
        print(f'Nacked with reason={e.reason}')
    except InterestTimeout:
        print('Timeout')
    except InterestCanceled:
        print('Canceled')
    except ValidationFailure:
        print('Data failed to validate')
    finally:
        # Always stop the app, success or not.
        app.shutdown()
async def fetch(self, obj_type: str, obj_name: bytes):
    """
    Fetch a git object over NDN, verify its SHA-1, store it, and recurse
    into anything a commit or tree references.

    :param obj_type: expected git type ('commit', 'tree', 'blob'), or an
        empty string to accept whatever type arrives
    :param obj_name: raw SHA-1 digest naming the object
    :return: False if the object is already present and fully fetched
    :raises ValueError: on type mismatch, digest mismatch, or unknown type
    """
    # Return if it exists (and isn't mid-fetch from a previous call)
    if self.repo.has_obj(obj_name) and obj_name not in self.incomplete_list:
        return False
    self.incomplete_list[obj_name] = obj_type
    # Fetch object
    packet_name = self.prefix + [Component.from_bytes(obj_name)]
    wire = b''.join([bytes(seg) async for seg in segment_fetcher(self.app, packet_name, must_be_fresh=False)])
    pack = SyncObject.parse(wire, ignore_critical=True)
    fetched_obj_type = bytes(pack.obj_type).decode()
    # Check type
    if obj_type and obj_type != fetched_obj_type:
        raise ValueError(f'{obj_type} is expected but get {fetched_obj_type}')
    # Write into repo. TODO: Transfer compressed data
    # Recompute the git object digest over "<type> <len>\0<data>" and make
    # sure it matches the name we asked for before storing.
    h = hashlib.sha1(obj_type.encode() + b' ' + f'{len(pack.obj_data)}'.encode() + b'\x00')
    h.update(pack.obj_data)
    if h.digest() != obj_name:
        raise ValueError(f'{obj_name} has a different digest')
    self.repo.store_obj(bytes(pack.obj_type), bytes(pack.obj_data))
    # Trigger recursive fetching of referenced objects
    if obj_type == "commit":
        await self.traverse_commit(bytes(pack.obj_data))
    elif obj_type == "tree":
        await self.traverse_tree(bytes(pack.obj_data))
    elif obj_type != "blob":
        raise ValueError(f'Unknown data type {obj_type}')
    # Mark the fetch complete only after the recursion succeeded.
    del self.incomplete_list[obj_name]
def on_interest(inst_name: FormalName, inst_param: InterestParam, app_param: BinaryStr):
    """Decode the request parameters and answer with the requested segment."""
    request = json.loads(app_param.tobytes().decode())
    enc_session_key = base64.b64decode(request['enc_session_key'])
    nonce = base64.b64decode(request['nonce'])
    # Interests without a segment component default to segment 0.
    seg_no = 0
    if Component.get_type(inst_name[-1]) == Component.TYPE_SEGMENT:
        seg_no = Component.to_number(inst_name[-1])
    if seg_no < seg_cnt:
        app.put_data(inst_name, packets[seg_no],
                     final_block_id=Component.from_segment(seg_cnt - 1),
                     freshness_period=10000)
async def query_face_id(app, uri, fuzzy_query=False):
    """
    Resolve face information from NFD's face-query dataset.

    :param app: the NDNApp used to express the Interest
    :param uri: face URI (exact match) or URI scheme (fuzzy match)
    :param fuzzy_query: match by URI scheme instead of the full URI
    :return: a face ID for exact queries, the full FaceStatusMsg for fuzzy
        ones, or None when the query fails
    """
    query_filter = FaceQueryFilter()
    query_filter.face_query_filter = FaceQueryFilterValue()
    if fuzzy_query:
        query_filter.face_query_filter.uri_scheme = uri.encode('utf-8')
    else:
        query_filter.face_query_filter.uri = uri.encode('utf-8')
    # The encoded filter rides inside a single name component.
    name = Name.from_str("/localhost/nfd/faces/query") + [
        Component.from_bytes(query_filter.encode())
    ]
    try:
        _, _, data = await app.express_interest(
            name, lifetime=1000, can_be_prefix=True, must_be_fresh=True)
    except (InterestCanceled, InterestTimeout, InterestNack, ValidationFailure, NetworkError):
        logging.error(f'Query failed')
        return None
    ret = FaceStatusMsg.parse(data)
    logging.info(ret)
    if fuzzy_query:
        return ret
    return ret.face_status[0].face_id
def test_state_vector_decode():
    """Decode a hand-encoded state vector and verify both entries."""
    # Wire carries two (node-id, seq) pairs: ("one", 1) and ("two", 2).
    wire = b'\xCA\x03\x6F\x6E\x65\xCB\x01\x01\xCA\x03\x74\x77\x6F\xCB\x01\x02'
    component = Component.from_bytes(wire, StateVectorModelTypes.VECTOR.value)
    sv = StateVector(component)
    assert sv.get("one") == 1
    assert sv.get("two") == 2
async def _check(self, method: str, repo_name, process_id: int) -> RepoCommandResponse:
    """
    Send a "<method> check" command to the repo and return the parsed
    response message.

    :param method: the command verb being checked (e.g. 'insert')
    :param repo_name: name of the repo to query
    :param process_id: process id assigned by the original command
    :return: the parsed RepoCommandResponse, or None on any failure
    # TODO: Use command interests instead of regular interests
    """
    cmd_param = RepoCommandParameter()
    cmd_param.process_id = process_id
    cmd_param_bytes = cmd_param.encode()
    name = repo_name[:]
    name.append(method + ' check')
    name.append(Component.from_bytes(cmd_param_bytes))
    try:
        print(f'Expressing interest: {Name.to_str(name)}')
        data_name, meta_info, content = await self.app.express_interest(
            name, must_be_fresh=True, can_be_prefix=False, lifetime=1000)
        print(f'Received data name: {Name.to_str(data_name)}')
    except InterestNack as e:
        print(f'Nacked with reason={e.reason}')
        return None
    except InterestTimeout:
        print(f'Timeout: {Name.to_str(name)}')
        return None
    try:
        cmd_response = RepoCommandResponse.parse(content)
    except DecodeError as exc:
        logging.warning('Response blob decoding failed')
        return None
    except Exception as e:
        # Bug fix: previously fell through to `return cmd_response`, which
        # raised UnboundLocalError because cmd_response was never assigned.
        print(e)
        return None
    return cmd_response
def encode(self) -> Component:
    """Serialize this metadata object into a single NDN name component."""
    model = MetaDataModel()
    # Copy the instance fields into the TLV model one-to-one.
    for field in ('source', 'tseqno', 'nulled', 'nopcks'):
        setattr(model, field, getattr(self, field))
    return Component.from_bytes(model.encode())
def __init__(self, app: NDNApp, groupPrefix: Name, nid: Name, updateCallback: Callable,
             cacheOthers: bool, storage: Optional[Storage] = None,
             securityOptions: Optional[SecurityOptions] = None) -> None:
    """Initialize the sync node, optionally caching other nodes' data."""
    self.cacheOthers = cacheOthers
    # When caching others, register the whole group's data prefix;
    # otherwise only serve data published under this node's own id.
    if self.cacheOthers:
        preDataPrefix = groupPrefix + [Component.from_str("data")]
    else:
        preDataPrefix = groupPrefix + [Component.from_str("data")] + nid
    preSyncPrefix = groupPrefix + [Component.from_str("sync")]
    super().__init__(app, preSyncPrefix, preDataPrefix, groupPrefix, nid,
                     updateCallback, storage, securityOptions)
def test_app_param():
    """Parse an Interest carrying ApplicationParameters: check the
    params-sha256 name component and the signature digest coverage."""
    # Hand-crafted Interest wire: 3-component name + params digest component,
    # 4000 ms lifetime TLV, and a 4-byte ApplicationParameters TLV.
    interest = (
        b'\x05\x42\x07\x36\x08\x05local\x08\x03ndn\x08\x06prefix'
        b'\x02 \x47\x75\x6f\x21\xfe\x0e\xe2\x65\x14\x9a\xa2\xbe\x3c\x63\xc5\x38'
        b'\xa7\x23\x78\xe9\xb0\xa5\x8b\x39\xc5\x91\x63\x67\xd3\x5b\xda\x10'
        b'\x0c\x02\x0f\xa0\x24\x04\x01\x02\x03\x04')
    name, params, app_params, sig = parse_interest(interest)
    assert name == Name.from_str(
        '/local/ndn/prefix'
        '/params-sha256=47756f21fe0ee265149aa2be3c63c538a72378e9b0a58b39c5916367d35bda10'
    )
    assert app_params == b'\x01\x02\x03\x04'
    # No optional Interest fields are present in the wire except lifetime.
    assert not params.can_be_prefix
    assert not params.must_be_fresh
    assert params.nonce is None
    assert params.lifetime == 4000
    assert params.hop_limit is None
    assert sig.signature_info is None
    # The digest component must equal SHA-256 of the ApplicationParameters TLV.
    algo = hashlib.sha256()
    algo.update(b'\x24\x04\x01\x02\x03\x04')
    assert Component.get_value(name[-1]) == algo.digest()
    # The digest reported by the parser covers exactly the listed parts.
    algo = hashlib.sha256()
    for part in sig.digest_covered_part:
        algo.update(part)
    assert sig.digest_value_buf == algo.digest()
def list_key_tree(self):
    """
    Return the id-key-cert tree in a JSON like dict object.

    Shape: {identity: {'default': '*'|' ', 'keys': {key: {'default',
    'key_type', 'certs': {cert: {...}}}}}} — '*' marks the default entry
    at each level.
    """
    def get_key_type(key):
        # Probe the key bits with each backend; 'Unknown' if neither parses.
        key = bytes(key)
        try:
            RSA.import_key(key)
            return 'RSA'
        except ValueError:
            pass
        try:
            ECC.import_key(key)
            return 'ECC'
        except ValueError:
            pass
        return 'Unknown'

    # Human-readable labels for the known signature types.
    sig_type_dic = {
        SignatureType.NOT_SIGNED: 'NotSigned',
        SignatureType.DIGEST_SHA256: 'DigestSha256',
        SignatureType.SHA256_WITH_RSA: 'SignatureSha256WithRsa',
        SignatureType.SHA256_WITH_ECDSA: 'SignatureSha256WithEcdsa',
        SignatureType.HMAC_WITH_SHA256: 'SignatureHmacWithSha256'
    }
    pib = self.app.keychain
    ret = {}
    for id_name, id_obj in pib.items():
        cur_id = {'default': '*' if id_obj.is_default else ' ', 'keys': {}}
        for key_name, key_obj in id_obj.items():
            cur_key = {
                'default': '*' if key_obj.is_default else ' ',
                'key_type': get_key_type(key_obj.key_bits),
                'certs': {}
            }
            for cert_name, cert_obj in key_obj.items():
                cert_v2 = parse_certificate(cert_obj.data)
                cur_cert = {
                    'default': '*' if cert_obj.is_default else ' ',
                    'not_before':
                        bytes(cert_v2.signature_info.validity_period.not_before).decode(),
                    'not_after':
                        bytes(cert_v2.signature_info.validity_period.not_after).decode(),
                    # Second-to-last certificate name component is the issuer id.
                    'issuer_id': Component.to_str(cert_v2.name[-2]),
                    'key_locator': Name.to_str(cert_v2.signature_info.key_locator.name),
                    'signature_type':
                        sig_type_dic.get(cert_v2.signature_info.signature_type, 'Unknown')
                }
                cur_key['certs'][Name.to_str(cert_name)] = cur_cert
            cur_id['keys'][Name.to_str(key_name)] = cur_key
        ret[Name.to_str(id_name)] = cur_id
    return ret
def test_basic_encode():
    """Parse a URI mixing typed, empty, dot, percent-escaped and digest
    components, and check the resulting wire encoding."""
    uri = ('/Emid/25042=P3//./%1C%9F'
           '/sha256digest=0415e3624a151850ac686c84f155f29808c0dd73819aa4a4c20be73a4d8a874c')
    name = Name.from_str(uri)
    assert len(name) == 6
    assert name[0] == Component.from_bytes(b'Emid')
    # '25042=P3' encodes with an explicit TLV type (0x61d2 == 25042).
    assert name[1] == b'\xfd\x61\xd2\x02\x50\x33'
    # '//' produces an empty component; '.' and %-escapes are raw bytes.
    assert name[2] == Component.from_bytes(b'')
    assert name[3] == Component.from_bytes(b'.')
    assert name[4] == Component.from_bytes(b'\x1C\x9F')
    assert Component.get_type(name[5]) == Component.TYPE_IMPLICIT_SHA256
    assert Name.encoded_length(name) == 57
    assert (Name.encode(name) ==
            b'\x07\x37\x08\x04Emid\xfda\xd2\x02P3\x08\x00\x08\x01.\x08\x02\x1c\x9f'
            b'\x01 \x04\x15\xe3bJ\x15\x18P\xachl\x84\xf1U\xf2\x98\x08\xc0\xdds\x81'
            b'\x9a\xa4\xa4\xc2\x0b\xe7:M\x8a\x87L')
def test_state_vector_decode() -> None:
    """Decode a hard-coded SVS state vector component and check its entries."""
    # Per the SVS protocol wire format: ("one", 1) and ("two", 2).
    wire = b'\xCA\x03\x6F\x6E\x65\xCB\x01\x01\xCA\x03\x74\x77\x6F\xCB\x01\x02'
    vector_component = Component.from_bytes(wire, SVSyncTlvTypes.VECTOR.value)
    sv: StateVector = StateVector(vector_component)
    assert sv.get("one") == 1
    assert sv.get("two") == 2
def on_interest(name, param, _app_param):
    """Reply to every Interest with a versioned 'world!' Data packet."""
    print(f'>> I: {Name.to_str(name)}, {param}')
    # Version the reply name with the current timestamp.
    versioned_name = name + [Component.from_version(timestamp())]
    payload = "world!".encode()
    app.put_data(versioned_name, content=payload, freshness_period=10000)
    print(f'<< D: {Name.to_str(versioned_name)}')
    print(f'Content: {payload.decode()}')
    print('')
def ref_list(self, name: FormalName, _param: InterestParam, _app_param: typing.Optional[BinaryStr]):
    """Answer a ref-list Interest with one '<sha1-hex> <ref>' line per head."""
    heads = self.repo.get_ref_heads()
    lines = [f'{head.hex()} {ref}' for ref, head in heads.items()]
    result = '\n'.join(lines) + '\n'
    logging.debug(f'On ref-list: {repr(result)}')
    # Timestamp the reply name so every response is distinct.
    data_name = name + [Component.from_timestamp(timestamp())]
    self.app.put_data(data_name, result.encode(), freshness_period=1000)
def info_interest(name: FormalName, param: InterestParam, _app_param: Optional[BinaryStr]):
    """Serve file1 metadata, or a chunk placeholder for any other component."""
    print("Received interest for " + Name.to_str(name))
    chunk = Component.to_str(name[-1])
    if chunk == 'file1':
        # Metadata responses expire almost immediately (1 ms freshness).
        app.put_data(name, content=json.dumps(file1_metadata).encode(),
                     freshness_period=1)
        return
    data = 'file1, chunk' + chunk
    app.put_data(name, content=data.encode(), freshness_period=100)
def on_interest(name: FormalName, param: InterestParam, _app_param: Optional[BinaryStr]):
    """
    Serve video frames over NDN.

    Handles three request shapes:
      * .../metadata/... — reply with a name carrying the current I-frame number;
      * .../frame/<n>    — reply with frame n if buffered, else park the
        Interest until the frame is produced;
      * anything else    — log as an unknown request.
    Finally, flush any parked Interests whose frames are now available.
    """
    logging.info(f'>> I: {Name.to_str(name)}, {param}')
    request = Name.to_str(name).split("/")
    print("handle Interest Name", Name.to_str(name))
    if request[-2] == "metadata":
        print("handle Meta data")
        # content = json.dumps(list(pred_frame_buffer)).encode()
        # content = str(current_I_frame).encode()
        # Reply content: the Interest name plus the current I-frame number
        # appended as a number component, rendered as a string.
        content = Name.to_str(
            name + [Component.from_number(current_I_frame, 0)]).encode()
        name = name
        app.put_data(name, content=content, freshness_period=300)
        logging.info("handle to name " + Name.to_str(name))
    elif request[-3] == "frame":
        interest_frame_num = int(request[-1])
        if interest_frame_num in frame_buffer_dict:
            content = frame_buffer_dict[interest_frame_num]
            # b'\x08\x02\x00\x00' is a raw generic component appended as the
            # single segment; final_block_id marks it as the only segment.
            app.put_data(name + [b'\x08\x02\x00\x00'], content=content,
                         freshness_period=2000,
                         final_block_id=Component.from_segment(0))
            print(
                f'handle interest: publish pending interest' +
                Name.to_str(name) + "------------/" +
                str(interest_frame_num) + "length: ", len(content))
        else:
            # Frame not produced yet — park the Interest for later delivery.
            interest_buffer.append([interest_frame_num, name])
    else:
        print("handle Request missing ", Name.to_str(name))
    # Satisfy parked Interests whose frames have since been buffered; stop as
    # soon as the oldest parked request is newer than the newest frame.
    while len(interest_buffer) > 0 and len(
            frame_buffer) > 0 and frame_buffer[-1] >= interest_buffer[0][0]:
        pendingInterest = interest_buffer.popleft()
        pendingFN = pendingInterest[0]
        pendingName = pendingInterest[1]
        if pendingFN in frame_buffer_dict:
            content = frame_buffer_dict[pendingFN]
            app.put_data(pendingName + [b'\x08\x02\x00\x00'], content=content,
                         freshness_period=2000,
                         final_block_id=Component.from_segment(0))
            print(
                f'handle interest: publish pending interest' +
                Name.to_str(pendingName) + "------------/" +
                str(pendingFN) + "length: ", len(content))
def test_number():
    """Verify numeric name-component encoders and timestamp round-trips."""
    # Typed number components encode as <type><length=1><value>: segment is
    # 0x21 ('!'), byte offset 0x22, sequence number 0x25 ('%'), version 0x23 ('#').
    assert Component.from_segment(13) == b'!\x01\r'
    assert Component.from_byte_offset(13) == b'\x22\x01\r'
    assert Component.from_sequence_num(13) == b'%\x01\r'
    assert Component.from_version(13) == b'#\x01\r'
    timeval = 15686790223318112
    comp = Component.from_timestamp(timeval)
    # Timestamp components use type 0x24 (36); value is the big-endian bytes.
    assert Component.get_type(comp) == 36
    assert Component.get_value(comp) == b'\x00\x37\xbb\x0d\x76\xed\x4c\x60'
    assert Component.to_number(comp) == timeval
async def face_event(self):
    """
    Follow NFD's face event notification stream, emitting each event as a
    'face event' message and appending it to self.event_list.

    Runs until the face is closed (InterestCanceled / NetworkError); on
    timeout, NACK or validation failure it resets and re-discovers the
    current sequence number.
    """
    # last_seq < 0 means "unknown": discover the stream head first.
    last_seq = -1
    name_prefix = Name.from_str('/localhost/nfd/faces/events')
    while True:
        if last_seq >= 0:
            # Follow the stream: request exactly the next sequence number.
            name = name_prefix + [Component.from_sequence_num(last_seq + 1)]
            init = False
        else:
            # Discover: fresh, prefix-matching Interest on the bare prefix.
            name = name_prefix
            init = True
        logging.info("Face event notification stream %s", Name.to_str(name))
        try:
            data_name, _, content = await self.app.express_interest(
                name, must_be_fresh=init, can_be_prefix=init, lifetime=60000)
            # The replied name's last component carries the sequence number.
            last_seq = Component.to_number(data_name[-1])
            timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            if not content:
                print('ERROR: Face event is empty')
            elif content[0] == 0x65:
                # Leading 0x65 byte: treated as a status/error response,
                # not an event — presumably an NFD ControlResponse TLV.
                msg = parse_response(content)
                print('Query failed with response', msg['status_code'], msg['status_text'])
            else:
                dic = self.face_event_to_dict(content)
                dic['seq'] = str(last_seq)
                dic['time'] = timestamp
                await self.emit('face event', dic)
                self.event_list.append(dic)
        except (InterestCanceled, NetworkError):
            # Face closed or network gone: stop following the stream.
            break
        except InterestTimeout:
            last_seq = -1
        except InterestNack as e:
            print(f'Face events nacked with reason={e.reason}')
            last_seq = -1
        except ValidationFailure:
            print('Face events failed to validate')
            last_seq = -1
        # Small pause between polls.
        await asyncio.sleep(0.1)
def on_interest(self, name: FormalName, _param: InterestParam, _app_param: typing.Optional[BinaryStr]):
    """
    Serve one segment of a git object stored in the repo.

    The Interest name is <prefix>/<obj>[/<seg>]; a missing segment
    component implies segment 0. The response wraps the object type and
    the requested data slice in a SyncObject TLV.
    """
    # Get the name and segment number
    if Component.get_type(name[-1]) == Component.TYPE_SEGMENT:
        obj_name = Component.get_value(name[-2])
        seg_no = Component.to_number(name[-1])
    else:
        obj_name = Component.get_value(name[-1])
        seg_no = 0
    # Read the data. Git objects are small, so we read the whole object.
    try:
        obj_type, data = self.repo.read_obj(bytes(obj_name))
    except ValueError:
        logging.warning(f'Requested file {obj_name} does not exist in repo {self.repo.repo_name}')
        return
    # Extract the segment and calculate the response name
    data_name = self.prefix + [Component.from_bytes(obj_name), Component.from_segment(seg_no)]
    start_pos = seg_no * SEGMENTATION_SIZE
    data_seg = data[start_pos:start_pos + SEGMENTATION_SIZE]
    packet_obj = SyncObject()
    packet_obj.obj_type = obj_type.encode()
    packet_obj.obj_data = data_seg
    wire = packet_obj.encode()
    # Bug fix: final_block_id must name the LAST segment (count - 1), not the
    # segment count — otherwise consumers fetch one nonexistent extra segment.
    # (Matches the seg_cnt - 1 convention used elsewhere in this project.)
    seg_cnt = (len(data) + SEGMENTATION_SIZE - 1) // SEGMENTATION_SIZE
    final_block = max(seg_cnt - 1, 0)
    self.app.put_data(data_name, wire, freshness_period=3600000,
                      final_block_id=Component.from_segment(final_block))
    # Log the segment actually served (previously logged the final block).
    logging.debug(f'Responded {obj_name} segment {seg_no} in repo {self.repo.repo_name}')
def _on_interest(self, int_name, _int_param, _app_param):
    """Serve a pre-encoded packet indexed by the Interest's segment number."""
    logging.info(f'On interest: {Name.to_str(int_name)}')
    # The last name component carries the segment index into the cache.
    seq = Component.to_number(int_name[-1])
    if 0 <= seq < len(self.encoded_packets):
        self.app.put_raw_packet(self.encoded_packets[seq])
        logging.info(f'Serve data: {Name.to_str(int_name)}')
    else:
        logging.info(f'Data does not exist: {Name.to_str(int_name)}')
def decode_sql(name, catalog_prefix):
    """
    Extract the SQL statements embedded in an Interest name.

    :param name: full Interest name containing the SQL TLV component
    :param catalog_prefix: catalog prefix that precedes the SQL component
    :return: list of SQL statements as strings
    """
    # The SQL component sits immediately after the catalog prefix.
    prefix_len = len(Name.normalize(catalog_prefix))
    sql_component = name[prefix_len]
    model = SqlsTlvModel.parse(Component.get_value(sql_component).tobytes())
    # Each parsed entry is a bytearray; decode all of them to str.
    return [entry.tobytes().decode() for entry in model.sqls]