def updateStateVector(self, seqNum: int, nid: Name = None) -> None:
    """
    Record a new sequence number for a node in the state vector.

    :param seqNum: new sequence number for the node
    :param nid: node name to update; defaults to this node's own id
    """
    # Explicit None test: 'if not nid' would also replace a falsy-but-valid Name.
    if nid is None:
        nid = self.nid
    # Keep our own cached sequence number in sync when updating ourselves.
    if Name.to_str(nid) == Name.to_str(self.nid):
        self.seqNum = seqNum
    self.vector.set(Name.to_str(nid), seqNum)
    # A state change makes the currently scheduled sync interest stale.
    self.scheduler.skip_interval()
def update_prefixes_in_storage(storage: Storage, prefix) -> bool:
    """
    Add a new prefix into the database.

    :param storage: Storage backend holding the serialized prefix list.
    :param prefix: NonStrictName to register.
    :return: True if this prefix (or a covering prefix) was registered before.
    """
    prefixes_msg = PrefixesInStorage()
    ret = storage.get('prefixes')
    if ret:
        prefixes_msg = PrefixesInStorage.parse(ret)
    # Check if this prefix already exists
    prefix_str = Name.to_str(prefix)
    for existing_prefix in prefixes_msg.prefixes:
        existing_prefix_str = Name.to_str(existing_prefix)
        # Match only on whole name components: '/a/b' covers '/a/b/c' but must
        # not cover '/a/bc' (a plain startswith would). rstrip handles root '/'.
        if (existing_prefix_str == prefix_str
                or prefix_str.startswith(existing_prefix_str.rstrip('/') + '/')):
            return True
    prefixes_msg.prefixes.append(Name.normalize(prefix))
    prefixes_msg_bytes = prefixes_msg.encode()
    storage.put('prefixes', bytes(prefixes_msg_bytes))
    logging.info(f'Added new prefix into the database: {prefix_str}')
    return False
async def _process_insert(self, int_name: FormalName, int_param: InterestParam,
                          app_param: Optional[BinaryStr]):
    """Handle an insert command: ACK it, fetch the mapping list, update storage."""
    print(">>>>", int_name, int_param, app_param)
    cmd_param = CatalogCommandParameter.parse(app_param)
    name = cmd_param.name
    # ACK
    self.app.put_data(int_name, "".encode(), freshness_period=0)
    # INTEREST
    try:
        name = name + ['fetch_map']
        print("Sending interest on : ", Name.to_str(name))
        _, _, data_bytes = await self.app.express_interest(
            name, must_be_fresh=True, can_be_prefix=False)
    except InterestNack:
        print(">>>NACK")
        return None
    except InterestTimeout:
        print(">>>TIMEOUT")
        return None
    # Apply the fetched insert/delete lists against local storage.
    fetch_map = CatalogDataListParameter.parse(data_bytes)
    mapping = Name.to_str(fetch_map.name)
    insert_keys = [Name.to_str(n) for n in fetch_map.insert_data_names]
    delete_keys = [Name.to_str(n) for n in fetch_map.delete_data_names]
    self.storage.put_batch(insert_keys, [mapping] * len(insert_keys))
    self.storage.remove_batch(delete_keys)
async def express_interest(self, name, app_param, be_fresh: bool,
                           be_prefix: bool, need_sig: bool):
    """
    Express an interest and wrap the outcome into a response dict.

    :param name: interest name
    :param app_param: application parameters to attach
    :param be_fresh: set MustBeFresh
    :param be_prefix: set CanBePrefix
    :param need_sig: sign the interest and validate the data signature
    :return: dict with 'response_type' and, on success, the data fields
    """
    ret = {'name': Name.to_str(name)}
    # Build keyword arguments once; the signed variant adds identity/validator.
    kwargs = dict(must_be_fresh=be_fresh, can_be_prefix=be_prefix)
    if need_sig:
        kwargs['identity'] = self.system_prefix
        kwargs['validator'] = self.verify_device_ecdsa_signature
    try:
        data_name, meta_info, content = await self.app.express_interest(
            name, app_param, **kwargs)
    except InterestNack as e:
        ret['response_type'] = 'NetworkNack'
        ret['reason'] = e.reason
    except InterestTimeout:
        ret['response_type'] = 'Timeout'
    else:
        ret['response_type'] = 'Data'
        ret['name'] = Name.to_str(data_name)
        ret['freshness_period'] = meta_info.freshness_period
        ret['content_type'] = meta_info.content_type
        ret['content'] = content
    return ret
def _on_interest(self, int_name, _int_param, _app_param):
    """Serve a cached raw data packet matching the exact interest name, if any."""
    # Hoisted: the original recomputed Name.to_str(int_name) up to 5 times.
    name_str = Name.to_str(int_name)
    logging.info(f'On interest: {name_str}')
    if name_str in self.name_str_to_data:
        self.app.put_raw_packet(self.name_str_to_data[name_str])
        logging.info(f'Serve data: {name_str}')
    else:
        logging.info(f'Data does not exist: {name_str}')
def _prepare_data(self, file_path: str, name_at_repo, segment_size: int,
                  freshness_period: int, cpu_count: int):
    """
    Shard file into data packets.

    :param file_path: Local FS path to file to insert
    :param name_at_repo: Name used to store file at repo
    :param segment_size: maximum payload bytes per segment
    :param freshness_period: FreshnessPeriod applied to each packet
    :param cpu_count: number of worker processes used for encoding
    """
    if not os.path.exists(file_path):
        logging.error(f'file {file_path} does not exist')
        return 0
    with open(file_path, 'rb') as binary_file:
        # bytes supports len() and slicing; the bytearray copy was pure overhead.
        b_array = binary_file.read()
    if len(b_array) == 0:
        logging.warning("File is empty")
        return 0
    # use multiple threads to speed up creating TLV
    seg_cnt = (len(b_array) + segment_size - 1) // segment_size
    final_block_id = Component.from_segment(seg_cnt - 1)
    packet_params = [[
        name_at_repo + [Component.from_segment(seq)],
        b_array[seq * segment_size:(seq + 1) * segment_size],
        freshness_period,
        final_block_id,
    ] for seq in range(seg_cnt)]
    # Hoisted: Name.to_str(name_at_repo) was computed three times; the
    # redundant pre-assignment of an empty list was immediately overwritten.
    name_str = Name.to_str(name_at_repo)
    with multiprocessing.Pool(processes=cpu_count) as p:
        self.encoded_packets[name_str] = p.starmap(_create_packets, packet_params)
    logging.info("Prepared {} data for {}".format(seg_cnt, name_str))
def list_key_tree(self):
    """
    Return the id-key-cert tree in a JSON like dict object.

    Shape: {identity: {'default', 'keys': {key: {'default', 'key_type',
    'certs': {cert: {...}}}}}}, where 'default' is '*' for the default
    entry at each level and ' ' otherwise.
    """
    def get_key_type(key):
        # Classify the public key bits by probing each importer in turn.
        key = bytes(key)
        try:
            RSA.import_key(key)
            return 'RSA'
        except ValueError:
            pass
        try:
            ECC.import_key(key)
            return 'ECC'
        except ValueError:
            pass
        return 'Unknown'
    # Map wire signature-type codes to their spec names.
    sig_type_dic = {
        SignatureType.NOT_SIGNED: 'NotSigned',
        SignatureType.DIGEST_SHA256: 'DigestSha256',
        SignatureType.SHA256_WITH_RSA: 'SignatureSha256WithRsa',
        SignatureType.SHA256_WITH_ECDSA: 'SignatureSha256WithEcdsa',
        SignatureType.HMAC_WITH_SHA256: 'SignatureHmacWithSha256'
    }
    pib = self.app.keychain
    ret = {}
    # Walk identity -> key -> certificate, marking defaults at each level.
    for id_name, id_obj in pib.items():
        cur_id = {'default': '*' if id_obj.is_default else ' ', 'keys': {}}
        for key_name, key_obj in id_obj.items():
            cur_key = {
                'default': '*' if key_obj.is_default else ' ',
                'key_type': get_key_type(key_obj.key_bits),
                'certs': {}
            }
            for cert_name, cert_obj in key_obj.items():
                cert_v2 = parse_certificate(cert_obj.data)
                cur_cert = {
                    'default': '*' if cert_obj.is_default else ' ',
                    'not_before':
                        bytes(cert_v2.signature_info.validity_period.not_before
                              ).decode(),
                    'not_after':
                        bytes(cert_v2.signature_info.validity_period.not_after
                              ).decode(),
                    # Second-to-last component of a certificate name is the
                    # issuer id.
                    'issuer_id': Component.to_str(cert_v2.name[-2]),
                    'key_locator':
                        Name.to_str(cert_v2.signature_info.key_locator.name),
                    'signature_type':
                        sig_type_dic.get(cert_v2.signature_info.signature_type,
                                         'Unknown')
                }
                cur_key['certs'][Name.to_str(cert_name)] = cur_cert
            cur_id['keys'][Name.to_str(key_name)] = cur_key
        ret[Name.to_str(id_name)] = cur_id
    return ret
async def _receive(self, typ: int, data: BinaryStr):
    """
    Pipeline when a packet is received.

    Dispatches to _on_nack, _on_interest, or _on_data depending on what
    the packet decodes to; decode failures are logged and dropped.

    :param typ: the Type.
    :param data: the Value of the packet with TL.
    """
    logging.debug('Packet received %s, %s' % (typ, bytes(data)))
    if typ == LpTypeNumber.LP_PACKET:
        # Unwrap the LpPacket shell; the fragment carries the real packet.
        try:
            nack_reason, fragment = parse_lp_packet(data, with_tl=True)
        except (DecodeError, TypeError, ValueError, struct.error):
            logging.warning('Unable to decode received packet')
            return
        data = fragment
        # Re-read the Type number from the inner fragment.
        typ, _ = parse_tl_num(data)
    else:
        nack_reason = None
    if nack_reason is not None:
        # A NACK's fragment is the original Interest; only its name is needed.
        try:
            name, _, _, _ = parse_interest(data, with_tl=True)
        except (DecodeError, TypeError, ValueError, struct.error):
            logging.warning('Unable to decode the fragment of LpPacket')
            return
        logging.debug('NetworkNack received %s, reason=%s' %
                      (Name.to_str(name), nack_reason))
        self._on_nack(name, nack_reason)
    else:
        if typ == TypeNumber.INTEREST:
            try:
                name, param, app_param, sig = parse_interest(data, with_tl=True)
            except (DecodeError, TypeError, ValueError, struct.error):
                logging.warning('Unable to decode received packet')
                return
            logging.debug('Interest received %s' % Name.to_str(name))
            # raw_packet is forwarded so handlers can re-emit the exact bytes.
            await self._on_interest(name, param, app_param, sig, raw_packet=data)
        elif typ == TypeNumber.DATA:
            try:
                name, meta_info, content, sig = parse_data(data, with_tl=True)
            except (DecodeError, TypeError, ValueError, struct.error):
                logging.warning('Unable to decode received packet')
                return
            logging.debug('Data received %s' % Name.to_str(name))
            await self._on_data(name, meta_info, content, sig, raw_packet=data)
        else:
            # Unknown top-level type: drop.
            logging.warning('Unable to decode received packet')
async def manage_policy(request):
    """Return all known devices as value/label options for the UI."""
    logging.debug('/invoke-service response')
    ret = [
        {
            'value': Name.to_str(dev.device_identity_name),
            'label': Name.to_str(dev.device_identity_name),
        }
        for dev in controller.device_list.devices
    ]
    return {'device_list': ret}
async def invoke_service(request):
    """Return all known services as value/label options for the UI."""
    # Renamed from 'list': shadowing the builtin hides it for the whole scope.
    services = []
    logging.debug('/invoke-service response')
    for service in controller.service_list.services:
        services.append({
            'value': Name.to_str(service.service_name),
            'label': Name.to_str(service.service_name)
        })
    return {'service_list': services}
async def main() -> int:
    """Parse CLI args, canonicalize name strings, configure logging, run."""
    args = parse_cmd_args()
    # Round-trip through from_str/to_str to canonicalize the name strings.
    args["node_id"] = Name.to_str(Name.from_str(args["node_id"]))
    args["group_prefix"] = Name.to_str(Name.from_str(args["group_prefix"]))
    logging.basicConfig(format='%(asctime)s - %(levelname)s - %(message)s',
                        filename=args["node_id"][1:].replace("/", "_") + ".log",
                        filemode='w+', level=logging.INFO)
    prog = Program(args)
    await prog.run()
    # Was missing: the signature promises int but the function returned None.
    return 0
def on_sd_adv_interest(name: FormalName, param: InterestParam,
                       app_param: Optional[BinaryStr]):
    """
    OnInterest callback when there is an service advertisement

    :param name: Interest packet name
    :param param: Interest parameters
    :app_param: Interest application paramters

    Packet format:
    prefix = /<home-prefix>/<SD=1>/<ADV=0>/device-id
    App Parameter format: TODO:Verifying the signature

    NOTE(review): this function references ``self`` without taking it as a
    parameter, so it is presumably defined inside a method as a closure —
    confirm against the enclosing class.
    """
    # Components between the SD/ADV markers and the last one are the locator.
    locator = name[3:-1]
    logging.debug("Adv Interest sender locator: %s", Name.to_str(locator))
    # First 4 bytes of the app parameter: big-endian freshness period.
    fresh_period = struct.unpack("!I", app_param[:4])[0]
    logging.debug("Adv Interest freshness: %s", str(fresh_period))
    # Remaining bytes: one service id per byte.
    service_ids = [sid for sid in app_param[4:]]
    logging.debug('service ids %s', str(service_ids))
    cur_time = self.get_time_now_ms()
    for sid in service_ids:
        # Name format: /<home-prefix>/<service>/<locator>
        # b'\x08\x01' + sid builds a one-byte GenericNameComponent TLV.
        sname = [self.system_prefix, b'\x08\x01' + bytes([sid])
                 ] + locator
        sname = Name.to_str(sname)
        logging.debug('Service Name: %s', sname)
        # Refresh the expiry of an already-known service, else register it.
        already_added = False
        for item in self.service_list.services:
            if Name.to_str(item.service_name) == sname:
                already_added = True
                item.exp_time = cur_time + fresh_period
        if not already_added:
            service = ServiceItem()
            service.service_name = sname
            service.exp_time = cur_time + fresh_period
            service.service_id = sid
            logging.debug('Add new service into the service list')
            self.service_list.services.append(service)
        # Ensure a per-service-id meta entry (with a fresh AES key) exists.
        already_added = False
        for service_meta in self.service_list.service_meta_items:
            if service_meta.service_id == sid:
                already_added = True
        if not already_added:
            service_meta = ServiceMetaItem()
            service_meta.service_id = sid
            aes_key = urandom(16)
            service_meta.encryption_key = aes_key
            logging.debug('Add new service meta into the service list')
            logging.debug('AES key: ')
            self.service_list.service_meta_items.append(service_meta)
def main() -> int:
    """Canonicalize CLI name arguments, enable data caching, run the program."""
    args = parse_cmd_args()
    # Round-trip each name through Name to get its canonical string form.
    for key in ("node_id", "group_prefix"):
        args[key] = Name.to_str(Name.from_str(args[key]))
    args["cache_data"] = True
    SVSyncLogger.config(False, None, logging.INFO)
    Program(args).run()
    return 0
def main() -> int:
    """Canonicalize CLI name arguments, configure logging, and run the app."""
    args = parse_cmd_args()
    args["node_id"] = Name.to_str(Name.from_str(args["node_id"]))
    args["group_prefix"] = Name.to_str(Name.from_str(args["group_prefix"]))
    # bool(...) replaces the redundant 'True if x else False' ternary.
    SVSyncLogger.config(bool(args["verbose"]), None, logging.INFO)
    try:
        app.run_forever(after_start=start_count(args))
    except (FileNotFoundError, ConnectionRefusedError):
        print('Error: could not connect to NFD for SVS.')
    return 0
def test_state_table_metadata() -> None:
    """Metadata totals must reflect every sequence number the table knows."""
    my_name: Name = Name.from_str("D")
    vector, table = StateVector(), StateTable(my_name)
    for node, seq in (("c", 4), ("a", 6), ("B", 10)):
        vector.set(node, seq)
    vector.set(Name.to_str(my_name), 55)
    missing: List[MissingData] = table.processStateVector(vector, True)
    table.updateMyState(56)
    table.updateMetaData()
    meta = table.getMetaData()
    assert meta.tseqno == 76
    assert bytes(meta.source).decode() == Name.to_str(my_name)
    assert meta.nopcks == 0
async def get_last_frame(device_name):
    """
    Get the last frame number from interest /device/1080p/metadata/timestamp

    :param device_name: first component of the metadata name
    :return: the segment number of the last frame, or None after 100 attempts
    """
    message_counter = 0
    while message_counter < 100:
        try:
            timestamp = ndn.utils.timestamp()
            name = Name.from_str('/{}/1080p/metadata/'.format(device_name)) + [
                Component.from_timestamp(timestamp)
            ]
            print(
                f'Sending Interest {Name.to_str(name)}, {InterestParam(must_be_fresh=False, lifetime=600)}'
            )
            data_name, meta_info, content = await app.express_interest(
                name, must_be_fresh=False, can_be_prefix=True, lifetime=2000)
            print(f'Received Data Name: {Name.to_str(data_name)}')
            ct = bytes(content)
            # Decode the payload properly: the old str(ct)[2:-1] hack left
            # Python escape sequences (e.g. '\\x2f') in the name string and
            # broke parsing for any non-ASCII byte.
            last_frame_name = Name.from_str(ct.decode())
            print("Last Frame number ", Name.to_str(last_frame_name))
            last_frame_num = Component.to_number(last_frame_name[-1])
            return last_frame_num
        except InterestNack as e:
            print(f'Nacked with reason={e.reason}')
        except InterestTimeout:
            print(f'Timeout')
        except InterestCanceled:
            print(f'Canceled')
        except ValidationFailure:
            print(f'Data failed to validate')
        message_counter += 1
async def handleReceive(self):
    """
    Handle one incoming TCP connection.
    Multiple data packets may be transferred over a single connection.
    Each packet is stored, its prefix is registered, and serving begins
    for prefixes not seen before.
    """
    while True:
        try:
            # Stream framing: TLV Type, then Length, then exactly that many
            # bytes of Data value.
            ret = await read_tl_num_from_stream(self.reader)
            assert ret == TypeNumber.DATA
            siz = await read_tl_num_from_stream(self.reader)
            data_bytes = await self.reader.readexactly(siz)
        except aio.IncompleteReadError as exc:
            # Peer closed the connection; stop serving it.
            self.writer.close()
            logging.info('Closed TCP connection')
            return
        except Exception as exc:
            # NOTE(review): broad catch silently ends the connection on any
            # error — consider narrowing and using logging instead of print.
            print(exc)
            return
        # Parse data again to obtain the name
        (data_name, _, _, _) = parse_data(data_bytes, with_tl=False)
        self.storage.put(Name.to_str(data_name), data_bytes)
        logging.info(f'Inserted data: {Name.to_str(data_name)}')
        # Register prefix for this data
        existing = CommandHandle.update_prefixes_in_storage(
            self.storage, data_name)
        if not existing:
            # First time we see this prefix: start listening for interests.
            self.read_handle.listen(data_name)
async def verify_ecdsa_signature(name: FormalName, sig: SignaturePtrs) -> bool:
    """
    Verify a SHA256-with-ECDSA signature against the pre-shared trust anchor.

    :param name: name of the signed packet (unused; kept for validator API)
    :param sig: signature pointers extracted from the packet
    :return: True iff the signature verifies against the anchor's public key
    """
    global local_anchor
    sig_info = sig.signature_info
    covered_part = sig.signature_covered_part
    sig_value = sig.signature_value_buf
    if not sig_info or sig_info.signature_type != SignatureType.SHA256_WITH_ECDSA:
        return False
    if not covered_part or not sig_value:
        return False
    # Guard added: a missing KeyLocator previously raised an uncaught
    # AttributeError instead of failing verification.
    if not sig_info.key_locator or not sig_info.key_locator.name:
        return False
    key_name = sig_info.key_locator.name  # the old [0:] slice was a no-op copy
    logging.debug('Extract key_name: %s', Name.to_str(key_name))
    # local trust anchor pre-shared, already know server's public key
    try:
        key_bits = bytes(local_anchor.content)
    except (KeyError, AttributeError):
        logging.debug('Cannot load pub key from received certificate')
        return False
    pk = ECC.import_key(key_bits)
    verifier = DSS.new(pk, 'fips-186-3', 'der')
    sha256_hash = SHA256.new()
    # The signature covers these buffers, hashed in order.
    for blk in covered_part:
        sha256_hash.update(blk)
    try:
        verifier.verify(sha256_hash, bytes(sig_value))
    except ValueError:
        return False
    return True
async def verify_device_ecdsa_signature(self, name: FormalName,
                                        sig: SignaturePtrs) -> bool:
    """
    Verify a SHA256-with-ECDSA signature using a device public key looked
    up in the local keychain.

    :param name: name of the signed packet (unused; kept for validator API)
    :param sig: signature pointers extracted from the packet
    :return: True iff the signature verifies
    """
    sig_info = sig.signature_info
    covered_part = sig.signature_covered_part
    sig_value = sig.signature_value_buf
    # Only ECDSA signatures are accepted.
    if not sig_info or sig_info.signature_type != SignatureType.SHA256_WITH_ECDSA:
        return False
    if not covered_part or not sig_value:
        return False
    # Rebuild the identity name from the KeyLocator: the first component
    # plus the two components preceding the key-id suffix.
    # NOTE(review): assumes key names shaped like
    # /<root>/.../<id-parts>/KEY/<key-id> — confirm against the naming scheme.
    identity = [sig_info.key_locator.name[0]
                ] + sig_info.key_locator.name[-4:-2]
    logging.debug('Extract identity id from key id: %s', Name.to_str(identity))
    key_bits = None
    try:
        key_bits = self.app.keychain.get(identity).default_key().key_bits
    except (KeyError, AttributeError):
        logging.error('Cannot find pub key from keychain')
        return False
    pk = ECC.import_key(key_bits)
    verifier = DSS.new(pk, 'fips-186-3', 'der')
    sha256_hash = SHA256.new()
    # The signature covers these buffers, hashed in order.
    for blk in covered_part:
        sha256_hash.update(blk)
    logging.debug(bytes(sig_value))
    logging.debug(len(bytes(sig_value)))
    try:
        verifier.verify(sha256_hash, bytes(sig_value))
    except ValueError:
        return False
    return True
def test_state_vector_component_functionality():
    """A StateVector component must survive a name string round trip."""
    vector = StateVector()
    vector.set("one", 1)
    vector.set("two", 2)
    composed = Name.from_str("/state_vector/test") + [vector.to_component()]
    round_tripped = Name.from_str(Name.to_str(composed))
    assert round_tripped == composed
def reply_to_cmd(self, int_name, response: RepoCommandResponse):
    """
    Reply to a command interest

    :param int_name: name of the command interest being answered
    :param response: response object to encode and send
    """
    logging.info('Reply to command: {}'.format(Name.to_str(int_name)))
    wire = response.encode()
    self.app.put_data(int_name, wire)
async def device_list(request):
    """Return all known devices as id/info/identity-name records."""
    devices = [
        {
            'deviceId': bytes(dev.device_id).decode(),
            'deviceInfo': bytes(dev.device_info).decode(),
            'deviceIdentityName': Name.to_str(dev.device_identity_name),
        }
        for dev in controller.device_list.devices
    ]
    return {'device_list': devices}
def bootstrap():
    """
    Load the server's key material and the testbed trust anchor.

    Populates the module-level ``signer`` (an ECDSA signer built from the
    safebag's private key) and ``trust_anchor`` (the parsed testbed cert).
    """
    global trust_anchor, signer
    import_safebag("sec/server.safebag", "1234")
    import_cert("sec/server.ndncert")
    # The safebag file is base64 text wrapping a SafeBag TLV.
    with open("sec/server.safebag", "r") as safebag:
        wire = safebag.read()
        wire = base64.b64decode(wire)
        wire = parse_and_check_tl(wire, SecurityV2TypeNumber.SAFE_BAG)
        bag = SafeBag.parse(wire)
        # The certificate name minus its last two components is the key name.
        testbed_signed = CertificateV2Value.parse(bag.certificate_v2)
        server_key_name = Name.to_str(testbed_signed.name[:-2])
        # The private key is DER-encoded and protected with the passphrase.
        privateKey = serialization.load_der_private_key(
            bytes(bag.encrypted_key_bag), password=b'1234',
            backend=default_backend())
        server_prv_key = privateKey.private_bytes(
            Encoding.DER, PrivateFormat.PKCS8, NoEncryption())
        signer = Sha256WithEcdsaSigner(server_key_name, server_prv_key)
    # The testbed anchor certificate is base64 text as well.
    with open("sec/testbed.anchor", "r") as ndncert:
        wire = ndncert.read()
        wire = base64.b64decode(wire)
        trust_anchor = parse_certificate(wire)
def __init__(self, mynid: Name) -> None:
    """Initialize an empty state table owned by the node *mynid*."""
    self.table = StateVector()
    # Metadata starts zeroed, tagged with this node's canonical name.
    meta = MetaData()
    meta.source = Name.to_str(mynid).encode()
    meta.tseqno = 0
    meta.nulled = 0
    meta.nopcks = 0
    self.meta = meta
    self.parts = [[0, 0]]
async def metadata_received(interest_name, data_name, meta_info, content):
    """Parse fetched metadata and start the chunk download worker."""
    print(f'{time.time() - start_time} \t received metadata')
    data = bytes(content)
    metadata = json.loads(data.decode())
    # asyncio.wait() no longer accepts bare coroutines (deprecated in 3.8,
    # removed in 3.11); gather also propagates worker exceptions instead of
    # silently collecting them.
    await asyncio.gather(
        download_chuncks_worker(Name.to_str(data_name), 1, 10),
    )
def info_interest(name: FormalName, param: InterestParam,
                  _app_param: Optional[BinaryStr]):
    """Answer info interests: metadata for 'file1', synthetic chunk data otherwise."""
    print("Received interest for " + Name.to_str(name))
    chunk = Component.to_str(name[-1])
    if chunk == 'file1':
        payload = json.dumps(file1_metadata).encode()
        app.put_data(name, content=payload, freshness_period=1)
        return
    payload = ('file1, chunk' + chunk).encode()
    app.put_data(name, content=payload, freshness_period=100)
def test_state_vector_component_functionality() -> None:
    """A StateVector must behave as an ordinary component inside a name."""
    vec: StateVector = StateVector()
    vec.set("one", 1)
    vec.set("two", 2)
    # Round-trip the composed name through its canonical string form.
    composed: Name = Name.from_str("/state_vector/test") + [vec.to_component()]
    assert Name.from_str(Name.to_str(composed)) == composed
async def _retry(seq: int):
    """
    Retry 3 times fetching data of the given sequence number or fail.

    On success, stores the raw packet in seq_to_data_packet and may update
    final_id; on failure, sets is_failed. Either way the semaphore slot is
    released and received_or_fail is signalled.

    :param seq: block_id of data
    """
    nonlocal app, name, semaphore, is_failed, received_or_fail, final_id
    int_name = name[:]
    # NOTE(review): appends the decimal string of seq rather than a segment
    # component — confirm this matches the publisher's naming convention.
    int_name.append(str(seq))
    trial_times = 0
    while True:
        trial_times += 1
        if trial_times > 3:
            # Out of retries: mark failure and wake the waiter.
            semaphore.release()
            is_failed = True
            received_or_fail.set()
            return
        try:
            print(datetime.now().strftime("%H:%M:%S.%f "), end='')
            print('Express Interest: {}'.format(Name.to_str(int_name)))
            # Get the data name first, and then use the name to get the entire packet value (including sig). This
            # is necessary because express_interest() does not return the sig, which is needed by the repo. An
            # additional decoding step is necessary to obtain the metadata.
            data_name, _, _ = await app.express_interest(
                int_name, must_be_fresh=True, can_be_prefix=False,
                lifetime=1000)
            data_bytes = app.get_original_packet_value(data_name)
            (_, meta_info, content, sig) = ndn_format_0_3.parse_data(
                data_bytes, with_tl=False)
            # Save data and update final_id
            print(datetime.now().strftime("%H:%M:%S.%f "), end='')
            print('Received data: {}'.format(Name.to_str(data_name)))
            seq_to_data_packet[seq] = data_bytes
            if meta_info is not None and meta_info.final_block_id is not None:
                final_id = Component.to_number(meta_info.final_block_id)
            break
        except InterestNack as e:
            # Retry on NACK or timeout.
            print(f'Nacked with reason={e.reason}')
        except InterestTimeout:
            print(f'Timeout')
    # Success path: release our slot and signal progress.
    semaphore.release()
    received_or_fail.set()
def _on_interest(self, int_name, _int_param, _app_param):
    """Serve a stored data packet for the exact interest name, re-adding the TL header."""
    # Hoisted: Name.to_str(int_name) was computed four times.
    name_str = Name.to_str(int_name)
    # Single lookup replaces the exists()/get() pair, which issued two
    # storage queries and could race with a concurrent deletion.
    data_bytes = self.storage.get(name_str)
    if data_bytes is None:
        return
    # Storage holds only the Data value; prepend the Type and Length octets.
    type_len = tlv_var.get_tl_num_size(ndn_format_0_3.TypeNumber.DATA)
    len_len = tlv_var.get_tl_num_size(len(data_bytes))
    wire = bytearray(type_len + len_len + len(data_bytes))
    offset = 0
    offset += tlv_var.write_tl_num(ndn_format_0_3.TypeNumber.DATA, wire, offset)
    offset += tlv_var.write_tl_num(len(data_bytes), wire, offset)
    wire[offset:] = data_bytes
    self.app.put_raw_packet(wire)
    logging.info(f'Read handle: serve data {name_str}')
async def service_list(request):
    """Return known services with their ids, names and expiry timestamps."""
    # Renamed from 'list': shadowing the builtin hides it for the whole scope.
    services = []
    logging.debug('/service-list response')
    for service in controller.service_list.services:
        # exp_time is stored in milliseconds since the epoch.
        tp = service.exp_time / 1000
        services.append({
            'serviceId': str(service.service_id),
            'serviceName': Name.to_str(service.service_name),
            'expTime': datetime.utcfromtimestamp(tp).strftime('%Y-%m-%d %H:%M:%S')
        })
    return {'service_list': services}