async def app_main(self):
    """Express an interest for a timestamped data name and verify the reply."""
    interest_name = f'/example/testApp/randomData/{Component.TYPE_TIMESTAMP}=%00%00%01%6d%a4%f3%ff%6d'
    rx_name, rx_meta, rx_content = await self.app.express_interest(
        interest_name,
        must_be_fresh=True,
        can_be_prefix=False,
        lifetime=6000,
        nonce=None)
    # The responder must echo the exact name, a 1 s freshness period,
    # and the canonical payload.
    assert rx_name == Name.from_str(interest_name)
    assert rx_meta.freshness_period == 1000
    assert rx_content == b'Hello, world!'
def __init__(self, app: NDNApp, storage: Storage, config: dict):
    """
    :param app: NDNApp.
    :param storage: Storage.
    TODO: determine which prefix to listen on.
    """
    self.app = app
    self.storage = storage
    repo_config = config['repo_config']
    self.register_root = repo_config['register_root']
    # A root repo serves every name under '/'.
    if self.register_root:
        self.listen(Name.from_str('/'))
def test1():
    """Parse a network NACK LP packet and the interest embedded inside it."""
    wire = (
        b"\x64\x32\xfd\x03\x20\x05\xfd\x03\x21\x01\x96"
        b"\x50\x27\x05\x25\x07\x1f\x08\tlocalhost\x08\x03nfd\x08\x05faces\x08\x06events"
        b"\x21\x00\x12\x00")
    reason, raw_interest = parse_network_nack(wire, True)
    assert reason == NackReason.NO_ROUTE
    # The wrapped interest must still parse with its flags intact.
    int_name, int_param, _, _ = parse_interest(raw_interest)
    assert int_name == Name.from_str("/localhost/nfd/faces/events")
    assert int_param.must_be_fresh
    assert int_param.can_be_prefix
async def on_missing_data(self, missing_list: List[MissingData]) -> None:
    """Fetch and print every outstanding publication in *missing_list*."""
    for missing in missing_list:
        # lowSeqno is advanced in place, so a partially fetched range is
        # not re-requested on the next callback.
        while missing.lowSeqno <= missing.highSeqno:
            content_str: Optional[bytes] = await self.svs.fetchData(
                Name.from_str(missing.nid), missing.lowSeqno, 2)
            if content_str:
                output_str: str = missing.nid + ": " + content_str.decode()
                sys.stdout.write("\033[K")
                sys.stdout.flush()
                print(output_str)
            missing.lowSeqno = missing.lowSeqno + 1
def test_default_3():
    """A data packet with only a name and BLOB content-type parses to empty fields."""
    wire = (b"\x06\x1b\x07\x14\x08\x05local\x08\x03ndn\x08\x06prefix"
            b"\x14\x03\x18\x01\x00")
    pkt_name, pkt_meta, pkt_content, pkt_sig = parse_data(wire)
    assert pkt_name == Name.from_str("/local/ndn/prefix")
    assert pkt_meta.content_type == ContentType.BLOB
    assert pkt_meta.freshness_period is None
    assert pkt_meta.final_block_id is None
    assert pkt_content is None
    # Unsigned packet: no signature info or value present.
    assert pkt_sig.signature_info is None
    assert pkt_sig.signature_value_buf is None
async def wrapper(missing_list: List[MissingData]) -> None:
    """Fetch every missing publication and print it, one line per packet.

    :param missing_list: ranges of not-yet-fetched sequence numbers; each
        entry's ``lowSeqNum`` is advanced in place as packets arrive.
    """
    for missing in missing_list:
        nid = Name.from_str(missing.nid)
        while missing.lowSeqNum <= missing.highSeqNum:
            content = await thread.getSVSync().fetchData(nid, missing.lowSeqNum)
            # 'is not None' (not '!= None') distinguishes a missing packet
            # from an empty payload; the original also reused content_str
            # for the formatted output string, which is avoided here.
            if content is not None:
                sys.stdout.write("\033[K")  # clear the current terminal line
                sys.stdout.flush()
                print(missing.nid + ": " + content.decode())
            missing.lowSeqNum = missing.lowSeqNum + 1
def main():
    """Configure logging and run the pub-sub publisher to completion."""
    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)
    publisher_prefix = Name.from_str('/test_publisher')
    app = NDNApp()
    try:
        app.run_forever(after_start=run_publisher(app, publisher_prefix))
    except FileNotFoundError:
        # Raised when the NFD unix socket does not exist.
        logging.warning('Error: could not connect to NFD')
async def run_publisher(app: NDNApp, publisher_prefix: NonStrictName):
    """Publish one timestamped message to /topic_foo, then shut down."""
    pb = PubSub(app, publisher_prefix)
    await pb.wait_for_ready()
    topic = Name.from_str('/topic_foo')
    msg = f'pubsub message generated at {str(datetime.datetime.now())}'.encode()
    pb.publish(topic, msg)
    # Give the subscriber time to fetch the message before shutting down.
    await aio.sleep(10)
    app.shutdown()
async def _handle(app, keeper, cmd, prefix, args=None):
    """
    Invoke broker's internal command `cmd`.capitalize().
    """
    command, interest_param, app_param = keeper.make_generic_cmd(
        cmd.capitalize(), prefix, **patch_args(cmd, args))
    # Only the content matters; name and meta-info are discarded.
    _, _, content = await app.express_interest(
        Name.from_str(command),
        interest_param=interest_param,
        app_param=app_param,
        must_be_fresh=True)
    return bytes(content).decode()
def test_default():
    """make_interest accepts a parsed name, an encoded name, or a URI string."""
    expected = b'\x05\x1a\x07\x14\x08\x05local\x08\x03ndn\x08\x06prefix\x0c\x02\x0f\xa0'
    name = Name.from_str('/local/ndn/prefix')
    assert make_interest(name, InterestParam()) == expected
    # The already-encoded form must produce the same wire bytes.
    name = Name.encode(name)
    assert make_interest(name, InterestParam()) == expected
    # As must the plain URI string.
    name = '/local/ndn/prefix'
    assert make_interest(name, InterestParam()) == expected
def test1():
    """Parse a NACK (fragment form) whose interest carries a lifetime field."""
    wire = (
        b'\xfd\x03 \x05\xfd\x03!\x01\x96'
        b'P\x43\x05)\x07\x1f\x08\tlocalhost\x08\x03nfd\x08\x05faces\x08\x06events'
        b'\x21\x00\x12\x00\x0c\x02\x03\xe8')
    reason, raw_interest = parse_network_nack(wire, False)
    assert reason == NackReason.NO_ROUTE
    int_name, int_param, _, _ = parse_interest(raw_interest)
    assert int_name == Name.from_str("/localhost/nfd/faces/events")
    assert int_param.must_be_fresh
    assert int_param.can_be_prefix
    assert int_param.lifetime == 1000
async def app_main(self):
    """Three pending interests: two are satisfied by one data, the third times out."""
    exact_fut = self.app.express_interest(
        '/not', nonce=None, lifetime=5, can_be_prefix=False)
    prefix_fut = self.app.express_interest(
        '/not', nonce=None, lifetime=5, can_be_prefix=True)
    longer_fut = self.app.express_interest(
        '/not/important', nonce=None, lifetime=5, can_be_prefix=False)
    name2, _, content2 = await longer_fut
    name1, _, content1 = await prefix_fut
    # The exact-match interest for '/not' is never answered.
    with pytest.raises(InterestTimeout):
        await exact_fut
    assert name1 == Name.from_str('/not/important')
    assert content1 == b'test'
    assert name2 == Name.from_str('/not/important')
    assert content2 == b'test'
async def run(self):
    """Insert a file, then verify fetch behavior under each
    must_be_fresh / can_be_prefix flag combination.

    Fix: comparisons with None use ``is`` / ``is not`` (PEP 8), not ``==`` / ``!=``.
    """
    await aio.sleep(1)  # wait for repo to startup
    filepath = self.create_tmp_file()
    filename = '/TestFlags/file'
    pc = PutfileClient(self.app, Name.from_str('/putfile_client'),
                       Name.from_str(repo_name))
    await pc.insert_file(filepath, Name.from_str(filename), segment_size=8000,
                         freshness_period=0,
                         cpu_count=multiprocessing.cpu_count())
    # Exact match on the bare prefix must fail: only longer names exist.
    ret = await self.fetch(Name.from_str('/TestFlags'),
                           must_be_fresh=False, can_be_prefix=False)
    assert ret is None
    # Prefix match succeeds against the inserted segments.
    ret = await self.fetch(Name.from_str('/TestFlags'),
                           must_be_fresh=False, can_be_prefix=True)
    assert ret is not None
    # freshness_period=0 means nothing is ever fresh, so must_be_fresh fails.
    ret = await self.fetch(Name.from_str('/TestFlags'),
                           must_be_fresh=True, can_be_prefix=True)
    assert ret is None
    self.app.shutdown()
async def run(self): await aio.sleep(2) # wait for repo to startup # respond to interest from repo def on_int(int_name, _int_param, _app_param): self.app.put_data(int_name, b'foobar', freshness_period=1000) await self.app.register('test_name', on_int) # construct insert parameter cmd_param = RepoCommandParameter() cmd_param.name = 'test_name' cmd_param.start_block_id = None cmd_param.end_block_id = None process_id = os.urandom(4) cmd_param.process_id = process_id cmd_param.check_prefix = CheckPrefix() cmd_param.check_prefix.name = Name.from_str('/putfile_client') cmd_param_bytes = cmd_param.encode() pb = PubSub(self.app, Name.from_str('/putfile_client')) await pb.wait_for_ready() is_success = await pb.publish( Name.from_str(repo_name) + ['insert'], cmd_param_bytes) assert is_success # insert_num should be 1 checker = CommandChecker(self.app) n_retries = 3 while n_retries > 0: response = await checker.check_insert(Name.from_str(repo_name), process_id) if response is None or response.status_code == 404: n_retries -= 1 elif response.status_code != 300: assert response.status_code == 200 assert response.insert_num == 1 break await aio.sleep(1) self.app.shutdown()
def test_verify(self):
    """Sign a packet with ECDSA P-256 and verify it with EccChecker.

    ECDSA signatures are randomized, so only verification is checked,
    never the exact signature bytes.
    """
    pri_key = ECC.generate(curve="P-256")
    der_pri = pri_key.export_key(format="DER")
    pub_key = pri_key.public_key()
    signer = Sha256WithEcdsaSigner("/K/KEY/x", der_pri)
    pkt = make_data("/test", MetaInfo(), b"test content", signer=signer)
    _, _, _, sig_ptrs = parse_data(pkt)
    # The signature value must be a well-formed ASN.1 DER sequence.
    DerSequence().decode(bytes(sig_ptrs.signature_value_buf))
    validator = EccChecker.from_key(
        "/K/KEY/x", bytes(pub_key.export_key(format='DER')))
    assert aio.run(validator(Name.from_str("/test"), sig_ptrs))
def test_encode_func():
    """Name.encode with an explicit buffer writes in place at the given offset."""
    name = Name.from_str('/a/b/c/d')
    buf = bytearray(20)
    # Offset 10 leaves too little room for the 14-byte encoding.
    with pytest.raises(IndexError):
        Name.encode(name, buf, 10)
    assert Name.encode(name, buf, 6) == \
        b'\x00\x00\x00\x00\x00\x00\x07\x0c\x08\x01a\x08\x01b\x08\x01c\x08\x01d'
    # Re-encoding at offset 0 overwrites the front; the tail of the previous
    # write remains visible in the returned buffer.
    assert Name.encode(name, buf) == \
        b'\x07\x0c\x08\x01a\x08\x01b\x08\x01c\x08\x01d\x08\x01c\x08\x01d'
    # An empty name encodes to a zero-length name TLV.
    assert Name.encode([]) == b'\x07\x00'
def test_data_1(self):
    """HMAC-SHA256 signing yields a deterministic wire format that verifies."""
    key = bytes(i for i in range(32))
    signer = HmacSha256Signer('key1', key)
    wire = make_data('/ndn/abc', MetaInfo(None), b'SUCCESS!', signer)
    expected_hex = ('0649070a08036e646e0803616263'
                    '140015085355434345535321'
                    '160d1b01041c08070608046b657931'
                    '172019868e7183998df373332f3dd1c9c950fc29d734c07977791d8396fa3b91fd36')
    assert wire.hex() == expected_hex
    _, _, _, sig_ptrs = parse_data(wire)
    # The same key must validate the signature it produced.
    validator = HmacChecker.from_key('key1', key)
    assert aio.run(validator(Name.from_str('/ndn/abc'), sig_ptrs))
def test_default():
    """An interest with only a name and lifetime parses to all default fields."""
    wire = b'\x05\x1a\x07\x14\x08\x05local\x08\x03ndn\x08\x06prefix\x0c\x02\x0f\xa0'
    name, params, app_params, sig = parse_interest(wire)
    assert name == Name.from_str('/local/ndn/prefix')
    assert app_params is None
    # Flags default to False, nonce/hop-limit absent, lifetime explicit.
    assert not params.can_be_prefix
    assert not params.must_be_fresh
    assert params.nonce is None
    assert params.lifetime == 4000
    assert params.hop_limit is None
    # Unsigned interest: no signature-related fields.
    assert sig.signature_info is None
    assert sig.signature_value_buf is None
    assert sig.digest_value_buf is None
def test_state_table_metadata() -> None:
    """Metadata totals reflect all nodes' sequence numbers plus local updates."""
    mynid: Name = Name.from_str("D")
    sv, st = StateVector(), StateTable(mynid)
    # Seed the vector with three remote nodes and our own entry.
    sv.set("c", 4)
    sv.set("a", 6)
    sv.set("B", 10)
    sv.set(Name.to_str(mynid), 55)
    ml: List[MissingData] = st.processStateVector(sv, True)
    st.updateMyState(56)
    st.updateMetaData()
    # 4 + 6 + 10 + 56 == 76 total sequence numbers.
    assert 76 == st.getMetaData().tseqno
    assert Name.to_str(mynid) == bytes(st.getMetaData().source).decode()
    assert 0 == st.getMetaData().nopcks
def main():
    """Parse CLI arguments and run one insert-status check against a repo."""
    parser = argparse.ArgumentParser(description='segmented insert client')
    parser.add_argument('-r', '--repo_name', required=True, help='Name of repo')
    parser.add_argument('-p', '--process_id', required=True, help="Process ID")
    args = parser.parse_args()
    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)
    app = NDNApp()
    app.run_forever(after_start=run_check(
        app,
        repo_name=Name.from_str(args.repo_name),
        process_id=int(args.process_id)))
async def query_face_id(app, uri):
    """Query NFD for the face ID associated with *uri*.

    :param app: NDNApp used to express the query interest.
    :param uri: face URI string, encoded into the query filter.
    :return: the face ID of the first matching face, or None on failure.

    Fixes: removed the needless f-string prefix on a placeholder-free log
    message, and guarded against an empty face_status list (previously an
    unhandled IndexError).
    """
    query_filter = FaceQueryFilter()
    query_filter.face_query_filter = FaceQueryFilterValue()
    query_filter.face_query_filter.uri = uri.encode('utf-8')
    query_filter_msg = query_filter.encode()
    # The encoded filter rides as the last name component of the query.
    name = Name.from_str("/localhost/nfd/faces/query") + \
        [Component.from_bytes(query_filter_msg)]
    try:
        _, _, data = await app.express_interest(
            name, lifetime=1000, can_be_prefix=True, must_be_fresh=True)
    except (InterestCanceled, InterestTimeout, InterestNack,
            ValidationFailure, NetworkError):
        logging.error('Query failed')
        return None
    ret = FaceStatusMsg.parse(data)
    logging.info(ret)
    if not ret.face_status:
        # No face matched the filter.
        return None
    return ret.face_status[0].face_id
def test_params():
    """All optional interest parameters decode from a fully populated packet."""
    wire = (b'\x05\x26\x07\x14\x08\x05local\x08\x03ndn\x08\x06prefix'
            b'\x21\x00\x12\x00\x0a\x04\x00\x00\x00\x00\x0c\x01\x0a\x22\x01\x01')
    name, params, app_params, sig = parse_interest(wire)
    assert name == Name.from_str('/local/ndn/prefix')
    assert app_params is None
    # Both flags set, zero nonce, 10 ms lifetime, hop limit of 1.
    assert params.can_be_prefix
    assert params.must_be_fresh
    assert params.nonce == 0
    assert params.lifetime == 10
    assert params.hop_limit == 1
    # Unsigned interest: no signature-related fields.
    assert sig.signature_info is None
    assert sig.signature_value_buf is None
    assert sig.digest_value_buf is None
async def app_main(self):
    """An implicit-digest interest matches only when the digest is correct."""
    bad_digest_fut = self.app.express_interest(
        '/test/sha256digest=FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF',
        nonce=None, lifetime=5)
    good_digest_fut = self.app.express_interest(
        '/test/sha256digest=5488f2c11b566d49e9904fb52aa6f6f9e66a954168109ce156eea2c92c57e4c2',
        nonce=None, lifetime=5)
    name2, _, content2 = await good_digest_fut
    # The wrong digest never matches any data, so the interest times out.
    with pytest.raises(InterestTimeout):
        await bad_digest_fut
    assert name2 == Name.from_str('/test')
    assert content2 == b'test'
async def run(self):
    """Round-trip a 40 MB file through the repo and compare the bytes."""
    await aio.sleep(2)  # wait for repo to startup
    src_path = self.create_tmp_file(size_bytes=40 * 1024 * 1024)
    dst_path = uuid.uuid4().hex.upper()[0:6]
    # put file
    pc = PutfileClient(self.app, Name.from_str('/putfile_client'),
                       Name.from_str(repo_name))
    await pc.insert_file(src_path, Name.from_str(dst_path), segment_size=8000,
                         freshness_period=0,
                         cpu_count=multiprocessing.cpu_count())
    # get file
    gc = GetfileClient(self.app, Name.from_str(repo_name))
    await gc.fetch_file(Name.from_str(dst_path))
    # the fetched copy must be byte-identical to the original
    ret = filecmp.cmp(src_path, dst_path)
    assert ret
    # cleanup
    self.files_to_cleanup.append(src_path)
    self.files_to_cleanup.append(dst_path)
    self.app.shutdown()
def main():
    """Build a catalog command name from CLI arguments and send it."""
    parser = argparse.ArgumentParser(description='python client.py')
    parser.add_argument('-n', '--node_prefix', required=True,
                        help='Prefix of catalog ("/217B/repo/node/A")')
    parser.add_argument('-c', '--command', default='insert',
                        choices=['insert', 'delete', 'recall'],
                        help='Command Verb')
    parser.add_argument('-d', '--data_name', required=True,
                        help='data name ("/foo/bar/1.txt")')
    parser.add_argument('-s', '--hash', required=True,
                        help='data hash ("1bd109fe")')
    parser.add_argument('-o', '--desired_copies', type=int, default=3,
                        help='desired copies')
    args = parser.parse_args()
    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)
    app = NDNApp(face=None, keychain=KeychainDigest())
    # Command name layout: <node_prefix>/<verb>/<encoded datainfo>
    name = Name.from_str(args.node_prefix)
    name.append(Component.from_str(args.command))
    datainfo = DatainfoTlvModel()
    datainfo.data_name = args.data_name.encode()
    datainfo.hash = args.hash.encode()
    datainfo.desired_copies = args.desired_copies
    name.append(Component.from_bytes(datainfo.encode()))
    try:
        app.run_forever(after_start=send(app, name))
    except FileNotFoundError:
        logging.error('Error: could not connect to NFD.\n')
        return 0
async def main(app: NDNApp):
    """
    Async helper function to run the concurrent fetcher.

    This function is necessary because it's responsible for calling
    app.shutdown().

    :param app: NDNApp
    """
    # Allow at most 20 interests in flight at once.
    semaphore = aio.Semaphore(20)
    fetcher = concurrent_fetcher(app, Name.from_str('/test1.pdf'), 0, 161, semaphore)
    async for data_bytes in fetcher:
        data_name, _meta, _content, _sig = ndn_format_0_3.parse_data(
            data_bytes, with_tl=False)
        print(Name.to_str(data_name))
    app.shutdown()
def test_compare():
    """Names listed in strictly increasing canonical order compare accordingly.

    Every pair (i, j) must satisfy each of the six rich comparisons exactly
    as the indices do.
    """
    strs = [
        "/",
        "/sha256digest=0000000000000000000000000000000000000000000000000000000000000000",
        "/sha256digest=0000000000000000000000000000000000000000000000000000000000000001",
        "/sha256digest=FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
        "/params-sha256=0000000000000000000000000000000000000000000000000000000000000000",
        "/params-sha256=0000000000000000000000000000000000000000000000000000000000000001",
        "/params-sha256=FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
        "/3=",
        "/3=D",
        "/3=F",
        "/3=AA",
        "//",
        "/D",
        "/D/sha256digest=0000000000000000000000000000000000000000000000000000000000000000",
        "/D/sha256digest=0000000000000000000000000000000000000000000000000000000000000001",
        "/D/sha256digest=FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
        "/D/params-sha256=0000000000000000000000000000000000000000000000000000000000000000",
        "/D/params-sha256=0000000000000000000000000000000000000000000000000000000000000001",
        "/D/params-sha256=FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
        "/D/3=",
        "/D/3=D",
        "/D/3=F",
        "/D/3=AA",
        "/D//",
        "/D/D",
        "/D/F",
        "/D/AA",
        "/D/21426=/",
        "/D/21426=D",
        "/D/21426=F",
        "/D/21426=AA",
        "/F",
        "/AA",
        "/21426=",
        "/21426=D",
        "/21426=F",
        "/21426=AA",
    ]
    names = [Name.from_str(s) for s in strs]
    for i, a in enumerate(names):
        for j, b in enumerate(names):
            assert (a == b) == (i == j)
            assert (a != b) == (i != j)
            assert (a < b) == (i < j)
            assert (a <= b) == (i <= j)
            assert (a > b) == (i > j)
            assert (a >= b) == (i >= j)
def test_state_table_update_state() -> None:
    """The table's own entry and metadata total follow successive state updates."""
    mynid = Name.from_str("A")
    st = StateTable(mynid)
    # Advance the local state through sequence numbers 0..3.
    last_seqno = 3
    for seqno in range(last_seqno + 1):
        st.updateMyState(seqno)
    st.updateMetaData()
    assert last_seqno == st.getSeqno(mynid)
    assert last_seqno == st.getMetaData().tseqno
    assert Name.to_str(mynid) == bytes(st.getMetaData().source).decode()
def test_forwarding_hint():
    """Forwarding hints accept a URI string, a parsed Name, and raw TLV bytes."""
    int_param = InterestParam()
    int_param.nonce = 0x01020304
    int_param.forwarding_hint = [
        (0x87, '/name/A'),
        (0x02, Name.from_str('/ndn/B')),
        (0x12, b'\x07\x0d\x08\x0bshekkuenseu'),
    ]
    wire = make_interest('/local/ndn/prefix', int_param)
    expected = (b'\x05\x55\x07\x14\x08\x05local\x08\x03ndn\x08\x06prefix'
                b'\x1e\x33'
                b'\x1f\x0e\x1e\x01\x87\x07\x09\x08\x04name\x08\x01A'
                b'\x1f\x0d\x1e\x01\x02\x07\x08\x08\x03ndn\x08\x01B'
                b'\x1f\x12\x1e\x01\x12\x07\r\x08\x0bshekkuenseu'
                b'\x0a\x04\x01\x02\x03\x04\x0c\x02\x0f\xa0')
    assert wire == expected
async def equalize(self, incoming_md: MetaData) -> None:
    """Catch up to a peer whose metadata advertises more total sequence numbers.

    Skips out early when we are already up to date or a balancing pass is
    in flight; the busy flag makes the pass non-reentrant.
    """
    if incoming_md.tseqno <= self.table.getMetaData().tseqno or self.busy:
        return
    self.busy = True
    src_name = Name.from_str(bytes(incoming_md.source).decode())
    # Queue one fetch task per propagation packet (1-based indices).
    for pck_idx in range(incoming_md.nopcks):
        self.taskWindow.addTask(self.balanceFromProp, (src_name, pck_idx + 1))
    await self.taskWindow.gather()
    SVSyncLogger.info(
        f'SVSyncBalancer: nmeta {bytes(self.table.getMetaData().source).decode()} - {self.table.getMetaData().tseqno} total, {self.table.getMetaData().nopcks} pcks'
    )
    SVSyncLogger.info(
        f'SVSyncBalancer: ntable {self.table.getCompleteStateVector().to_str()}'
    )
    self.busy = False