def main():
    """Entry point: parse CLI options, start the catalog command handler, and serve until NFD drops."""
    arg_parser = argparse.ArgumentParser(description='python catalog.py')
    arg_parser.add_argument('-d', '--database_file', required=True,
                            help='Path to (sqlite3) database file')
    arg_parser.add_argument('-p', '--prefix', required=True,
                            help='Prefix of Catalog ("/217B/catalog")')
    args = arg_parser.parse_args()

    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)

    app = NDNApp()
    command_handle = CommandHandle(app, args.prefix, args.database_file)
    # listens on /<prefix>
    command_handle.listen()

    try:
        app.run_forever()
    except FileNotFoundError:
        logging.error('Error: could not connect to NFD.\n')
    return 0
def main():
    """Serve a local file as versioned, segmented NDN Data packets prepared up front."""
    if len(sys.argv) <= 2:
        print(f'Usage: {sys.argv[0]} <name> <file>')
        exit(0)

    logging.basicConfig(format='[{asctime}]{levelname}:{message}',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO,
                        style='{')

    app = NDNApp()

    # Version component makes each run of the server publish a distinct version.
    name = Name.normalize(sys.argv[1])
    name.append(Component.from_version(timestamp()))

    with open(sys.argv[2], 'rb') as f:
        data = f.read()

    # Ceiling division: number of SEGMENT_SIZE-byte chunks.
    seg_cnt = (len(data) + SEGMENT_SIZE - 1) // SEGMENT_SIZE
    final_id = Component.from_segment(seg_cnt - 1)
    packets = []
    for seg in range(seg_cnt):
        chunk = data[seg * SEGMENT_SIZE:(seg + 1) * SEGMENT_SIZE]
        packets.append(app.prepare_data(name + [Component.from_segment(seg)],
                                        chunk,
                                        freshness_period=10000,
                                        final_block_id=final_id))
    print(f'Created {seg_cnt} chunks under name {Name.to_str(name)}')

    @app.route(name)
    def on_interest(int_name, _int_param, _app_param):
        # Interests without an explicit segment component get segment 0.
        if Component.get_type(int_name[-1]) == Component.TYPE_SEGMENT:
            seg_no = Component.to_number(int_name[-1])
        else:
            seg_no = 0
        if seg_no < seg_cnt:
            app.put_raw_packet(packets[seg_no])

    app.run_forever()
def main() -> int:
    """Bootstrap the repo: load config, wire the handles together, and serve until NFD drops."""
    cmdline_args = process_cmd_opts()
    cfg = process_config(cmdline_args)
    print(cfg)
    config_logging(cfg['logging_config'])

    storage = create_storage(cfg['db_config'])

    app = NDNApp()
    pb = PubSub(app)

    # All handles operate on the same storage and share the read path.
    read_handle = ReadHandle(app, storage, cfg)
    write_handle = WriteCommandHandle(app, storage, pb, read_handle, cfg)
    delete_handle = DeleteCommandHandle(app, storage, pb, read_handle, cfg)
    tcp_bulk_insert_handle = TcpBulkInsertHandle(storage, read_handle, cfg)

    repo = Repo(app, storage, read_handle, write_handle, delete_handle,
                tcp_bulk_insert_handle, cfg)
    aio.ensure_future(repo.listen())

    try:
        app.run_forever()
    except FileNotFoundError:
        print('Error: could not connect to NFD.')
    return 0
def main() -> int:
    """Older-style repo entry point: YAML config, sqlite storage, Ctrl-C to quit cleanly."""
    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)
    try:
        cfg = get_yaml()
        logging.info(cfg)

        app = NDNApp()
        storage = SqliteStorage()
        read_handle = ReadHandle(app, storage)
        write_handle = WriteCommandHandle(app, storage, read_handle)
        delete_handle = DeleteCommandHandle(app, storage)
        tcp_bulk_insert_handle = TcpBulkInsertHandle(
            storage, read_handle,
            cfg['tcp_bulk_insert']['addr'],
            cfg['tcp_bulk_insert']['port'])

        repo = Repo(Name.from_str(cfg['repo_config']['repo_name']),
                    app, storage, read_handle, write_handle, delete_handle,
                    tcp_bulk_insert_handle)
        repo.listen()

        app.run_forever()
    except KeyboardInterrupt:
        # Quiet shutdown on Ctrl-C.
        pass
    return 0
def main():
    """CLI wrapper that asks a repo to delete a range of blocks.

    NOTE(review): unlike the newer delete client, the start/end block IDs are
    forwarded as raw strings without int() conversion — confirm that this
    version of run_delete_client accepts strings.
    """
    # Fixed: description said 'putfile', but this tool drives the delete client.
    parser = argparse.ArgumentParser(description='delfile')
    parser.add_argument('-r', '--repo_name', required=True, help='Name of repo')
    parser.add_argument('-p', '--prefix', required=True, help='Prefix of data')
    parser.add_argument('-s', '--start_block_id', required=True, help='Start Block ID')
    parser.add_argument('-e', '--end_block_id', required=True, help='End Block ID')
    args = parser.parse_args()

    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)

    app = NDNApp()
    app.run_forever(
        after_start=run_delete_client(app,
                                      repo_name=args.repo_name,
                                      prefix=args.prefix,
                                      start_block_id=args.start_block_id,
                                      end_block_id=args.end_block_id))
def main():
    """Start a repo node: periodic command client plus insert/delete/recall listeners."""
    parser = argparse.ArgumentParser(description='python reponode.py')
    parser.add_argument('-r', '--repo_prefix', required=True,
                        help='Prefix of Repo ("/217B/repo")')
    parser.add_argument('-c', '--catalog_prefix', required=True,
                        help='Prefix of Catalog ("/217B/catalog")')
    parser.add_argument('-n', '--node_name', required=True,
                        help='Node name ("node/A")')
    parser.add_argument('-p', '--period', type=int, default=10,
                        help='Update period in second')
    parser.add_argument('-f', '--files', nargs='+', default=[],
                        help='List of uncorrupted files')
    args = parser.parse_args()

    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)

    app = NDNApp(face=None, keychain=KeychainDigest())
    reponode_client = ReponodeClient(app)
    command_client = CommandClient(app, reponode_client, args.repo_prefix,
                                   args.catalog_prefix, args.node_name,
                                   args.period)

    # One handle per verb; each listens on /<repo_prefix>/<node_name>/<verb>.
    insert_handle = InsertHandle(app, command_client, args.node_name, args.repo_prefix)
    delete_handle = DeleteHandle(app, command_client, args.node_name, args.repo_prefix)
    recall_handle = RecallHandle(app, command_client, args.node_name, args.repo_prefix)
    # TODO: start command_client's periodically timer
    insert_handle.listen()
    delete_handle.listen()
    recall_handle.listen()

    try:
        app.run_forever(after_start=cmd(command_client, args.period, args.files))
    except FileNotFoundError:
        logging.error('Error: could not connect to NFD.\n')
    return 0
def main():
    """Configure DEBUG logging to stderr, load the .env file, and run the NDN app."""
    logging.basicConfig(format='[{asctime}]{levelname}:{message}',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.DEBUG,
                        style='{',
                        stream=sys.stderr)
    load_dotenv()
    ndn_app = NDNApp()
    ndn_app.run_forever(after_start=after_start(ndn_app))
def main():
    """Run the test publisher under /test_publisher until NFD disconnects."""
    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)
    prefix = Name.from_str('/test_publisher')
    app = NDNApp()
    try:
        app.run_forever(after_start=run_publisher(app, prefix))
    except FileNotFoundError:
        logging.warning('Error: could not connect to NFD')
def main():
    """Dispatch an admin command to a PSK broker named on the command line."""
    if len(sys.argv) < 3:
        # Fixed: the command is required (sys.argv[1:3] below unpacks it), so
        # the usage string must not present it as optional brackets.
        print("Usage: python3 admin <broker> <command>")
        sys.exit(1)
    app = NDNApp()
    keeper = PSKCmd(app)
    # argv[1] is the broker prefix, argv[2] the command verb; the rest are
    # command-specific arguments.
    prefix, cmd = sys.argv[1:3]
    args = sys.argv[3:]
    try:
        app.run_forever(after_start=handle(app, keeper, cmd, prefix, args))
    except (InterestNack, InterestTimeout):
        print(f"Broker {prefix} unreachable or timeout")
def main():
    """CLI that pushes a local file into a repo via the putfile client."""
    parser = argparse.ArgumentParser(description='putfile')
    parser.add_argument('-r', '--repo_name', required=True, help='Name of repo')
    parser.add_argument('-f', '--file_path', required=True, help='Path to input file')
    parser.add_argument('-n', '--name_at_repo', required=True,
                        help='Prefix used to store file at Repo')
    parser.add_argument('--client_prefix', required=False, default='/putfile_client',
                        help='prefix of this client')
    parser.add_argument('--segment_size', type=int, required=False, default=8000,
                        help='Size of each data packet')
    parser.add_argument('--freshness_period', type=int, required=False, default=0,
                        help='Data packet\'s freshness period')
    parser.add_argument('--cpu_count', type=int, required=False,
                        default=multiprocessing.cpu_count(),
                        help='Number of cores to use')
    args = parser.parse_args()

    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)

    app = NDNApp(face=None, keychain=KeychainDigest())
    client_coro = run_putfile_client(
        app,
        repo_name=Name.from_str(args.repo_name),
        file_path=args.file_path,
        name_at_repo=Name.from_str(args.name_at_repo),
        client_prefix=Name.from_str(args.client_prefix),
        segment_size=args.segment_size,
        freshness_period=args.freshness_period,
        cpu_count=args.cpu_count)
    try:
        app.run_forever(after_start=client_coro)
    except FileNotFoundError:
        print('Error: could not connect to NFD.')
def main():
    """CLI for asking a repo to delete an object, optionally restricted to a segment range."""
    parser = argparse.ArgumentParser(description='delfile')
    parser.add_argument('-r', '--repo_name', required=True, help='Name of repo')
    parser.add_argument('-n', '--name_at_repo', required=True,
                        help='Name used to store file at Repo')
    parser.add_argument('-s', '--start_block_id', required=False, help='Start Block ID')
    parser.add_argument('-e', '--end_block_id', required=False, help='End Block ID')
    parser.add_argument('--client_prefix', required=False, default='/delfile_client',
                        help='prefix of this client')
    parser.add_argument('--register_prefix', default=None,
                        help='The prefix repo should register')
    args = parser.parse_args()

    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)

    # Optional block IDs arrive as strings; convert only when present.
    start_block_id = int(args.start_block_id) if args.start_block_id else None
    end_block_id = int(args.end_block_id) if args.end_block_id else None
    # Fixed: compare with None by identity (PEP 8), not equality.
    if args.register_prefix is None:
        # Default the registered prefix to the object's name at the repo.
        args.register_prefix = args.name_at_repo
    args.register_prefix = Name.from_str(args.register_prefix)

    app = NDNApp()
    try:
        app.run_forever(after_start=run_delete_client(
            app,
            repo_name=Name.from_str(args.repo_name),
            name_at_repo=Name.from_str(args.name_at_repo),
            start_block_id=start_block_id,
            end_block_id=end_block_id,
            client_prefix=Name.from_str(args.client_prefix),
            register_prefix=args.register_prefix))
    except FileNotFoundError:
        print('Error: could not connect to NFD.')
def main():
    """Send a single insert/delete/recall command Interest to a repo node."""
    parser = argparse.ArgumentParser(description='python client.py')
    parser.add_argument('-n', '--node_prefix', required=True,
                        help='Prefix of catalog ("/217B/repo/node/A")')
    parser.add_argument('-c', '--command', default='insert',
                        choices=['insert', 'delete', 'recall'],
                        help='Command Verb')
    parser.add_argument('-d', '--data_name', required=True,
                        help='data name ("/foo/bar/1.txt")')
    parser.add_argument('-s', '--hash', required=True,
                        help='data hash ("1bd109fe")')
    parser.add_argument('-o', '--desired_copies', type=int, default=3,
                        help='desired copies')
    args = parser.parse_args()

    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)

    app = NDNApp(face=None, keychain=KeychainDigest())

    # Interest name layout: /<node_prefix>/<command>/<TLV-encoded datainfo>.
    name = Name.from_str(args.node_prefix)
    name.append(Component.from_str(args.command))
    datainfo = DatainfoTlvModel()
    datainfo.data_name = args.data_name.encode()
    datainfo.hash = args.hash.encode()
    datainfo.desired_copies = args.desired_copies
    name.append(Component.from_bytes(datainfo.encode()))

    try:
        app.run_forever(after_start=send(app, name))
    except FileNotFoundError:
        logging.error('Error: could not connect to NFD.\n')
    return 0
def main():
    """Poll a repo for the status of a segmented-insert process."""
    parser = argparse.ArgumentParser(description='segmented insert client')
    parser.add_argument('-r', '--repo_name', required=True, help='Name of repo')
    parser.add_argument('-p', '--process_id', required=True, help="Process ID")
    args = parser.parse_args()

    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)

    app = NDNApp()
    check_coro = run_check(app,
                           repo_name=Name.from_str(args.repo_name),
                           process_id=int(args.process_id))
    app.run_forever(after_start=check_coro)
def main():
    """git-remote-helper entry point: resolve the local .git dir and serve the remote repo."""
    if len(sys.argv) < 3:
        print("Usage:", sys.argv[0], "remote-name url", file=sys.stderr)
        return -1
    # Honour GIT_DIR when git exports it; otherwise assume ./.git
    local_repo_path = os.environ.get('GIT_DIR', os.path.join(os.getcwd(), ".git"))
    repo_prefix = sys.argv[2]
    # Last URL component doubles as the repo's short name.
    repo_name = repo_prefix.split('/')[-1]
    git_repo = GitRepo(repo_name, local_repo_path)
    app = NDNApp()
    app.run_forever(after_start=after_start(app, repo_prefix, repo_name,
                                            git_repo, local_repo_path))
def main():
    """Fetch a file from a repo, logging progress at DEBUG verbosity."""
    parser = argparse.ArgumentParser(description='getfile')
    parser.add_argument('-r', '--repo_name', required=True, help='Name of repo')
    parser.add_argument('-n', '--name_at_repo', required=True,
                        help='Name used to store file at Repo')
    args = parser.parse_args()

    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.DEBUG)

    app = NDNApp()
    try:
        client_coro = run_getfile_client(
            app,
            repo_name=Name.from_str(args.repo_name),
            name_at_repo=Name.from_str(args.name_at_repo))
        app.run_forever(after_start=client_coro)
    except FileNotFoundError:
        print('Error: could not connect to NFD.')
class NDNAppTestSuite:
    """Harness that runs an NDNApp against a scripted DummyFace.

    Subclasses script the network side in face_proc() and the application
    side in app_main().
    """
    # Set by test_main(); None until a test runs.
    app = None

    def test_main(self):
        dummy_face = DummyFace(self.face_proc)
        digest_keychain = KeychainDigest()
        self.app = NDNApp(dummy_face, digest_keychain)
        # The dummy face needs a back-reference to drive the app.
        dummy_face.app = self.app
        self.app.run_forever(after_start=self.app_main())

    @abc.abstractmethod
    async def face_proc(self, face: DummyFace):
        pass

    @abc.abstractmethod
    async def app_main(self):
        pass
def main():
    """Construct the repo from YAML config and serve forever (no connection error handling)."""
    cfg = get_yaml()
    logging.info(cfg)

    app = NDNApp()
    storage = SqliteStorage()
    read_handle = ReadHandle(app, storage)
    write_handle = WriteCommandHandle(app, storage, read_handle)
    delete_handle = DeleteCommandHandle(app, storage)
    tcp_bulk_insert_handle = TcpBulkInsertHandle(storage, read_handle,
                                                 cfg['tcp_bulk_insert']['addr'],
                                                 cfg['tcp_bulk_insert']['port'])

    repo = Repo(Name.from_str(cfg['repo_config']['repo_name']),
                app, storage, read_handle, write_handle, delete_handle,
                tcp_bulk_insert_handle)
    repo.listen()
    app.run_forever()
def main():
    """Fetch two signed articles and validate them against the LVS trust schema."""
    basedir = os.path.dirname(os.path.abspath(sys.argv[0]))
    tpm_path = os.path.join(basedir, 'privKeys')
    pib_path = os.path.join(basedir, 'pib.db')
    keychain = KeychainSqlite3(pib_path, TpmFile(tpm_path))
    # The /lvs-test identity's default certificate acts as the trust anchor.
    trust_anchor = keychain['/lvs-test'].default_key().default_cert()
    print(f'Trust anchor name: {Name.to_str(trust_anchor.name)}')

    lvs_model = compile_lvs(lvs_text)
    checker = Checker(lvs_model, DEFAULT_USER_FNS)
    app = NDNApp(keychain=keychain)
    validator = lvs_validator(checker, app, trust_anchor.data)

    async def fetch_interest(article: str):
        # Express one validated Interest and print whatever comes back.
        try:
            name = Name.from_str(f'/lvs-test/article/xinyu/{article}')
            print(f'Sending Interest {Name.to_str(name)}')
            data_name, meta_info, content = await app.express_interest(
                name, must_be_fresh=True, can_be_prefix=True,
                lifetime=6000, validator=validator)
        except InterestNack as e:
            print(f'Nacked with reason={e.reason}')
        except InterestTimeout:
            print('Timeout')
        except InterestCanceled:
            print('Canceled')
        except ValidationFailure:
            print('Data failed to validate')
        else:
            print(f'Received Data Name: {Name.to_str(data_name)}')
            print(meta_info)
            print(bytes(content).decode() if content else None)

    async def ndn_main():
        await fetch_interest('hello')
        await fetch_interest('world')
        app.shutdown()

    app.run_forever(ndn_main())
def main():
    """Fetch a file from a repo, passing names through as plain strings."""
    parser = argparse.ArgumentParser(description='getfile')
    parser.add_argument('-r', '--repo_name', required=True, help='Name of repo')
    parser.add_argument('-n', '--name_at_repo', required=True,
                        help='Name used to store file at Repo')
    args = parser.parse_args()

    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)

    app = NDNApp()
    # NOTE: names are handed over as raw strings here, unconverted.
    app.run_forever(after_start=run_getfile_client(app,
                                                   repo_name=args.repo_name,
                                                   name_at_repo=args.name_at_repo))
def main() -> int:
    """Bring up the catalog service and serve until NFD disconnects."""
    cmdline_args = process_cmd_opts()
    cfg = process_config(cmdline_args)

    storage = create_storage(cfg['db_config'])
    app = NDNApp()
    read_handle = ReadHandle(app, storage)
    write_handle = WriteHandle(app, storage, read_handle)

    catalog = Catalog(Name.from_str(cfg['catalog_config']['catalog_name']),
                      app, storage, read_handle, write_handle)
    aio.ensure_future(catalog.listen())

    try:
        app.run_forever()
    except FileNotFoundError:
        print('Error: could not connect to NFD.')
    return 0
class RepoTestSuite(object):
    """Integration-test scaffold: boots an ndn-python-repo subprocess with a
    temporary config, runs a client coroutine against it, then removes the
    temp files it created."""

    def test_main(self):
        # could not get NFD running on travis macos, skipping ...
        if os.getenv('TRAVIS') and platform.system() == 'Darwin':
            return
        self.startup()
        self.cleanup()

    def startup(self):
        # Paths collected here are deleted by cleanup().
        self.files_to_cleanup = []
        tmp_cfg_path = self.create_tmp_cfg()
        self.files_to_cleanup.append(tmp_cfg_path)
        # NOTE(review): sqlite3_path is not defined in this class — presumably
        # a module-level constant pointing at the repo's database file; confirm.
        self.files_to_cleanup.append(sqlite3_path)
        # Launch the repo as a separate process using the temp config.
        self.repo_proc = subprocess.Popen(
            ['ndn-python-repo', '-c', tmp_cfg_path])
        self.app = NDNApp(face=None, keychain=KeychainDigest())
        # Blocks until self.run() (the subclass's scenario) completes.
        self.app.run_forever(after_start=self.run())

    def cleanup(self):
        self.repo_proc.kill()
        for file in self.files_to_cleanup:
            if os.path.exists(file):
                print('Cleaning up tmp file:', file)
                os.remove(file)

    def create_tmp_file(self, size_bytes=4096):
        # Random payload written into a fresh temp directory.
        tmp_file_path = os.path.join(tempfile.mkdtemp(), 'tempfile')
        with open(tmp_file_path, 'wb') as f:
            f.write(os.urandom(size_bytes))
        return tmp_file_path

    def create_tmp_cfg(self):
        # Materialize the module-level inline_cfg text as a config file.
        tmp_cfg_path = os.path.join(tempfile.mkdtemp(), 'ndn-python-repo.cfg')
        with open(tmp_cfg_path, 'w') as f:
            f.write(inline_cfg)
        return tmp_cfg_path

    async def run(self):
        # Subclasses override this with the actual test scenario.
        pass
def main():
    """Serve a local file as segmented Data; Interests carry a JSON parameter
    with an encrypted session key (currently decoded but unused)."""
    if len(sys.argv) <= 2:
        print(f'Usage: {sys.argv[0]} <name> <file>')
        exit(0)
    logging.basicConfig(format='[{asctime}]{levelname}:{message}',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO,
                        style='{')
    app = NDNApp()
    name = Name.normalize(sys.argv[1])
    with open(sys.argv[2], 'rb') as f:
        data = f.read()
    # Number of SEGMENT_SIZE-byte chunks (ceiling division).
    seg_cnt = (len(data) + SEGMENT_SIZE - 1) // SEGMENT_SIZE
    packets = [
        data[i * SEGMENT_SIZE:(i + 1) * SEGMENT_SIZE] for i in range(seg_cnt)
    ]
    print(f'Created {seg_cnt} chunks under name {Name.to_str(name)}')

    @app.route(name)
    def on_interest(inst_name: FormalName, inst_param: InterestParam, app_param: BinaryStr):
        # Parse the JSON application parameter; raises if app_param is absent
        # or not valid JSON.
        d = json.loads(app_param.tobytes().decode())
        # NOTE(review): enc_session_key and nonce are decoded but never used in
        # this handler — presumably intended for encrypting the returned
        # payload; confirm against the corresponding consumer.
        enc_session_key = base64.b64decode(d['enc_session_key'])
        nonce = base64.b64decode(d['nonce'])
        if Component.get_type(inst_name[-1]) == Component.TYPE_SEGMENT:
            seg_no = Component.to_number(inst_name[-1])
        else:
            # No segment component: treat as a request for the first segment.
            seg_no = 0
        if seg_no < seg_cnt:
            app.put_data(inst_name, packets[seg_no],
                         final_block_id=Component.from_segment(seg_cnt - 1),
                         freshness_period=10000)

    app.run_forever()
async def pub_tests(app, names, seq):
    """Advertise each name once, then publish a wall-clock timestamp to every
    name forever, bumping the sequence number after each publication."""
    client = Pubsub(app)
    for name in names:
        try:
            await client.pubadv(name, redefine=False)
        except (InterestNack, InterestTimeout):
            # Cannot even advertise — give up entirely.
            print('Broker unreachable or interest timeout')
            sys.exit()
    while True:
        try:
            for name in names:
                # Human-readable timestamp as the payload.
                data = '{0:%Y/%m/%d %H:%M:%S}'.format(datetime.datetime.now())
                await client.pubdata(name, data, seq)
                print(f'Published {data} to {name}[{seq}]')
                seq += 1
        except (InterestNack, InterestTimeout):
            print('Broker unreachable or interest timeout')
            # Back off a little longer when the broker is unreachable.
            await asyncio.sleep(1)
        except Exception as e:
            # Log-and-continue for anything unexpected.
            print(f'{type(e).__name__} {str(e)}')
        finally:
            pass
        # Pacing delay between publication rounds.
        await asyncio.sleep(0.5)


if __name__ == "__main__":
    app = NDNApp()
    # Optional CLI argument: starting sequence number (default 1).
    start = int(sys.argv[1]) if len(sys.argv) > 1 else 1
    app.run_forever(after_start=pub_tests(app, ["/hello", "/adios/psdcnv1", "/byebye"], start))
import logging
logging.basicConfig(format='[{asctime}]{levelname}:{message}',
                    datefmt='%Y-%m-%d %H:%M:%S',
                    level=logging.INFO,
                    style='{')

app = NDNApp()


@app.route('/example/rpc')
def on_interest(name: FormalName, param: InterestParam, app_param: Optional[BinaryStr]):
    """RPC handler: sum the whitespace-separated integers in the Interest's
    application parameter and reply with the total."""
    # Fixed: guard before converting — bytes(None) raises TypeError, so an
    # Interest with no application parameter used to crash the handler.
    if app_param is None:
        print("<< No application parameter, dropped")
        return
    app_param = bytes(app_param)
    print(f'>> I: {Name.to_str(name)}, {param}, {app_param}')
    if not app_param:
        print("<< No application parameter, dropped")
        return
    s = sum(int(x) for x in app_param.split())
    content = str(s).encode()
    app.put_data(name, content=content, freshness_period=500)
    print(f'<< D: {Name.to_str(name)}')
    print(MetaInfo(freshness_period=500))
    print(f'Content: {content}')
    print('')


if __name__ == '__main__':
    app.run_forever()
data_name, meta_info, content = await app.express_interest( insterest_name, must_be_fresh=True, can_be_prefix=True, lifetime=6000) return data_name, meta_info, content except InterestNack as e: # A NACK is received print(f'Nacked with reason={e.reason}') except InterestTimeout: # Interest times out raise InterestTimeout except InterestCanceled: # Connection to NFD is broken print(f'Canceled') except ValidationFailure: # Validation failure print(f'Data failed to validate') async def run(): interest_name = "/ndn/gr/edu/mmlab1/%40GUEST/nikosft%40gmail.com/file1" print(f'{time.time() - start_time} \t sending interest') data_name, meta_info, content = await express_interest(interest_name) await metadata_received(interest_name, data_name, meta_info, content) app.shutdown() if __name__ == '__main__': app.run_forever(after_start=run())
from ndn.types import *
from ndn.app import NDNApp
from ndn.encoding import Name

app = NDNApp()


@app.route('/217B_Repo/catalog')
def on_interest(name, interest_param, application_param):
    """Reply to any catalog Interest with a fixed placeholder payload."""
    print(f'Received Interest Name: {Name.to_str(name)}')
    app.put_data(name, content=b'zixuan', freshness_period=10000)


async def main():
    # Nothing to do after startup; the route handler does all the work.
    pass


app.run_forever(after_start=main())
cmd_param.data_name = 'data1' cmd_param_bytes = cmd_param.encode() name = Name.from_str(catalog_name) name += [method] name += [str(gen_nonce())] try: _, _, data_bytes = await self.app.express_interest( name, app_param=cmd_param_bytes, must_be_fresh=True, can_be_prefix=False, lifetime=10000) data_recvd = bytes(data_bytes) assert bytes(repo_name) == data_recvd except InterestNack: print(">>>NACK") return None except InterestTimeout: print(">>>TIMEOUT") return None finally: app.shutdown() # return cmd_response if __name__ == "__main__": app = NDNApp(keychain=KeychainDigest()) intChecker = InterestChecker(app) app.run_forever(after_start=intChecker.check_interest("data2", "/catalog"))
from psdcnv2.clients import Pubsub
from ndn.app import NDNApp
import asyncio, random, sys


async def validation_test(app, names):
    """Publish random data under each name, then query the /test/# topic both
    without and with a service token, printing the matches each time."""
    client = Pubsub(app)
    for name in names:
        await client.pubadv(name, redefine=True)
        await client.pubdata(name, "Data-" + str(random.randint(1,100)))

    print("[Without service token]")
    for rn_name, dataname in await client.subtopic("/test/#"):
        print(f"{dataname}@{rn_name}")
    print()

    print("[With valid service token]")
    for rn_name, dataname in await client.subtopic("/test/#", servicetoken="hasta la vista"):
        print(f"{dataname}@{rn_name}")
    sys.exit()


if __name__ == '__main__':
    app = NDNApp()
    start = int(sys.argv[1]) if len(sys.argv) > 1 else 1
    app.run_forever(after_start=validation_test(app, ["/test/validation", "/test/without/validation"]))
class SVSyncBase_Thread(Thread):
    """Runs an SVSync instance inside its own thread with a private event loop.

    Subclasses implement function() to construct self.svs; callers use wait()
    to block until the sync instance exists (or startup has failed).
    """

    def __init__(self, groupPrefix: Name, nid: Name, updateCallback: Callable,
                 storage: Optional[Storage] = None, face: Optional[Face] = None,
                 keychain: Optional[Keychain] = None) -> None:
        logging.info(f'SVSync_Thread: Created thread to push SVS to.')
        Thread.__init__(self)
        self.groupPrefix = groupPrefix
        self.nid = nid
        self.updateCallback = updateCallback
        self.storage = storage
        self.face = face
        self.keychain = keychain
        self.svs = None       # set by the subclass's function()
        self.loop = None      # thread-private asyncio loop
        self.app = None
        self.failed = False   # flipped when NFD cannot be reached

    def wait(self):
        """Busy-wait until the SVSync instance exists; exit if startup failed."""
        # Fixed: identity comparison with None (PEP 8) instead of ==.
        while self.svs is None:
            time.sleep(0.001)
            if self.failed:
                sys.exit()

    def run(self) -> None:
        def loop_task():
            self.app = NDNApp(self.face, self.keychain)
            try:
                self.app.run_forever(after_start=self.function())
            except FileNotFoundError:
                print(f'Error: could not connect to NFD for SVS.')
                self.failed = True
                sys.exit()
        self.loop = aio.new_event_loop()
        aio.set_event_loop(self.loop)
        # NOTE(review): loop_task is a plain function, so app.run_forever()
        # executes synchronously at the create_task() call site and drives
        # this thread itself; confirm whether loop_task was meant to be async.
        self.loop.create_task(loop_task())
        self.loop.run_forever()

    async def function(self) -> None:
        """Subclass hook: build self.svs and keep the app alive."""
        raise NotImplementedError

    def missing_callback(self, missing_list: List[MissingData]) -> None:
        # Fan the missing-data notification out to the user's async callback.
        aio.ensure_future(self.updateCallback(self)(missing_list))

    def getSVSync(self) -> SVSyncBase:
        return self.svs

    async def fetchData(self, nid: Name, seqNum: int, retries: int = 0) -> Optional[bytes]:
        # Fixed: the fetched bytes were awaited but never returned, so callers
        # always received None despite the Optional[bytes] annotation.
        return await self.svs.fetchData(nid, seqNum, retries)

    def publishData(self, data: bytes) -> None:
        self.svs.publishData(data)

    def getCore(self) -> SVSyncCore:
        return self.svs.getCore()
matches = await client.subtopic(topic, exclude=exclude) if len(matches) == 0: return {} seq = int(seq) _values = {} for (rn_name, dataname) in matches: _, lst, _, _ = await client.submani(rn_name, dataname) if lst > 0: data = await client.subdata(rn_name, dataname, seq) # last element _idx = seq if seq > 0 else lst + seq _values[f"{dataname}[{_idx}]"] = data return _values async def ps_demo(app): # Make a Pubsub client client = Pubsub(app) # Publish some data await publish(client, '/b/new_world', 'Utopia or Distopia-move', 4) await publish(client, '/b/no/where', 'Nowhere fast-move', 3) # Find matches to a topic values = await subscribe(client, "/b/#") print(values) app.shutdown() if __name__ == "__main__": app = NDNApp() app.run_forever(after_start=ps_demo(app))