def main() -> int:
    """Assemble the repo from command-line/config options and run it.

    Returns:
        0 on exit, including when the connection to NFD fails.
    """
    cmdline_args = process_cmd_opts()
    config = process_config(cmdline_args)
    # NOTE(review): dumps the whole config to stdout before logging is set up —
    # looks like a debug aid; confirm it is intentional.
    print(config)
    config_logging(config['logging_config'])

    storage = create_storage(config['db_config'])

    app = NDNApp()
    pb = PubSub(app)
    read_handle = ReadHandle(app, storage, config)
    write_handle = WriteCommandHandle(app, storage, pb, read_handle, config)
    delete_handle = DeleteCommandHandle(app, storage, pb, read_handle, config)
    tcp_bulk_insert_handle = TcpBulkInsertHandle(storage, read_handle, config)

    repo = Repo(app, storage, read_handle, write_handle, delete_handle,
                tcp_bulk_insert_handle, config)
    # Schedule the repo's listener on the loop started by run_forever().
    aio.ensure_future(repo.listen())

    try:
        app.run_forever()
    except FileNotFoundError:
        print('Error: could not connect to NFD.')
    return 0
def main():
    """Command-line delete client: request deletion of a segment range.

    Fixes:
    - argparse description said 'putfile' (copy-paste from the putfile
      client) although this program drives run_delete_client().
    - run_forever() now guards against FileNotFoundError (NFD socket
      missing), consistent with the sibling client entry points.
    """
    parser = argparse.ArgumentParser(description='delfile')
    parser.add_argument('-r', '--repo_name', required=True, help='Name of repo')
    parser.add_argument('-p', '--prefix', required=True, help='Prefix of data')
    parser.add_argument('-s', '--start_block_id', required=True,
                        help='Start Block ID')
    parser.add_argument('-e', '--end_block_id', required=True,
                        help='End Block ID')
    args = parser.parse_args()

    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)

    app = NDNApp()
    try:
        # run_delete_client() calls app.shutdown() when the delete completes.
        app.run_forever(
            after_start=run_delete_client(app,
                                          repo_name=args.repo_name,
                                          prefix=args.prefix,
                                          start_block_id=args.start_block_id,
                                          end_block_id=args.end_block_id))
    except FileNotFoundError:
        print('Error: could not connect to NFD.')
def main():
    """Run the catalog service until interrupted or NFD becomes unreachable.

    Returns:
        0 on exit.
    """
    arg_parser = argparse.ArgumentParser(description='python catalog.py')
    arg_parser.add_argument('-d', '--database_file', required=True,
                            help='Path to (sqlite3) database file')
    arg_parser.add_argument('-p', '--prefix', required=True,
                            help='Prefix of Catalog ("/217B/catalog")')
    options = arg_parser.parse_args()

    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)

    app = NDNApp()
    # Serves catalog commands; listens on /<prefix>.
    handler = CommandHandle(app, options.prefix, options.database_file)
    handler.listen()

    try:
        app.run_forever()
    except FileNotFoundError:
        logging.error('Error: could not connect to NFD.\n')
    return 0
def main():
    """Segment a file and serve it as versioned, segmented Data packets.

    Fix: replaced the bare ``exit()`` builtin with ``sys.exit()`` — the
    builtin comes from the ``site`` module and is not guaranteed to exist
    (e.g. under ``python -S``); ``sys`` is already in use here.
    """
    if len(sys.argv) <= 2:
        print(f'Usage: {sys.argv[0]} <name> <file>')
        sys.exit(0)

    logging.basicConfig(format='[{asctime}]{levelname}:{message}',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO,
                        style='{')

    app = NDNApp()

    # Version the served name with the current timestamp.
    name = Name.normalize(sys.argv[1])
    name.append(Component.from_version(timestamp()))

    with open(sys.argv[2], 'rb') as f:
        data = f.read()

    # Pre-build every segment so interests are answered from memory.
    seg_cnt = (len(data) + SEGMENT_SIZE - 1) // SEGMENT_SIZE
    packets = [app.prepare_data(name + [Component.from_segment(i)],
                                data[i*SEGMENT_SIZE:(i+1)*SEGMENT_SIZE],
                                freshness_period=10000,
                                final_block_id=Component.from_segment(seg_cnt - 1))
               for i in range(seg_cnt)]
    print(f'Created {seg_cnt} chunks under name {Name.to_str(name)}')

    @app.route(name)
    def on_interest(int_name, _int_param, _app_param):
        # Interests without an explicit segment component get segment 0.
        if Component.get_type(int_name[-1]) == Component.TYPE_SEGMENT:
            seg_no = Component.to_number(int_name[-1])
        else:
            seg_no = 0
        if seg_no < seg_cnt:
            app.put_raw_packet(packets[seg_no])

    app.run_forever()
def main() -> int:
    """Start a repo configured from the YAML config file.

    Returns:
        0 after a clean shutdown (Ctrl-C).
    """
    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)
    try:
        config = get_yaml()
        logging.info(config)

        app = NDNApp()
        storage = SqliteStorage()
        read_handle = ReadHandle(app, storage)
        write_handle = WriteCommandHandle(app, storage, read_handle)
        delete_handle = DeleteCommandHandle(app, storage)
        tcp_bulk_insert_handle = TcpBulkInsertHandle(
            storage, read_handle,
            config['tcp_bulk_insert']['addr'],
            config['tcp_bulk_insert']['port'])

        repo = Repo(Name.from_str(config['repo_config']['repo_name']),
                    app, storage, read_handle, write_handle, delete_handle,
                    tcp_bulk_insert_handle)
        repo.listen()
        app.run_forever()
    except KeyboardInterrupt:
        # Ctrl-C is the normal way to stop the repo; exit quietly.
        pass
    return 0
async def run_getfile_client(app: NDNApp, **kwargs):
    """Fetch a file from the repo, then shut the app down.

    Runs inside the app's event loop; calling app.shutdown() here is what
    ends the caller's run_forever().
    """
    getfile = GetfileClient(app, kwargs['repo_name'])
    await getfile.fetch_file(kwargs['name_at_repo'])
    app.shutdown()
async def run_putfile_client(app: NDNApp, **kwargs):
    """Insert a file into the repo, then shut the app down.

    Runs inside the app's event loop; calling app.shutdown() here is what
    ends the caller's run_forever().
    """
    repo_name = Name.from_str(kwargs['repo_name'])
    putfile = PutfileClient(app, repo_name)
    await putfile.insert_file(kwargs['file_path'],
                              Name.from_str(kwargs['name_at_repo']))
    app.shutdown()
def loop_task():
    """Thread body: create the NDNApp and drive self.function() to completion.

    Fix: the error message was an f-string with no placeholders (F541);
    it is now a plain string literal with identical content.
    """
    self.app = NDNApp(self.face, self.keychain)
    try:
        self.app.run_forever(after_start=self.function())
    except FileNotFoundError:
        print('Error: could not connect to NFD for SVS.')
        # Flag the failure for the owner and terminate this thread.
        self.failed = True
        sys.exit()
def main():
    """Run a repo node: register command handlers and start the update loop.

    Returns:
        0 on exit.
    """
    parser = argparse.ArgumentParser(description='python reponode.py')
    parser.add_argument('-r', '--repo_prefix', required=True,
                        help='Prefix of Repo ("/217B/repo")')
    parser.add_argument('-c', '--catalog_prefix', required=True,
                        help='Prefix of Catalog ("/217B/catalog")')
    parser.add_argument('-n', '--node_name', required=True,
                        help='Node name ("node/A")')
    parser.add_argument('-p', '--period', type=int, default=10,
                        help='Update period in second')
    parser.add_argument('-f', '--files', nargs='+', default=[],
                        help='List of uncorrupted files')
    args = parser.parse_args()

    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)

    app = NDNApp(face=None, keychain=KeychainDigest())
    reponode_client = ReponodeClient(app)
    command_client = CommandClient(app, reponode_client, args.repo_prefix,
                                   args.catalog_prefix, args.node_name,
                                   args.period)
    insert_handle = InsertHandle(app, command_client, args.node_name,
                                 args.repo_prefix)
    delete_handle = DeleteHandle(app, command_client, args.node_name,
                                 args.repo_prefix)
    recall_handle = RecallHandle(app, command_client, args.node_name,
                                 args.repo_prefix)
    # TODO: start command_client's periodically timer

    # Each handle listens on /<repo_prefix>/<node_name>/<verb>.
    insert_handle.listen()   # .../insert
    delete_handle.listen()   # .../delete
    recall_handle.listen()   # .../recall

    try:
        app.run_forever(
            after_start=cmd(command_client, args.period, args.files))
    except FileNotFoundError:
        logging.error('Error: could not connect to NFD.\n')
    return 0
def main():
    """Configure debug logging to stderr, load .env, and run the NDN app."""
    logging.basicConfig(format='[{asctime}]{levelname}:{message}',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.DEBUG,
                        style='{',
                        stream=sys.stderr)
    # Pull environment overrides from a local .env file, if present.
    load_dotenv()

    app = NDNApp()
    app.run_forever(after_start=after_start(app))
async def run_delete_client(app: NDNApp, **kwargs):
    """Ask the repo to delete a stored file, then shut the app down.

    Runs inside the app's event loop; calling app.shutdown() here is what
    ends the caller's run_forever().
    """
    delfile = DeleteClient(app, kwargs['client_prefix'], kwargs['repo_name'])
    await delfile.delete_file(kwargs['name_at_repo'],
                              kwargs['start_block_id'],
                              kwargs['end_block_id'])
    app.shutdown()
def loop_task() -> None:
    """Thread body: build the NDNApp, prepare disk storage, run the SVS function.

    Fix: replaced ``issubclass(type(self.storage), DiskStorage)`` with the
    idiomatic ``isinstance(self.storage, DiskStorage)`` — equivalent for
    instances, and what linters (and PEP 8) recommend.
    """
    self.app = NDNApp(self.face, self.keychain)
    # Disk-backed storage needs a one-time explicit initialization.
    if isinstance(self.storage, DiskStorage):
        self.storage.initialize()
    try:
        self.app.run_forever(after_start=self.function())
    except (FileNotFoundError, ConnectionRefusedError):
        print("Error: could not connect to NFD for SVS.")
        # Flag the failure for the owner and terminate this thread.
        self.failed = True
        sys.exit()
async def run_putfile_client(app: NDNApp, **kwargs):
    """Insert a file into the repo, then shut the app down.

    Runs inside the app's event loop; calling app.shutdown() here is what
    ends the caller's run_forever().
    """
    putfile = PutfileClient(app, kwargs['client_prefix'], kwargs['repo_name'])
    await putfile.insert_file(kwargs['file_path'],
                              kwargs['name_at_repo'],
                              kwargs['segment_size'],
                              kwargs['freshness_period'],
                              kwargs['cpu_count'])
    app.shutdown()
def startup(self):
    """Launch a repo subprocess with a temp config, then run the test app."""
    self.files_to_cleanup = []

    # The temp config (and the sqlite db it points at) are removed later.
    tmp_cfg_path = self.create_tmp_cfg()
    self.files_to_cleanup.append(tmp_cfg_path)
    # NOTE(review): sqlite3_path must be a module-level name — confirm.
    self.files_to_cleanup.append(sqlite3_path)

    self.repo_proc = subprocess.Popen(['ndn-python-repo', '-c', tmp_cfg_path])

    self.app = NDNApp(face=None, keychain=KeychainDigest())
    self.app.run_forever(after_start=self.run())
async def run_check(app: NDNApp, **kwargs):
    """Query the repo for an insert process's status, then shut the app down.

    Runs inside the app's event loop; calling app.shutdown() here is what
    ends the caller's run_forever().
    """
    checker = CommandChecker(app)
    response = await checker.check_insert(kwargs['repo_name'],
                                          kwargs['process_id'])
    # The repo may not answer (e.g. unknown process id); print only on success.
    if response:
        status_code = response.status_code
        print('Status Code: {}'.format(status_code))
    app.shutdown()
async def run_publisher(app: NDNApp, publisher_prefix: NonStrictName):
    """Publish one timestamped message to /topic_foo, then shut down."""
    pb = PubSub(app, publisher_prefix)
    await pb.wait_for_ready()

    topic = Name.from_str('/topic_foo')
    msg = f'pubsub message generated at {str(datetime.datetime.now())}'.encode()
    pb.publish(topic, msg)

    # Give the subscriber time to fetch the message before tearing down.
    await aio.sleep(10)
    app.shutdown()
def main():
    """Run the example publisher under the /test_publisher prefix."""
    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)

    publisher_prefix = Name.from_str('/test_publisher')
    app = NDNApp()
    try:
        # run_publisher() shuts the app down after publishing.
        app.run_forever(after_start=run_publisher(app, publisher_prefix))
    except FileNotFoundError:
        logging.warning('Error: could not connect to NFD')
def main():
    """Dispatch an admin command to a pub/sub broker.

    Fix: the usage string marked ``<command>`` as optional, but the code
    requires it — ``sys.argv[1:3]`` is unpacked into two names and the
    length check rejects fewer than 3 arguments. The usage now matches.
    """
    if len(sys.argv) < 3:
        print("Usage: python3 admin <broker> <command> [<args>...]")
        sys.exit(1)

    app = NDNApp()
    keeper = PSKCmd(app)
    prefix, cmd = sys.argv[1:3]
    # Remaining argv entries are passed through to the command handler.
    args = sys.argv[3:]
    try:
        app.run_forever(after_start=handle(app, keeper, cmd, prefix, args))
    except (InterestNack, InterestTimeout):
        print(f"Broker {prefix} unreachable or timeout")
def main():
    """Command-line putfile client: insert a local file into a repo."""
    parser = argparse.ArgumentParser(description='putfile')
    parser.add_argument('-r', '--repo_name', required=True,
                        help='Name of repo')
    parser.add_argument('-f', '--file_path', required=True,
                        help='Path to input file')
    parser.add_argument('-n', '--name_at_repo', required=True,
                        help='Prefix used to store file at Repo')
    parser.add_argument('--client_prefix', required=False,
                        default='/putfile_client',
                        help='prefix of this client')
    parser.add_argument('--segment_size', type=int, required=False,
                        default=8000, help='Size of each data packet')
    parser.add_argument('--freshness_period', type=int, required=False,
                        default=0, help='Data packet\'s freshness period')
    parser.add_argument('--cpu_count', type=int, required=False,
                        default=multiprocessing.cpu_count(),
                        help='Number of cores to use')
    args = parser.parse_args()

    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)

    app = NDNApp(face=None, keychain=KeychainDigest())
    try:
        # run_putfile_client() shuts the app down when the insert completes.
        app.run_forever(after_start=run_putfile_client(
            app,
            repo_name=Name.from_str(args.repo_name),
            file_path=args.file_path,
            name_at_repo=Name.from_str(args.name_at_repo),
            client_prefix=Name.from_str(args.client_prefix),
            segment_size=args.segment_size,
            freshness_period=args.freshness_period,
            cpu_count=args.cpu_count))
    except FileNotFoundError:
        print('Error: could not connect to NFD.')
def main():
    """Command-line delfile client: delete a file (or segment range) from a repo.

    Fix: ``args.register_prefix == None`` replaced with the idiomatic
    identity comparison ``is None`` (PEP 8 / E711).
    """
    parser = argparse.ArgumentParser(description='delfile')
    parser.add_argument('-r', '--repo_name', required=True,
                        help='Name of repo')
    parser.add_argument('-n', '--name_at_repo', required=True,
                        help='Name used to store file at Repo')
    parser.add_argument('-s', '--start_block_id', required=False,
                        help='Start Block ID')
    parser.add_argument('-e', '--end_block_id', required=False,
                        help='End Block ID')
    parser.add_argument('--client_prefix', required=False,
                        default='/delfile_client',
                        help='prefix of this client')
    parser.add_argument('--register_prefix', default=None,
                        help='The prefix repo should register')
    args = parser.parse_args()

    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)

    # Optional block ids arrive as strings; convert, or leave as None.
    start_block_id = int(args.start_block_id) if args.start_block_id else None
    end_block_id = int(args.end_block_id) if args.end_block_id else None
    # Default the registered prefix to the stored name.
    if args.register_prefix is None:
        args.register_prefix = args.name_at_repo
    args.register_prefix = Name.from_str(args.register_prefix)

    app = NDNApp()
    try:
        # run_delete_client() shuts the app down when the delete completes.
        app.run_forever(after_start=run_delete_client(
            app,
            repo_name=Name.from_str(args.repo_name),
            name_at_repo=Name.from_str(args.name_at_repo),
            start_block_id=start_block_id,
            end_block_id=end_block_id,
            client_prefix=Name.from_str(args.client_prefix),
            register_prefix=args.register_prefix))
    except FileNotFoundError:
        print('Error: could not connect to NFD.')
def main():
    """Check the status of a pending segmented-insert process in the repo."""
    parser = argparse.ArgumentParser(description='segmented insert client')
    parser.add_argument('-r', '--repo_name', required=True,
                        help='Name of repo')
    parser.add_argument('-p', '--process_id', required=True,
                        help="Process ID")
    args = parser.parse_args()

    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)

    app = NDNApp()
    # run_check() prints the status and shuts the app down.
    app.run_forever(
        after_start=run_check(app,
                              repo_name=Name.from_str(args.repo_name),
                              process_id=int(args.process_id)))
def main():
    """Send an insert/delete/recall command (with encoded datainfo) to a node.

    Returns:
        0 on exit.
    """
    parser = argparse.ArgumentParser(description='python client.py')
    parser.add_argument('-n', '--node_prefix', required=True,
                        help='Prefix of catalog ("/217B/repo/node/A")')
    parser.add_argument('-c', '--command', default='insert',
                        choices=['insert', 'delete', 'recall'],
                        help='Command Verb')
    parser.add_argument('-d', '--data_name', required=True,
                        help='data name ("/foo/bar/1.txt")')
    parser.add_argument('-s', '--hash', required=True,
                        help='data hash ("1bd109fe")')
    parser.add_argument('-o', '--desired_copies', type=int, default=3,
                        help='desired copies')
    args = parser.parse_args()

    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)

    app = NDNApp(face=None, keychain=KeychainDigest())

    # Command name layout: /<node_prefix>/<command>/<encoded datainfo>.
    name = Name.from_str(args.node_prefix)
    name.append(Component.from_str(args.command))

    datainfo = DatainfoTlvModel()
    datainfo.data_name = args.data_name.encode()
    datainfo.hash = args.hash.encode()
    datainfo.desired_copies = args.desired_copies
    name.append(Component.from_bytes(datainfo.encode()))
    # logging.info(name)

    try:
        app.run_forever(after_start=send(app, name))
    except FileNotFoundError:
        logging.error('Error: could not connect to NFD.\n')
    return 0
async def main(app: NDNApp):
    """Fetch segments 0..161 of /test1.pdf concurrently and print their names.

    Runs inside the app's event loop; calling app.shutdown() here is what
    ends the caller's run_forever().

    :param app: NDNApp
    """
    # Allow at most 20 outstanding interests at a time.
    semaphore = aio.Semaphore(20)
    async for data_bytes in concurrent_fetcher(app,
                                               Name.from_str('/test1.pdf'),
                                               0, 161, semaphore):
        (data_name, meta_info, content, sig) = \
            ndn_format_0_3.parse_data(data_bytes, with_tl=False)
        print(Name.to_str(data_name))
    app.shutdown()
def main():
    """git-remote helper entry point: bridge git to an NDN-hosted repo.

    Returns:
        -1 on a usage error; otherwise runs until the app stops.
    """
    if len(sys.argv) < 3:
        print("Usage:", sys.argv[0], "remote-name url", file=sys.stderr)
        return -1

    # Locate the local .git directory; git sets GIT_DIR for remote helpers.
    local_repo_path = os.environ.get('GIT_DIR',
                                     os.path.join(os.getcwd(), ".git"))

    # The repo name is the last component of the remote URL/prefix.
    repo_prefix = sys.argv[2]
    repo_name = repo_prefix.split('/')[-1]
    git_repo = GitRepo(repo_name, local_repo_path)

    app = NDNApp()
    app.run_forever(after_start=after_start(app, repo_prefix, repo_name,
                                            git_repo, local_repo_path))
def __init__(self, emit_func):
    """Initialize controller state.

    :param emit_func: callback used to emit events/messages outward
        (stored as ``self.emit``; semantics defined by the caller).
    """
    self.emit = emit_func
    # Run/connectivity flags.
    self.running = True
    self.networking_ready = False
    self.listen_to_boot_request = False
    self.listen_to_cert_request = False
    self.boot_state = None
    self.app = NDNApp()
    # System-wide identity/trust info; populated later, not here.
    self.system_prefix = None
    self.system_anchor = None
    self.db = None
    # Registries of known devices, services, access rules, and secrets.
    self.device_list = DeviceList()
    self.service_list = ServiceList()
    self.access_list = AccessList()
    self.shared_secret_list = SharedSecrets()
def main():
    """Command-line getfile client: fetch a file stored in a repo."""
    parser = argparse.ArgumentParser(description='getfile')
    parser.add_argument('-r', '--repo_name', required=True,
                        help='Name of repo')
    parser.add_argument('-n', '--name_at_repo', required=True,
                        help='Name used to store file at Repo')
    args = parser.parse_args()

    # DEBUG level: this client logs the full fetch exchange.
    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.DEBUG)

    app = NDNApp()
    try:
        # run_getfile_client() shuts the app down when the fetch completes.
        app.run_forever(
            after_start=run_getfile_client(
                app,
                repo_name=Name.from_str(args.repo_name),
                name_at_repo=Name.from_str(args.name_at_repo)))
    except FileNotFoundError:
        print('Error: could not connect to NFD.')
async def run_putfile_client(app: NDNApp, **kwargs):
    """Insert a file into the repo, then shut the app down.

    Runs inside the app's event loop; calling app.shutdown() here is what
    ends the caller's run_forever().

    Fix: removed a leftover ``print(kwargs)`` debug statement that dumped
    the raw argument dict to stdout on every run.
    """
    client = PutfileClient(app=app,
                           prefix=kwargs['client_prefix'],
                           repo_name=kwargs['repo_name'])
    await client.insert_file(file_path=kwargs['file_path'],
                             name_at_repo=kwargs['name_at_repo'],
                             segment_size=kwargs['segment_size'],
                             freshness_period=kwargs['freshness_period'],
                             cpu_count=kwargs['cpu_count'],
                             forwarding_hint=kwargs['forwarding_hint'],
                             register_prefix=kwargs['register_prefix'])
    app.shutdown()
class NDNAppTestSuite:
    """Base harness: run an NDNApp against a DummyFace-scripted exchange.

    Subclasses script the wire traffic in face_proc() and the application
    behavior in app_main().
    """
    app = None

    def test_main(self):
        """Wire a DummyFace to a fresh NDNApp and run app_main() to completion."""
        dummy_face = DummyFace(self.face_proc)
        keychain = KeychainDigest()
        self.app = NDNApp(dummy_face, keychain)
        # The dummy face needs a back-reference to the app it drives.
        dummy_face.app = self.app
        self.app.run_forever(after_start=self.app_main())

    @abc.abstractmethod
    async def face_proc(self, face: DummyFace):
        """Script of packets the dummy face sends and expects."""
        pass

    @abc.abstractmethod
    async def app_main(self):
        """Test body executed after the app starts."""
        pass
def _create_packets(name, content, freshness_period, final_block_id):
    """Worker for parallelized prepare_data().

    Defined at module top level so it can be pickled by multiprocessing.
    Each worker process lazily builds its own NDNApp, because the keychain's
    sqlite3 connection is not safe to share across processes.
    """
    global app_to_create_packet
    if app_to_create_packet is None:
        app_to_create_packet = NDNApp()

    packet = app_to_create_packet.prepare_data(
        name, content,
        freshness_period=freshness_period,
        final_block_id=final_block_id)
    return bytes(packet)
def main():
    """Command-line getfile client (simple variant: raw-string names)."""
    parser = argparse.ArgumentParser(description='getfile')
    parser.add_argument('-r', '--repo_name', required=True,
                        help='Name of repo')
    parser.add_argument('-n', '--name_at_repo', required=True,
                        help='Name used to store file at Repo')
    args = parser.parse_args()

    logging.basicConfig(format='[%(asctime)s]%(levelname)s:%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO)

    app = NDNApp()
    # run_getfile_client() shuts the app down when the fetch completes.
    app.run_forever(
        after_start=run_getfile_client(app,
                                       repo_name=args.repo_name,
                                       name_at_repo=args.name_at_repo))