def prepare_loop(config_values, manager=None, idx=1):
    """Prepare an event loop, database and p2p services inside a spawned process."""
    from aleph.model import init_db
    from aleph.web import app
    from configmanager import Config
    from aleph.config import get_defaults
    from aleph.services.ipfs.common import get_ipfs_api
    from aleph.services.p2p import init_p2p, http
    from aleph.services import filestore

    # uvloop.install()

    # manager = NodeManager()
    # manager.start()

    if isinstance(manager, tuple):
        # Reconnect to the shared DBManager from its (address, authkey) pair
        # and route the filestore accessors through it.
        manager_info = manager
        DBManager.register("_set_value")
        DBManager.register("_get_value")
        manager = DBManager(address=manager_info[0], authkey=manager_info[1])
        manager.connect()
        filestore._set_value = function_proxy(manager, "_set_value")
        filestore._get_value = function_proxy(manager, "_get_value")

    http.SESSION = None
    # loop = asyncio.new_event_loop()
    # asyncio.set_event_loop(loop)
    loop = asyncio.get_event_loop()

    config = Config(schema=get_defaults())
    app["config"] = config
    config.load_values(config_values)

    init_db(config, ensure_indexes=False)
    loop.run_until_complete(get_ipfs_api(timeout=2, reset=True))
    tasks = loop.run_until_complete(init_p2p(config, listen=False, port_id=idx))
    return loop, tasks
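# A minimal sketch of the DBManager/function_proxy pair that prepare_loop
# relies on, assuming DBManager is a multiprocessing BaseManager subclass and
# function_proxy simply forwards calls to a method registered on it. The real
# helpers live elsewhere in the codebase; the details below are an assumption
# for illustration only.
from multiprocessing.managers import BaseManager


class DBManager(BaseManager):
    """Manager on which the filestore accessors are registered."""
    pass


def function_proxy(manager, method_name):
    """Return a plain callable that forwards to `manager.<method_name>`."""
    def proxy(*args, **kwargs):
        result = getattr(manager, method_name)(*args, **kwargs)
        # Calls to registered callables may return a proxy object; unwrap it
        # to get the underlying value (assumption: values are picklable).
        return result._getvalue() if hasattr(result, "_getvalue") else result
    return proxy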
def main(args):
    """Main entry point allowing external calls

    Args:
        args ([str]): command line parameter list
    """
    uvloop.install()
    args = parse_args(args)
    setup_logging(args.loglevel)
    LOGGER.info("Starting up.")

    config = Config(schema=get_defaults())
    app['config'] = config
    app.config = config
    config.aleph.port.value = args.port
    config.aleph.host.value = args.host

    if args.config_file is not None:
        app['config'].yaml.load(args.config_file)

    model.init_db(config, ensure_indexes=(not args.debug))
    LOGGER.info("Database initialized.")

    init_cors()
    setup_listeners(config)
    start_connector(config)

    loop = asyncio.get_event_loop()
    handler = app.make_handler()
    f = loop.create_server(handler,
                           config.aleph.host.value,
                           config.aleph.port.value)
    srv = loop.run_until_complete(f)
    LOGGER.info('Serving on %s', srv.sockets[0].getsockname())
    loop.run_forever()
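# A minimal sketch of how such an entry point is typically wired up from the
# command line (a common PyScaffold-style pattern; the `run` name and the
# console_scripts hookup are assumptions, not confirmed by this file):
import sys


def run():
    """Entry point for console_scripts, forwarding CLI arguments to main()."""
    main(sys.argv[1:])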
def main(args):
    """Main entry point allowing external calls

    Args:
        args ([str]): command line parameter list
    """
    args = parse_args(args)
    setup_logging(args.loglevel)

    if args.generate_key:
        LOGGER.info("Generating a key pair")
        generate_keypair(args.print_key, args.key_path)
        return

    LOGGER.info("Loading configuration")
    config = Config(schema=get_defaults())
    app['config'] = config

    if args.config_file is not None:
        LOGGER.debug("Loading config file '%s'", args.config_file)
        app['config'].yaml.load(args.config_file)

    if (not config.p2p.key.value) and args.key_path:
        LOGGER.debug("Loading key pair from file")
        with open(args.key_path, 'r') as key_file:
            config.p2p.key.value = key_file.read()

    if not config.p2p.key.value:
        LOGGER.critical("Node key cannot be empty")
        return

    if args.port:
        config.aleph.port.value = args.port
    if args.host:
        config.aleph.host.value = args.host

    if args.sentry_disabled:
        LOGGER.info("Sentry disabled by CLI arguments")
    elif app['config'].sentry.dsn.value:
        sentry_sdk.init(
            dsn=app['config'].sentry.dsn.value,
            traces_sample_rate=app['config'].sentry.traces_sample_rate.value,
            ignore_errors=[KeyboardInterrupt],
        )
        LOGGER.info("Sentry enabled")

    config_values = config.dump_values()

    LOGGER.debug("Initializing database")
    model.init_db(config, ensure_indexes=(not args.debug))
    LOGGER.info("Database initialized.")

    # filestore.init_store(config)
    # LOGGER.info("File store initialized.")

    init_cors()

    # FIXME: This is stateful and process-dependent
    set_start_method('spawn')

    manager = None
    if config.storage.engine.value == 'rocksdb':
        # rocksdb doesn't support multiprocess/multithread
        manager = prepare_manager(config_values)

    with Manager() as shared_memory_manager:
        tasks: List[Coroutine] = []
        # This dictionary is shared between all the processes so we can expose
        # some internal stats. Handle with care: it is shared between processes.
        shared_stats = shared_memory_manager.dict()

        if not args.no_jobs:
            LOGGER.debug("Creating jobs")
            tasks += start_jobs(config,
                                shared_stats=shared_stats,
                                manager=manager,
                                use_processes=True)

        loop = asyncio.get_event_loop()
        # handler = app.make_handler(loop=loop)
        LOGGER.debug("Initializing p2p")
        f = p2p.init_p2p(config)
        p2p_tasks = loop.run_until_complete(f)
        tasks += p2p_tasks
        LOGGER.debug("Initialized p2p")

        LOGGER.debug("Initializing listeners")
        tasks += listener_tasks(config)
        tasks += connector_tasks(config, outgoing=(not args.no_commit))
        LOGGER.debug("Initialized listeners")

        # Needs to be passed here, otherwise it gets lost in the fork
        from aleph.services.p2p import manager as p2p_manager
        extra_web_config = {
            'public_adresses': p2p_manager.public_adresses
        }

        p1 = Process(
            target=run_server_coroutine,
            args=(
                config_values,
                config.p2p.host.value,
                config.p2p.http_port.value,
                manager and (manager._address, manager._authkey) or None,
                3,
                shared_stats,
                args.sentry_disabled is False and app['config'].sentry.dsn.value,
                extra_web_config,
            ))
        p2 = Process(
            target=run_server_coroutine,
            args=(
                config_values,
                config.aleph.host.value,
                config.aleph.port.value,
                manager and (manager._address, manager._authkey) or None,
                4,
                shared_stats,
                args.sentry_disabled is False and app['config'].sentry.dsn.value,
                extra_web_config,
            ))
        p1.start()
        p2.start()
        LOGGER.debug("Started processes")

        # fp2p = loop.create_server(handler,
        #                           config.p2p.host.value,
        #                           config.p2p.http_port.value)
        # srvp2p = loop.run_until_complete(fp2p)
        # LOGGER.info('Serving on %s', srvp2p.sockets[0].getsockname())

        # f = loop.create_server(handler,
        #                        config.aleph.host.value,
        #                        config.aleph.port.value)
        # srv = loop.run_until_complete(f)
        # LOGGER.info('Serving on %s', srv.sockets[0].getsockname())

        LOGGER.debug("Running event loop")
        loop.run_until_complete(asyncio.gather(*tasks))
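# A minimal sketch of what `prepare_manager` could look like, assuming it
# starts a DBManager server process that owns the single RocksDB handle and
# exposes the filestore accessors to child processes; prepare_loop above
# reconnects to it via its (address, authkey) pair. The real helper may
# differ; everything here is an illustrative assumption.
def prepare_manager_sketch(config_values):
    from aleph.services import filestore

    # config_values would be used to initialize the filestore in the manager
    # process (omitted here). Server side: register the real callables;
    # children re-register the same names without a callable and connect
    # with DBManager.connect(), as prepare_loop does.
    DBManager.register("_set_value", callable=filestore._set_value)
    DBManager.register("_get_value", callable=filestore._get_value)
    manager = DBManager()
    manager.start()
    return manager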
def main(args):
    """Main entry point allowing external calls

    Args:
        args ([str]): command line parameter list
    """
    # uvloop.install()
    args = parse_args(args)

    if args.generate_key:
        setup_logging(logging.INFO)
        generate_keypair(print_info=True)
        return

    setup_logging(args.loglevel)
    LOGGER.info("Starting up.")

    config = Config(schema=get_defaults())
    app['config'] = config
    config.aleph.port.value = args.port
    config.aleph.host.value = args.host

    if args.config_file is not None:
        app['config'].yaml.load(args.config_file)

    config_values = config.dump_values()

    model.init_db(config, ensure_indexes=(not args.debug))
    LOGGER.info("Database initialized.")

    # filestore.init_store(config)
    # LOGGER.info("File store initialized.")

    init_cors()

    set_start_method('spawn')

    manager = None
    if config.storage.engine.value == 'rocksdb':
        # rocksdb doesn't support multiprocess/multithread
        manager = prepare_manager(config_values)

    if not args.no_jobs:
        start_jobs(config, manager=manager, use_processes=False)

    loop = asyncio.get_event_loop()
    # handler = app.make_handler(loop=loop)
    f = p2p.init_p2p(config)
    host = loop.run_until_complete(f)

    setup_listeners(config)
    start_connector(config, outgoing=(not args.no_commit))

    p1 = Process(target=run_server, args=(
        config_values,
        config.p2p.host.value,
        config.p2p.http_port.value,
        manager and (manager._address, manager._authkey) or None,
        3))
    p2 = Process(target=run_server, args=(
        config_values,
        config.aleph.host.value,
        config.aleph.port.value,
        manager and (manager._address, manager._authkey) or None,
        4))
    p1.start()
    p2.start()

    # fp2p = loop.create_server(handler,
    #                           config.p2p.host.value,
    #                           config.p2p.http_port.value)
    # srvp2p = loop.run_until_complete(fp2p)
    # LOGGER.info('Serving on %s', srvp2p.sockets[0].getsockname())

    # f = loop.create_server(handler,
    #                        config.aleph.host.value,
    #                        config.aleph.port.value)
    # srv = loop.run_until_complete(f)
    # LOGGER.info('Serving on %s', srv.sockets[0].getsockname())

    loop.run_forever()
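# A minimal sketch of a run_server-style Process target, assuming it uses
# prepare_loop to rebuild its own event loop, database connection and p2p
# client in the spawned process, then serves the aiohttp app on the given
# host/port. The real run_server may differ; this only illustrates the
# (config_values, host, port, manager_info, idx) tuple passed above.
def run_server_sketch(config_values, host, port, manager_info, idx):
    from aleph.web import app

    # prepare_loop reconnects to the shared DBManager when manager_info is an
    # (address, authkey) tuple, and returns the p2p background coroutines.
    loop, tasks = prepare_loop(config_values, manager=manager_info, idx=idx)

    # Schedule the p2p background tasks alongside the web server.
    for task in tasks:
        asyncio.ensure_future(task, loop=loop)

    handler = app.make_handler()
    server = loop.run_until_complete(loop.create_server(handler, host, port))
    LOGGER.info('Serving on %s', server.sockets[0].getsockname())
    loop.run_forever()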