async def app(event_loop, test_ns, test_db, unused_tcp_port):
    """For tests that do not require an actual server to be running."""
    app = web.Application(middlewares=[
        exception_middleware,
        api_middleware,
    ])
    config = load_config(argv=[], extra_args_funcs=(gw_args, ))
    app['config'] = config
    config.debug = True
    # Basic overrides — adjust if local backing servers listen on
    # different port numbers.
    config.redis_addr = host_port_pair(os.environ['BACKEND_REDIS_ADDR'])
    config.db_addr = host_port_pair(os.environ['BACKEND_DB_ADDR'])
    config.db_name = test_db
    config.docker_registry = 'lablup'
    # Extra overrides for the test session.
    config.namespace = test_ns
    config.heartbeat_timeout = 10.0
    config.service_ip = '127.0.0.1'
    config.service_port = unused_tcp_port
    config.verbose = False
    # NOTE: SSL setup intentionally disabled for these tests; see the
    # server-based fixtures for the cert/key wiring when HTTPS is needed.
    app['pidx'] = 0
    return app
async def app(event_loop, test_ns, test_db, unused_tcp_port_factory):
    """For tests that do not require an actual server to be running."""
    app = web.Application(middlewares=[
        exception_middleware,
        api_middleware,
    ])
    cfg = load_config()
    # Override settings for the test session before attaching to the app.
    cfg['db']['name'] = test_db
    cfg['etcd']['namespace'] = test_ns
    cfg['manager']['num-proc'] = 2
    cfg['manager']['heartbeat-timeout'] = 10.0
    cfg['manager']['service-addr'] = HostPortPair(
        '127.0.0.1', unused_tcp_port_factory())
    app['config'] = cfg
    # NOTE: SSL setup intentionally disabled for these tests; see the
    # server-based fixtures for the cert/key wiring when HTTPS is needed.
    app['pidx'] = 0
    return app
def main(ctx, config_path, debug):
    """CLI entry point: load config, set up logging, and build the context."""
    cfg = load_config(config_path)
    setproctitle(f"backend.ai: manager.cli {cfg['etcd']['namespace']}")
    # CLI invocations should not write to log files.
    drivers = cfg['logging']['drivers']
    if 'file' in drivers:
        drivers.remove('file')
    ctx.obj = CLIContext(
        logger=Logger(cfg['logging']),
        config=cfg,
    )
def test_args_parse_by_load_config():
    """Round-trip: CLI argv values must survive load_config() parsing."""
    # Basic argument inputs.
    agent_port = 6003
    redis_addr = '127.0.0.1:6381'
    db_addr = '127.0.0.1:5434'
    db_name = 'backendai-test'
    db_user = '******'
    db_password = '******'
    # Extra (gateway) argument inputs.
    namespace = 'local-test'
    etcd_addr = '127.0.0.1:2381'
    events_port = 5002
    opts = {
        '--agent-port': str(agent_port),
        '--redis-addr': redis_addr,
        '--db-addr': db_addr,
        '--db-name': db_name,
        '--db-user': db_user,
        '--db-password': db_password,
        '--namespace': namespace,
        '--etcd-addr': etcd_addr,
        '--events-port': str(events_port),
    }
    argv = [token for pair in opts.items() for token in pair]
    args = load_config(argv, extra_args_funcs=(gw_args, ))
    # Explicitly supplied values come back parsed/typed.
    assert args.agent_port == agent_port
    assert args.redis_addr == host_port_pair(redis_addr)
    assert args.db_addr == host_port_pair(db_addr)
    assert args.db_name == db_name
    assert args.db_user == db_user
    assert args.db_password == db_password
    assert args.namespace == namespace
    assert args.etcd_addr == host_port_pair(etcd_addr)
    assert args.events_port == port_no(events_port)
    # Unspecified options fall back to their documented defaults.
    assert args.docker_registry is None
    assert args.heartbeat_timeout == 5.0
    assert args.service_ip == ip_address('0.0.0.0')
    assert args.service_port == 0
def local_config(test_id, test_db):
    """Produce a LocalConfig tuned for a single-process test run."""
    cfg = load_config()
    assert isinstance(cfg, LocalConfig)
    # Point at the per-test database and run everything in one process.
    cfg['db']['name'] = test_db
    cfg['manager']['num-proc'] = 1
    cfg['manager']['service-addr'] = HostPortPair('localhost', 29100)
    # In normal setups this is read from etcd; inject a local Redis
    # endpoint directly for tests.
    redis_conf = {
        'addr': {
            'host': '127.0.0.1',
            'port': '6379'
        },
    }
    cfg['redis'] = redis_config_iv.check(redis_conf)
    return cfg
def main(ctx, config_path, debug):
    """CLI entry point: load config, wire up IPC logging, build the context."""
    cfg = load_config(config_path)
    setproctitle(f"backend.ai: manager.cli {cfg['etcd']['namespace']}")
    # CLI invocations should not write to log files.
    drivers = cfg['logging']['drivers']
    if 'file' in drivers:
        drivers.remove('file')
    # Per-process IPC socket used as the log relay endpoint.
    log_sockpath = Path(f'/tmp/backend.ai/ipc/manager-cli-{os.getpid()}.sock')
    log_sockpath.parent.mkdir(parents=True, exist_ok=True)
    log_endpoint = f'ipc://{log_sockpath}'
    logger = Logger(cfg['logging'], is_master=True, log_endpoint=log_endpoint)
    ctx.obj = CLIContext(
        logger=logger,
        config=cfg,
    )

    def _clean_logger():
        # Best-effort removal of the IPC socket file at interpreter exit.
        try:
            os.unlink(log_sockpath)
        except FileNotFoundError:
            pass

    atexit.register(_clean_logger)
async def _create_server(loop, unused_port, extra_inits=None, debug=False):
    """Spin up a TLS-enabled test server instance on localhost."""
    app = web.Application(loop=loop)
    config = load_config(argv=[], extra_args_func=gw_args)
    app.config = config
    # Override default configs for the testing setup: self-signed TLS
    # on an ephemeral localhost port.
    config.ssl_cert = here / 'sample-ssl-cert' / 'sample.crt'
    config.ssl_key = here / 'sample-ssl-cert' / 'sample.key'
    config.service_ip = '127.0.0.1'
    config.service_port = unused_port
    sslctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    sslctx.load_cert_chain(str(config.ssl_cert), str(config.ssl_key))
    app.sslctx = sslctx
    await gw_init(app)
    for init in (extra_inits or ()):
        await init(app)
    handler = app.make_handler(debug=debug, keep_alive_on=False)
    server = await loop.create_server(
        handler, config.service_ip, config.service_port, ssl=sslctx)
    return app, config.service_port, handler, server
async def default_keypair(event_loop):
    """Fetch the well-known example keypair from the test database.

    Returns a dict with ``access_key`` and ``secret_key``.
    """
    access_key = 'AKIAIOSFODNN7EXAMPLE'
    config = load_config(argv=[], extra_args_func=gw_args)
    pool = await create_engine(host=config.db_addr[0], port=config.db_addr[1],
                               user=config.db_user, password=config.db_password,
                               dbname=config.db_name, minsize=1, maxsize=4)
    try:
        async with pool.acquire() as conn:
            query = (sa.select([
                keypairs.c.access_key, keypairs.c.secret_key
            ]).select_from(keypairs).where(keypairs.c.access_key == access_key))
            result = await conn.execute(query)
            row = await result.first()
            keypair = {
                'access_key': access_key,
                'secret_key': row.secret_key,
            }
    finally:
        # FIX: previously the pool was only closed on the success path,
        # leaking connections when the query raised.
        pool.close()
        await pool.wait_closed()
    return keypair
def prepare_and_cleanup_databases(request, test_ns, test_db, folder_mount,
                                  folder_host, folder_fsprefix):
    """Provision etcd config and a fresh database for a test session.

    Registers pytest finalizers that wipe the etcd namespace and drop the
    per-test database afterwards.
    """
    os.environ['BACKEND_NAMESPACE'] = test_ns
    os.environ['BACKEND_DB_NAME'] = test_db
    cfg = load_config()

    def run_cli(*args):
        # Invoke the manager CLI via a subprocess so it picks up the
        # BACKEND_* environment variables set above.
        subprocess.call(['python', '-m', 'ai.backend.manager.cli', *args])

    def etcd_put(key, value):
        run_cli('etcd', 'put', key, str(value))

    # Clear and reset etcd namespace using CLI functions.
    etcd_put('volumes/_mount', folder_mount)
    etcd_put('volumes/_fsprefix', folder_fsprefix)
    etcd_put('volumes/_default_host', folder_host)
    etcd_put('config/docker/registry/index.docker.io',
             'https://registry-1.docker.io')
    etcd_put('config/redis/addr', '127.0.0.1:8110')
    # Add fake plugin settings.
    etcd_put('config/plugins/cloudia/base_url', '127.0.0.1:8090')
    etcd_put('config/plugins/cloudia/user', '*****@*****.**')
    etcd_put('config/plugins/cloudia/password', 'fake-password')

    def finalize_etcd():
        run_cli('etcd', 'delete', '', '--prefix')

    request.addfinalizer(finalize_etcd)

    # Create the per-test database using the low-level psycopg2 API.
    db_addr = cfg['db']['addr']
    db_user = cfg['db']['user']
    db_pass = cfg['db']['password']
    if db_pass:
        # TODO: escape/urlquote db_pass
        db_url = f'postgresql://{db_user}:{db_pass}@{db_addr}'
    else:
        db_url = f'postgresql://{db_user}@{db_addr}'
    conn = pg.connect(db_url)
    conn.set_isolation_level(pg.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
    cur = conn.cursor()
    cur.execute(f'CREATE DATABASE "{test_db}";')
    cur.close()
    conn.close()

    def finalize_db():
        # Kick out lingering connections before dropping the database.
        conn = pg.connect(db_url)
        conn.set_isolation_level(pg.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
        cur = conn.cursor()
        cur.execute(f'REVOKE CONNECT ON DATABASE "{test_db}" FROM public;')
        cur.execute('SELECT pg_terminate_backend(pid) FROM pg_stat_activity '
                    'WHERE pid <> pg_backend_pid();')
        cur.execute(f'DROP DATABASE "{test_db}";')
        cur.close()
        conn.close()

    request.addfinalizer(finalize_db)

    # Load the database schema via the CLI, pointing a temporary copy of
    # the sample alembic config at the per-test database URL.
    alembic_url = db_url + '/' + test_db
    with tempfile.NamedTemporaryFile(mode='w', encoding='utf8') as alembic_cfg:
        alembic_sample_cfg = here / '..' / 'alembic.ini.sample'
        alembic_cfg_data = alembic_sample_cfg.read_text()
        alembic_cfg_data = re.sub(
            r'^sqlalchemy.url = .*$',
            f'sqlalchemy.url = {alembic_url}',
            alembic_cfg_data, flags=re.M)
        alembic_cfg.write(alembic_cfg_data)
        alembic_cfg.flush()
        run_cli('schema', 'oneshot', '-f', alembic_cfg.name)

    # Populate the example keypair and resource-preset fixtures.
    fixtures = {}
    sample_dir = Path(__file__).parent.parent / 'sample-configs'
    fixtures.update(json.loads(
        (sample_dir / 'example-keypairs.json').read_text()))
    fixtures.update(json.loads(
        (sample_dir / 'example-resource-presets.json').read_text()))
    engine = sa.create_engine(alembic_url)
    conn = engine.connect()
    populate_fixture(conn, fixtures)
    conn.close()
    engine.dispose()
from ai.backend.gateway.config import load_config, init_logger
from ai.backend.manager import cli

# Cache of subcommand classes resolved during argument parsing.
resolved_command_classes = {}


def init_app_args(parser):
    # Expose the root parser to CLI submodules; importing them registers
    # their subcommands onto it as an import-time side effect.
    cli.global_argparser = parser
    import ai.backend.manager.cli.fixture  # noqa
    import ai.backend.manager.cli.dbschema  # noqa
    import ai.backend.manager.cli.shell  # noqa
    import ai.backend.manager.cli.etcd  # noqa


# Module-level bootstrap: parse args (registering subcommands via the
# callback above), initialize logging, then dispatch to the handler
# that argument parsing attached to the config object.
config = load_config(extra_args_func=init_app_args)
init_logger(config)
config.function(config)