def client(
    loop,
    aiohttp_client,
    app_config,  ## waits until swarm with *_services are up
):
    """Fixture: webserver test client (computation stack) with storage disabled.

    Yields an aiohttp test client bound to the host/port given in app_config.
    """
    assert app_config["rest"]["version"] == API_VERSION

    app_config["storage"]["enabled"] = False
    app_config["main"]["testing"] = True

    # dump effective config for debugging
    # FIX: config was pprinted twice with identical content; print once
    pprint(app_config)

    # fake config
    app = create_safe_application()
    app[APP_CONFIG_KEY] = app_config

    setup_db(app)
    setup_session(app)
    setup_security(app)
    setup_rest(app)
    setup_login(app)
    setup_projects(app)
    setup_computation(app)

    yield loop.run_until_complete(
        aiohttp_client(
            app,
            server_kwargs={
                "port": app_config["main"]["port"],
                "host": app_config["main"]["host"],
            },
        )
    )
def client(loop, aiohttp_unused_port, aiohttp_client, api_version_prefix):
    """Fixture: minimal app (security + restAPI only) with a deliberately slow route."""
    import time

    app = create_safe_application()

    MAX_DELAY_SECS_ALLOWED = 1  # secs

    async def slow_handler(request: web.Request):
        # blocks the event loop just over the allowed budget, then responds OK
        time.sleep(MAX_DELAY_SECS_ALLOWED * 1.1)
        raise web.HTTPOk()

    server_kwargs = {"port": aiohttp_unused_port(), "host": "localhost"}

    # fake config
    app[APP_CONFIG_KEY] = {
        "main": server_kwargs,
        "rest": {"enabled": True, "version": api_version_prefix},
    }

    # activates only security+restAPI sub-modules
    setup_settings(app)
    setup_security(app)
    setup_rest(app)

    app.router.add_get("/slow", slow_handler)

    return loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs))
def create(config: Dict[str, Any]) -> web.Application:
    """Builds the storage-service application from its configuration.

    :param config: full service configuration (dumped to the debug log)
    :return: configured (not yet running) aiohttp application
    """
    log.debug(
        "Initializing app with config:\n%s",
        json.dumps(config, indent=2, sort_keys=True),
    )

    app = create_safe_application(config)

    if config["tracing"]["enabled"]:
        setup_tracing(
            app,
            "simcore_service_storage",
            config["host"],
            config["port"],
            config["tracing"],
        )

    setup_db(app)  # -> postgres service
    setup_s3(app)  # -> minio service
    setup_dsm(app)  # core subsystem. Needs s3 and db setups done
    setup_rest(app)  # lastly, we expose API to the world

    if config.get("monitoring_enabled", False):
        setup_monitoring(app, "simcore_service_storage")

    return app
def client(
    loop,
    aiohttp_client,
    app_config,  ## waits until swarm with *_services are up
    rabbit_service: RabbitConfig,  ## waits until rabbit is responsive
    postgres_db: sa.engine.Engine,
):
    """Fixture: full webserver client (computation, director-v2, socketio) with storage off."""
    assert app_config["rest"]["version"] == API_VERSION

    app_config["storage"]["enabled"] = False

    # fake config
    app = create_safe_application()
    app[APP_CONFIG_KEY] = app_config

    setup_db(app)
    setup_session(app)
    setup_security(app)
    setup_rest(app)
    setup_login(app)
    setup_projects(app)
    setup_computation(app)
    setup_director_v2(app)
    setup_socketio(app)
    setup_resource_manager(app)

    server_kwargs = {
        "port": app_config["main"]["port"],
        "host": app_config["main"]["host"],
    }
    yield loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs))
def client(loop, aiohttp_client, app_cfg, postgres_service, qx_client_outdir, monkeypatch):
    # def client(loop, aiohttp_client, app_cfg, qx_client_outdir, monkeypatch): # <<<< FOR DEVELOPMENT. DO NOT REMOVE.
    """Fixture: webserver client serving statics and studies access."""
    cfg = deepcopy(app_cfg)

    cfg["db"]["init_tables"] = True  # inits tables of postgres_service upon startup
    cfg["projects"]["enabled"] = True
    cfg["storage"]["enabled"] = False
    cfg["rabbit"]["enabled"] = False

    app = create_safe_application(cfg)

    setup_statics(app)
    setup_db(app)
    setup_session(app)
    setup_security(app)
    setup_rest(app)  # TODO: why should we need this??
    setup_login(app)
    setup_users(app)
    assert setup_projects(app), "Shall not skip this setup"
    assert setup_studies_access(app), "Shall not skip this setup"

    # server and client
    server_kwargs = {"port": cfg["main"]["port"], "host": cfg["main"]["host"]}
    yield loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs))
def client(
    loop,
    mock_orphaned_services,
    aiohttp_client,
    app_config,  ## waits until swarm with *_services are up
):
    """Fixture: webserver client with resource-manager; storage and rabbit disabled."""
    assert app_config["rest"]["version"] == API_VERSION

    app_config["main"]["testing"] = True
    app_config["db"]["init_tables"] = True
    app_config["storage"]["enabled"] = False
    app_config["rabbit"]["enabled"] = False
    pprint(app_config)  # dump effective config for debugging

    app = create_safe_application(app_config)

    setup_db(app)
    setup_session(app)
    setup_security(app)
    setup_rest(app)
    setup_login(app)
    setup_resource_manager(app)
    assert setup_projects(app)

    server_kwargs = {
        "port": app_config["main"]["port"],
        "host": app_config["main"]["host"],
    }
    yield loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs))
def client(loop, aiohttp_client, app_cfg, postgres_service, mock_orphaned_services):
    """Fixture: webserver client with director + sockets and sped-up garbage collection."""
    cfg = deepcopy(app_cfg)

    assert cfg["rest"]["version"] == API_VERSION
    assert cfg["rest"]["enabled"]

    cfg["db"]["init_tables"] = True  # inits postgres_service
    cfg["projects"]["enabled"] = True
    cfg["director"]["enabled"] = True
    # increase speed of garbage collection
    cfg[config.CONFIG_SECTION_NAME][
        "garbage_collection_interval_seconds"
    ] = GARBAGE_COLLECTOR_INTERVAL

    # fake config
    app = create_safe_application(cfg)

    # activates only security+restAPI sub-modules
    setup_db(app)
    setup_session(app)
    setup_security(app)
    setup_rest(app)
    setup_login(app)
    setup_users(app)
    setup_sockets(app)
    setup_projects(app)
    setup_director(app)
    assert setup_resource_manager(app)

    yield loop.run_until_complete(
        aiohttp_client(
            app,
            server_kwargs={"port": cfg["main"]["port"], "host": cfg["main"]["host"]},
        )
    )
def client(loop, aiohttp_client, app_cfg, postgres_db, qx_client_outdir, mocks_on_projects_api):
    """Fixture: statics + studies-access client; storage/rabbit off, qx outdir mounted."""
    cfg = deepcopy(app_cfg)

    cfg["projects"]["enabled"] = True
    cfg["storage"]["enabled"] = False
    cfg["rabbit"]["enabled"] = False
    cfg["main"]["client_outdir"] = qx_client_outdir

    app = create_safe_application(cfg)

    setup_settings(app)
    setup_statics(app)
    setup_db(app)
    setup_session(app)
    setup_security(app)
    setup_rest(app)  # TODO: why should we need this??
    setup_login(app)
    setup_users(app)
    setup_products(app)
    assert setup_projects(app), "Shall not skip this setup"
    assert setup_studies_access(app), "Shall not skip this setup"

    # server and client
    server_kwargs = {"port": cfg["main"]["port"], "host": cfg["main"]["host"]}
    yield loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs))
def client(loop, aiohttp_client, app_config, mock_orphaned_services):
    """Fixture: minimal client with only session/security/rest/activity sub-modules."""
    app = create_safe_application(app_config)

    setup_session(app)
    setup_security(app)
    setup_rest(app)
    setup_activity(app)

    return loop.run_until_complete(aiohttp_client(app))
def client(loop, aiohttp_client):
    """Fixture: catalog client with authentication disabled."""
    cfg = load_default_config()

    app = create_safe_application(cfg)
    app[APP_OPENAPI_SPECS_KEY] = load_openapi_specs()

    # call the undecorated setup so auth can be switched off
    setup_catalog.__wrapped__(app, disable_auth=True)

    # needs to start application ...
    yield loop.run_until_complete(aiohttp_client(app))
def create_application(config: Dict[str, Any]) -> web.Application:
    """Initializes the webserver service with all of its sub-modules.

    :param config: full application configuration (dumped to the debug log)
    :return: configured (not yet running) aiohttp application
    """
    log.debug(
        "Initializing app with config:\n%s",
        json.dumps(config, indent=2, sort_keys=True),
    )

    app = create_safe_application(config)

    setup_settings(app)

    # TODO: create dependency mechanism
    # and compute setup order https://github.com/ITISFoundation/osparc-simcore/issues/1142
    setup_app_tracing(app)
    setup_statics(app)
    setup_db(app)
    setup_session(app)
    setup_security(app)
    setup_rest(app)
    setup_diagnostics(app)
    setup_email(app)
    setup_computation(app)
    setup_socketio(app)
    setup_login(app)
    setup_director(app)
    setup_director_v2(app)
    setup_storage(app)
    setup_users(app)
    setup_groups(app)
    setup_projects(app)
    setup_activity(app)
    setup_resource_manager(app)
    setup_tags(app)
    setup_catalog(app)
    setup_publications(app)
    setup_products(app)
    setup_studies_access(app)
    setup_studies_dispatcher(app)

    return app
def create_backend_app(name, image, basepath):
    """Creates a fake backend app that echos request info together with its identity."""

    async def handler(request: web.Request):
        """Echos back the received request info plus this backend's name/image."""
        body = await request.text()
        payload = {
            "name": name,
            "image": image,
            "received": {
                "method": request.method,
                "url": str(request.url),
                "body": body,
                "proxy_path": request.match_info.get("proxy_path", ""),
            },
        }
        return web.json_response(payload)

    app = create_safe_application()
    # catch-all route under the given mountpoint
    app.router.add_route("*", basepath + "/{proxy_path:.*}", handler)
    return app
def client(
    loop,
    aiohttp_client,
    app_cfg,
    postgres_service,
    mocked_director_subsystem,
    mock_orphaned_services,
):
    """Fixture: webserver client with fast garbage collection and short deletion delays."""
    # def client(loop, aiohttp_client, app_cfg): # <<<< FOR DEVELOPMENT. DO NOT REMOVE.

    # config app
    cfg = deepcopy(app_cfg)
    port = cfg["main"]["port"]

    cfg["db"]["init_tables"] = True  # inits tables of postgres_service upon startup
    cfg["projects"]["enabled"] = True
    cfg["director"]["enabled"] = True
    cfg["resource_manager"][
        "garbage_collection_interval_seconds"
    ] = 3  # increase speed of garbage collection
    cfg["resource_manager"][
        "resource_deletion_timeout_seconds"
    ] = 3  # reduce deletion delay

    app = create_safe_application(cfg)

    # setup app
    setup_db(app)
    setup_session(app)
    setup_security(app)
    setup_rest(app)
    setup_login(app)  # needed for login_utils fixtures
    setup_resource_manager(app)
    setup_sockets(app)
    setup_director(app)
    setup_tags(app)
    assert setup_projects(app)

    # server and client
    yield loop.run_until_complete(
        aiohttp_client(app, server_kwargs={"port": port, "host": "localhost"})
    )
def client(
    loop, aiohttp_client, app_config, postgres_with_template_db, mock_orphaned_services
):
    """Fixture: client with director + director-v2 and tuned resource-manager timings."""
    cfg = deepcopy(app_config)

    assert cfg["rest"]["version"] == API_VERSION
    assert cfg["rest"]["enabled"]

    cfg["projects"]["enabled"] = True
    cfg["director"]["enabled"] = True
    cfg["resource_manager"][
        "garbage_collection_interval_seconds"
    ] = GARBAGE_COLLECTOR_INTERVAL  # increase speed of garbage collection
    cfg["resource_manager"][
        "resource_deletion_timeout_seconds"
    ] = SERVICE_DELETION_DELAY  # reduce deletion delay

    # fake config
    app = create_safe_application(cfg)

    # activates only security+restAPI sub-modules
    setup_db(app)
    setup_session(app)
    setup_security(app)
    setup_rest(app)
    setup_login(app)
    setup_users(app)
    setup_socketio(app)
    setup_projects(app)
    setup_director(app)
    setup_director_v2(app)
    assert setup_resource_manager(app)

    server_kwargs = {"port": cfg["main"]["port"], "host": cfg["main"]["host"]}
    yield loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs))
def client(loop, aiohttp_client, app_cfg, postgres_service):
    """Fixture: users-subsystem test client against a live postgres service."""
    cfg = deepcopy(app_cfg)
    port = cfg["main"]["port"]

    assert cfg["rest"]["version"] == API_VERSION
    cfg["db"]["init_tables"] = True  # inits postgres_service

    # fake config
    app = create_safe_application(cfg)

    setup_db(app)
    setup_session(app)
    setup_security(app)
    setup_rest(app)
    setup_login(app)
    setup_users(app)

    return loop.run_until_complete(
        aiohttp_client(app, server_kwargs={"port": port, "host": "localhost"})
    )
def client(
    loop,
    aiohttp_client,
    app_config,  ## waits until swarm with *_services are up
    rabbit_config: Config,
    rabbit_service,  ## waits until rabbit is responsive
):
    """Fixture: computation + sockets client wired to a responsive rabbit broker."""
    assert app_config["rest"]["version"] == API_VERSION

    app_config["storage"]["enabled"] = False
    app_config["db"]["init_tables"] = True  # inits postgres_service
    app_config[CONFIG_SECTION_NAME] = rabbit_config.dict()

    # fake config
    app = create_safe_application()
    app[APP_CONFIG_KEY] = app_config

    setup_db(app)
    setup_session(app)
    setup_security(app)
    setup_rest(app)
    setup_login(app)
    setup_projects(app)
    setup_computation(app)
    setup_sockets(app)
    setup_resource_manager(app)

    server_kwargs = {
        "port": app_config["main"]["port"],
        "host": app_config["main"]["host"],
    }
    yield loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs))
def moduleless_app(loop, aiohttp_server) -> web.Application:
    """Fixture: bare application with no sub-modules, running on a dummy server."""
    app: web.Application = create_safe_application()

    # creates a dummy server
    # server is destroyed on exit https://docs.aiohttp.org/en/stable/testing.html#pytest_aiohttp.aiohttp_server
    _server = loop.run_until_complete(aiohttp_server(app))

    return app
def storage_server(loop, aiohttp_server, app_cfg):
    """Fixture: fake storage backend with canned responses.

    Every endpoint validates the expected query parameters (body-less GET with a
    mandatory ``user_id``) and returns a fixed JSON payload.

    :return: running aiohttp test server bound to the configured storage port
    """
    cfg = app_cfg["storage"]
    app = create_safe_application(cfg)

    def _checked_query(request: web.Request):
        # common precondition shared by all storage endpoints (was copy-pasted
        # into each handler): body-less GET with a non-empty user_id
        assert not request.has_body
        query = request.query
        assert query
        assert "user_id" in query
        assert query["user_id"], "Expected user id"
        return query

    async def _get_locs(request: web.Request):
        query = _checked_query(request)
        return web.json_response({"data": [{"user_id": int(query["user_id"])},]})

    async def _get_filemeta(request: web.Request):
        _checked_query(request)
        return web.json_response({"data": [{"filemeta": 42},]})

    async def _get_filtered_list(request: web.Request):
        query = _checked_query(request)
        assert query["uuid_filter"], "expected a filter"
        return web.json_response({"data": [{"uuid_filter": query["uuid_filter"]},]})

    async def _get_datasets(request: web.Request):
        _checked_query(request)
        return web.json_response(
            {"data": [{"dataset_id": "asdf", "display_name": "bbb"},]}
        )

    async def _get_datasets_meta(request: web.Request):
        _checked_query(request)
        return web.json_response(
            {"data": [{"dataset_id": "asdf", "display_name": "bbb"},]}
        )

    storage_api_version = cfg["version"]
    assert (
        storage_api_version != API_VERSION
    ), "backend service w/ different version as webserver entrypoint"

    app.router.add_get(f"/{storage_api_version}/locations", _get_locs)
    app.router.add_get(
        f"/{storage_api_version}/locations/0/files/{{file_id}}/metadata", _get_filemeta
    )
    app.router.add_get(
        f"/{storage_api_version}/locations/0/files/metadata", _get_filtered_list
    )
    app.router.add_get(f"/{storage_api_version}/locations/0/datasets", _get_datasets)
    app.router.add_get(
        f"/{storage_api_version}/locations/0/datasets/{{dataset_id}}/metadata",
        _get_datasets_meta,
    )

    assert cfg["host"] == "localhost"

    server = loop.run_until_complete(aiohttp_server(app, port=cfg["port"]))
    return server
def reverse_proxy_server(loop, aiohttp_server, spawner_client):
    """ Application with reverse_proxy.setup (emulates webserver) """

    @attr.s(auto_attribs=True)
    class ServiceMonitor(ServiceResolutionPolicy):
        cli: DTestClient = None

        # override
        async def get_image_name(self, service_identifier: str) -> str:
            resp = await self.cli.get("/services/%s" % service_identifier)
            info = await resp.json()
            return info["image"]

        # override
        async def find_url(self, service_identifier: str) -> URL:
            resp = await self.cli.get("/services/%s" % service_identifier)
            info = await resp.json()
            return info["url"]

    app = create_safe_application({"reverse_proxy": {"enabled": True}})

    # setup
    app["director.client"] = spawner_client
    monitor = ServiceMonitor(app["director.client"])

    # adds /x/ to router
    setup_reverse_proxy(app, monitor)
    app["reverse_proxy.basemount"] = monitor.base_mountpoint

    # <-- another way to "publish", with a named-resouce
    url = app.router["reverse_proxy"].url_for(serviceId="foo", proxyPath="bar")
    assert url == URL(app["reverse_proxy.basemount"] + "/foo/bar")

    # adds api
    async def bypass(req: web.Request):
        """ bypasses traffic to spawmer """
        # /services/{serviceId}?action=xxx -> /services/{serviceId}/{action}
        method = req.method
        path = join(req.path, req.query.get("action", "")).rstrip("/")
        body = None
        if method != "GET":
            body = await req.json()
            body["basepath"] = req.app["reverse_proxy.basemount"]

        cli = req.app["director.client"]

        # mini-reverse proxy ----
        res = await cli.request(method, path, json=body)
        assert isinstance(res, ClientResponse), "NOTE: %s" % type(res)

        response = web.StreamResponse(status=res.status, headers=res.headers)
        await response.prepare(req)
        payload = await res.read()
        await response.write_eof(payload)
        return response
        # -------------------------
        # raise web.HTTPServiceUnavailable(reason="Cannot talk to spawner",
        #    content_type="application/json")

    # API: /services/{serviceId}?action=xxx
    app.router.add_get("/services", bypass)
    app.router.add_post("/services", bypass)
    app.router.add_get("/services/{serviceId}", bypass)

    return loop.run_until_complete(aiohttp_server(app))
def spawner_server(loop, aiohttp_server):
    """ Spawns backend services (emulates director) """
    # uses mountpoint as a unique identifier
    registry = {}  # registry[mountpoint] -> {info:{}, server:}

    async def list_infos(req: web.Request):
        # lists info of all running services
        return web.json_response([v["info"] for v in registry.values()])

    async def info(req: web.Request):
        serviceid = req.match_info.get("serviceId")
        for mountpoint, item in registry.items():
            if item["info"]["id"] == serviceid:
                return web.json_response(registry[mountpoint]["info"])
        raise web.HTTPServiceUnavailable(
            reason="Service {} is not running".format(serviceid),
            content_type="application/json",
        )

    async def start(req: web.Request):
        # client requests to run image in basepath
        data = await req.json()
        image = data["image"]
        basepath = data["basepath"]  # corresponds to the PROXY_MOUNTPOINT

        # given or auto-generated here. Unique.
        serviceid = name = data.get("name", random_name())
        # settings = data["settings"] image specific settings/config

        # determines unique mountpoint
        mountpoint = "{}/{}".format(basepath, serviceid)
        if mountpoint not in registry:
            server = await aiohttp_server(create_backend_app(name, image, mountpoint))
            registry[mountpoint] = {
                "server": server,
                "info": {
                    "name": name,
                    "image": image,
                    "mountpoint": mountpoint,
                    "id": serviceid,
                    "url": str(
                        URL.build(
                            scheme=server.scheme,
                            host=server.host,
                            port=server.port,
                            path=mountpoint,
                        )
                    ),
                },
            }
        # produces an identifier
        return web.json_response(registry[mountpoint]["info"])

    async def stop(req: web.Request):
        serviceid = req.match_info.get("serviceId")
        info = {"id": serviceid}
        # determines unique mountpoint
        for mountpoint, item in registry.items():
            if item["info"]["id"] == serviceid:
                # FIX: print() does not do %-substitution like logging does;
                # format explicitly so the service info is actually interpolated
                print("stopping %s ..." % (item["info"],))
                service = registry[mountpoint]["server"]
                await service.close()
                info = registry.pop(mountpoint)["info"]
                break
        return web.json_response(info)

    app = create_safe_application()

    # API
    app.router.add_get("/services", list_infos)
    app.router.add_get("/services/{serviceId}", info)
    app.router.add_post("/services/start", start)  # /services/?action=start
    # servjces/?action=stop
    app.router.add_get("/services/{serviceId}/stop", stop)

    return loop.run_until_complete(aiohttp_server(app))
def client(loop, aiohttp_unused_port, aiohttp_client, api_version_prefix):
    """Fixture: diagnostics-enabled client exposing failure/latency probe routes."""
    SLOW_HANDLER_DELAY_SECS = 1.0  # secs

    # pylint:disable=unused-variable
    routes = web.RouteTableDef()

    @routes.get("/error")
    async def unexpected_error(request: web.Request):
        raise Exception("boom shall produce 500")

    @routes.get(r"/fail")
    async def expected_failure(request: web.Request):
        raise web.HTTPServiceUnavailable()

    @routes.get(r"/slow")
    async def blocking_slow(request: web.Request):
        # blocks the event loop slightly longer than the allowed budget
        time.sleep(SLOW_HANDLER_DELAY_SECS * 1.1)
        return web.json_response({"data": True, "error": None})

    @routes.get(r"/cancel")
    async def cancelled_task(request: web.Request):
        task: asyncio.Task = request.app.loop.create_task(asyncio.sleep(10))
        task.cancel()  # raise CancelledError

    @routes.get(r"/timeout/{secs}")
    async def time_out(request: web.Request):
        secs = float(request.match_info.get("secs", 0))
        await asyncio.wait_for(asyncio.sleep(10 * secs), timeout=secs)  # raises TimeOutError

    @routes.get(r"/delay/{secs}")
    async def delay_response(request: web.Request):
        secs = float(request.match_info.get("secs", 0))
        await asyncio.sleep(secs)  # non-blocking slow
        return web.json_response({"data": True, "error": None})

    # -----
    app = create_safe_application()

    main = {"port": aiohttp_unused_port(), "host": "localhost"}
    app[APP_CONFIG_KEY] = {
        "main": main,
        "rest": {"enabled": True, "version": api_version_prefix},
        "diagnostics": {"enabled": True},
    }

    # activates some sub-modules
    setup_security(app)
    setup_rest(app)
    setup_diagnostics(
        app,
        slow_duration_secs=SLOW_HANDLER_DELAY_SECS / 10,
        max_task_delay=SLOW_HANDLER_DELAY_SECS,
        max_avg_response_latency=2.0,
        start_sensing_delay=0,  # inmidiately!
    )
    assert app[kMAX_AVG_RESP_LATENCY] == 2.0

    app.router.add_routes(routes)

    return loop.run_until_complete(
        aiohttp_client(app, server_kwargs={key: main[key] for key in ("host", "port")})
    )