Example No. 1
async def put_config_validation(self, request: Request) -> StreamResponse:
    config_id = request.match_info["config_id"]
    js = await self.json_from_request(request)
    js["id"] = config_id
    config_model = from_js(js, ConfigValidation)
    model = await self.config_handler.put_config_validation(config_model)
    return await single_result(request, to_js(model))
Example No. 2
def test_parse_broken(config_json: Json) -> None:
    # config_json is a valid parsable config
    cfg = deepcopy(config_json)

    # adjust the config: rename web_hosts -> hosts, and web_port -> port
    hosts = cfg["resotocore"]["api"]["web_hosts"]
    port = cfg["resotocore"]["api"]["web_port"]
    cfg["resotocore"]["api"]["hosts"] = hosts
    cfg["resotocore"]["api"]["port"] = port
    del cfg["resotocore"]["api"]["web_hosts"]
    del cfg["resotocore"]["api"]["web_port"]

    # parse this configuration
    parsed = parse_config(parse_args(["--analytics-opt-out"]), cfg)
    parsed_json = to_js(parsed.editable, strip_attr="kind")

    # web_hosts and web_port were not available and are reverted to the default values
    default = EditableConfig()
    assert parsed.api.web_hosts != hosts
    assert parsed.api.web_hosts == default.api.web_hosts
    assert parsed.api.web_port != port
    assert parsed.api.web_port == default.api.web_port

    # other config values are still unchanged
    assert parsed_json["cli"] == config_json["resotocore"]["cli"]
    assert parsed_json["runtime"] == config_json["resotocore"]["runtime"]
    assert parsed_json["graph_update"] == config_json["resotocore"][
        "graph_update"]
Example No. 3
def to_json(o: Job, **_: object) -> Json:
    wait = {
        "wait_trigger": to_js(o.wait[0]),
        "wait_timeout": to_json(o.wait[1])
    } if o.wait else {}
    env = {"environment": o.environment} if o.environment else {}
    return {
        "id": o.id,
        "name": o.name,
        "command": to_js(o.command),
        "trigger": to_js(o.trigger),
        "timeout": to_json(o.timeout),
        "active": o.active,
        **env,
        **wait,
    }
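The **env and **wait spreads above are an optional-field idiom: a key appears in the resulting JSON only when its value is set. A minimal, self-contained illustration of the same pattern (all names here are invented for this sketch):

from typing import Any, Dict, Optional


def serialize_person(name: str, nickname: Optional[str] = None) -> Dict[str, Any]:
    # "nickname" is included only when a value is given, mirroring **env / **wait above
    optional = {"nickname": nickname} if nickname else {}
    return {"name": name, **optional}


assert serialize_person("Ada") == {"name": "Ada"}
assert serialize_person("Ada", "adders") == {"name": "Ada", "nickname": "adders"}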
Example No. 4
async def update_configs_model(self, update: List[Kind]) -> Model:
    async with self.session.patch(self.base_path + "/configs/model", json=to_js(update)) as response:
        model_json = await response.json()
        model = Model.from_kinds([from_js(kind, Kind) for kind in model_json["kinds"].values()])  # type: ignore
        return model
Example No. 5
def cmd_json(cmd: CLICommand) -> Json:
    return {
        "name": cmd.name,
        "info": cmd.info(),
        "help": cmd.help(),
        "args": to_js(cmd.args_info(), force_dict=True),
        "source": cmd.allowed_in_source_position,
    }
Example No. 6
async def merge_graph(self, request: Request) -> StreamResponse:
    log.info("Received merge_graph request")
    graph_id = request.match_info.get("graph_id", "resoto")
    db = self.db.get_graph_db(graph_id)
    it = self.to_line_generator(request)
    info = await merge_graph_process(db, self.event_sender, self.args, it, self.merge_max_wait_time, None)
    return web.json_response(to_js(info))
Example No. 7
async def update_subscriber(self, uid: str, subscriptions: List[Subscription]) -> Optional[Subscriber]:
    async with self.session.put(self.base_path + f"/subscriber/{uid}", json=to_js(subscriptions)) as r:
        if r.status == 200:
            return from_js(await r.json(), Subscriber)
        else:
            raise AttributeError(await r.text())
Example No. 8
def insert_system_data() -> SystemData:
    system = SystemData(uuid_str(), utc(), 1)
    log.info(f"Create new system data entry: {system}")
    db.insert_document("system_data", {"_key": "system", **to_js(system)}, overwrite=True)
    return system
Example No. 9
async def add_subscription(self, request: Request) -> StreamResponse:
    subscriber_id = request.match_info["subscriber_id"]
    event_type = request.match_info["event_type"]
    timeout = timedelta(seconds=int(request.query.get("timeout", "60")))
    wait_for_completion = request.query.get("wait_for_completion", "true").lower() != "false"
    sub = await self.subscription_handler.add_subscription(
        subscriber_id, event_type, wait_for_completion, timeout)
    return await single_result(request, to_js(sub))
Example No. 10
def test_json_marshalling_works() -> None:
    m = ModelFoo(1, "some foo", 23)
    js = to_js(m)
    js["identity"] = 1
    js["a"] = "some foo"
    js["b"] = 23
    again = from_js(js, ModelFoo)
    d = DeepDiff(m, again, truncate_datetime="second")
    assert len(d) == 0
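ModelFoo itself is not part of this listing. The following is a hypothetical stand-in that would satisfy the assertions above, assuming to_js/from_js can (de)serialize plain dataclasses by attribute name; the real project class may differ (and may carry additional fields such as a timestamp, given the truncate_datetime option):

from dataclasses import dataclass


@dataclass
class ModelFoo:
    # illustrative only: field names chosen to match the keys written into js above
    identity: int
    a: str
    b: int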
Example No. 11
async def test_merge_process(event_sender: AnalyticsEventSender,
                             graph_db: ArangoGraphDB,
                             foo_kinds: List[Kind]) -> None:
    # set explicitly (is done in main explicitly as well)
    set_start_method("spawn")

    # wipe any existing data
    await graph_db.wipe()
    # store the model in db, so it can be loaded by the sub process
    graph_db.db.collection("model").insert_many([to_js(a) for a in foo_kinds])
    # define args to parse for the sub process
    config = empty_config([
        "--graphdb-username", "test", "--graphdb-password", "test",
        "--graphdb-database", "test"
    ])
    # create sample graph data to insert
    graph = create_graph("test")

    await outer_edge_db(graph_db.db,
                        "deferred_outer_edges").create_update_schema()

    async def iterator() -> AsyncGenerator[bytes, None]:
        for node in graph.nodes():
            yield bytes(json.dumps(graph.nodes[node]), "utf-8")
        for from_node, to_node, data in graph.edges(data=True):
            yield bytes(
                json.dumps({
                    "from": from_node,
                    "to": to_node,
                    "edge_type": data["edge_type"]
                }), "utf-8")
        yield bytes(
            json.dumps({
                "from_selector": {
                    "node_id": "id_123"
                },
                "to_selector": {
                    "node_id": "id_456"
                },
                "edge_type": "delete"
            }),
            "utf-8",
        )

    result = await merge_graph_process(graph_db, event_sender, config,
                                       iterator(), timedelta(seconds=30), None,
                                       TaskId("test_task_123"))
    assert result == GraphUpdate(112, 1, 0, 212, 0, 0)
    elem = graph_db.db.collection("deferred_outer_edges").all().next()
    assert elem["_key"] == "test_task_123"
    assert elem["task_id"] == "test_task_123"
    assert elem["edges"][0] == {
        "from_node": "id_123",
        "to_node": "id_456",
        "edge_type": "delete"
    }
Example No. 12
def to_json(o: Workflow, **_: object) -> Json:
    env = {"environment": o.environment} if o.environment else {}
    return {
        "id": o.id,
        "name": o.name,
        "steps": to_json(o.steps),
        "triggers": to_json(o.triggers),
        "on_surpass": to_js(o.on_surpass),
        **env,
    }
Example No. 13
async def update_state(self, wi: RunningTask, message: Optional[Message]) -> None:
    bind = {
        "id": f"{self.collection_name}/{wi.id}",
        "current_state_name": wi.current_state.name,
        "current_state_snapshot": wi.current_state.export_state(),
    }
    if message:
        bind["message"] = to_js(message)
        aql = self.__update_state_with_message()
    else:
        aql = self.__update_state()

    await self.db.aql(aql, bind_vars=bind)
Example No. 14
async def test_workflows_command(cli: CLI, task_handler: TaskHandlerService,
                                 test_workflow: Workflow) -> None:
    async def execute(cmd: str) -> List[JsonElement]:
        ctx = CLIContext(cli.cli_env)
        return (await cli.execute_cli_command(cmd, stream.list,
                                              ctx))[0]  # type: ignore

    assert await execute("workflows list") == ["test_workflow"]
    assert await execute("workflows show test_workflow") == [
        to_js(test_workflow)
    ]
    wf = await execute("workflows run test_workflow")
    assert wf[0].startswith(
        "Workflow test_workflow started with id")  # type: ignore
    assert len(await execute("workflows running")) == 1
Example No. 15
async def core_client(
    client_session: ClientSession, foo_kinds: List[Kind], test_db: StandardDatabase
) -> AsyncIterator[ApiClient]:
    """
    Note: adding this fixture to a test starts a complete resotocore process.
          The fixture ensures that the underlying process has entered the ready state.
          It also ensures that the process is cleaned up when the test is done.
    """

    # wipe and cleanly import the test model
    test_db.collection("model").truncate()
    test_db.collection("model").insert_many([{"_key": elem.fqn, **to_js(elem)} for elem in foo_kinds])

    process = Process(
        target=run,
        args=(
            [
                "--graphdb-database",
                "test",
                "--graphdb-username",
                "test",
                "--graphdb-password",
                "test",
                "--debug",
                "--no-tls",
            ],
        ),
    )
    process.start()
    ready = False
    count = 10
    while not ready:
        await sleep(0.5)
        # give up after a bounded number of attempts instead of waiting forever
        count -= 1
        if count == 0:
            raise AssertionError("Process did not come up as expected")
        with suppress(Exception):
            async with client_session.get("http://localhost:8900/system/ready"):
                ready = True
    yield ApiClient("http://localhost:8900", None)
    # terminate the process
    process.terminate()
    process.join(5)
    # if it is still running, kill it
    if process.is_alive():
        process.kill()
        process.join()
    process.close()
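A hypothetical test on top of this fixture, reusing the client call shown in Example No. 4. It assumes that update_configs_model is a method of ApiClient and that Model.kinds is keyed by each kind's fqn (as the fixture's use of elem.fqn suggests); treat it as a sketch rather than project code:

async def test_update_configs_model(core_client: ApiClient, foo_kinds: List[Kind]) -> None:
    # push the known kinds to the running core and expect them in the returned model
    model = await core_client.update_configs_model(foo_kinds)
    assert all(kind.fqn in model.kinds for kind in foo_kinds)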
Example No. 16
async def test_merge_process(
    event_sender: AnalyticsEventSender, graph_db: ArangoGraphDB, foo_kinds: List[Kind]
) -> None:
    # set explicitly (is done in main explicitly as well)
    set_start_method("spawn")

    # wipe any existing data
    await graph_db.wipe()
    # store the model in db, so it can be loaded by the sub process
    graph_db.db.collection("model").insert_many([to_js(a) for a in foo_kinds])
    # define args to parse for the sub process
    args = parse_args(["--graphdb-username", "test", "--graphdb-password", "test", "--graphdb-database", "test"])
    # create sample graph data to insert
    graph = create_graph("test")

    async def iterator() -> AsyncGenerator[bytes, None]:
        for node in graph.nodes():
            yield bytes(json.dumps(graph.nodes[node]), "utf-8")
        for from_node, to_node, data in graph.edges(data=True):
            yield bytes(json.dumps({"from": from_node, "to": to_node, "edge_type": data["edge_type"]}), "utf-8")

    result = await merge_graph_process(graph_db, event_sender, args, iterator(), timedelta(seconds=30), None)
    assert result == GraphUpdate(112, 1, 0, 212, 0, 0)
Example No. 17
def to_doc(self, elem: T) -> Json:
    js = to_js(elem)
    js["_key"] = self.key_of(elem)
    return js
Example No. 18
def test_config_entity_roundtrip() -> None:
    entity = ConfigEntity(ConfigId("test"), {"test": 1}, "test")
    again = from_js(to_js(entity), ConfigEntity)
    assert entity == again
Example No. 19
async def explain(self, request: Request) -> StreamResponse:
    graph_db, query_model = await self.graph_query_model_from_request(request)
    result = await graph_db.explain(query_model)
    return web.json_response(to_js(result))
Example No. 20
async def update_model(self, request: Request) -> StreamResponse:
    js = await self.json_from_request(request)
    kinds: List[Kind] = from_js(js, List[Kind])
    model = await self.model_handler.update_model(kinds)
    return await single_result(request, to_js(model))
Example No. 21
async def get_model(self, request: Request) -> StreamResponse:
    md = await self.model_handler.load_model()
    return await single_result(request, to_js(md))
Example No. 22
async def delete_subscription(self, request: Request) -> StreamResponse:
    subscriber_id = request.match_info["subscriber_id"]
    event_type = request.match_info["event_type"]
    sub = await self.subscription_handler.remove_subscription(subscriber_id, event_type)
    return await single_result(request, to_js(sub))
Example No. 23
class Api:
    def __init__(
        self,
        db: DbAccess,
        model_handler: ModelHandler,
        subscription_handler: SubscriptionHandler,
        workflow_handler: TaskHandlerService,
        message_bus: MessageBus,
        event_sender: AnalyticsEventSender,
        worker_task_queue: WorkerTaskQueue,
        cert_handler: CertificateHandler,
        config_handler: ConfigHandler,
        cli: CLI,
        query_parser: QueryParser,
        config: CoreConfig,
    ):
        self.db = db
        self.model_handler = model_handler
        self.subscription_handler = subscription_handler
        self.workflow_handler = workflow_handler
        self.message_bus = message_bus
        self.event_sender = event_sender
        self.worker_task_queue = worker_task_queue
        self.cert_handler = cert_handler
        self.config_handler = config_handler
        self.cli = cli
        self.query_parser = query_parser
        self.config = config
        self.app = web.Application(
            # note on order: the middleware is passed in the order provided.
            middlewares=[
                metrics_handler,
                auth_handler(config.args.psk, AlwaysAllowed),
                cors_handler,
                error_handler(config, event_sender),
                default_middleware(self),
            ])
        self.app.on_response_prepare.append(on_response_prepare)
        self.session: Optional[ClientSession] = None
        self.in_shutdown = False
        self.websocket_handler: Dict[str, Tuple[Future[Any],
                                                WebSocketResponse]] = {}
        path_part = config.api.web_path.strip().strip("/").strip()
        web_path = "" if path_part == "" else f"/{path_part}"
        self.__add_routes(web_path)

    def __add_routes(self, prefix: str) -> None:
        static_path = os.path.abspath(os.path.dirname(__file__) + "/../static")
        ui_route: List[AbstractRouteDef] = (
            [web.static(f"{prefix}/ui/", self.config.api.ui_path)] if
            self.config.api.ui_path and Path(self.config.api.ui_path).exists()
            else [web.get(f"{prefix}/ui/index.html", self.no_ui)])
        tsdb_route = ([
            web.route(METH_ANY, prefix + "/tsdb/{tail:.+}", tsdb(self))
        ] if self.config.api.tsdb_proxy_url else [])
        self.app.add_routes([
            # Model operations
            web.get(prefix + "/model", self.get_model),
            web.get(prefix + "/model/uml", self.model_uml),
            web.patch(prefix + "/model", self.update_model),
            # CRUD Graph operations
            web.get(prefix + "/graph", self.list_graphs),
            web.get(prefix + "/graph/{graph_id}", self.get_node),
            web.post(prefix + "/graph/{graph_id}", self.create_graph),
            web.delete(prefix + "/graph/{graph_id}", self.wipe),
            # search the graph
            web.post(prefix + "/graph/{graph_id}/search/raw", self.raw),
            web.post(prefix + "/graph/{graph_id}/search/explain",
                     self.explain),
            web.post(prefix + "/graph/{graph_id}/search/list",
                     self.query_list),
            web.post(prefix + "/graph/{graph_id}/search/graph",
                     self.query_graph_stream),
            web.post(prefix + "/graph/{graph_id}/search/aggregate",
                     self.query_aggregation),
            # maintain the graph
            web.patch(prefix + "/graph/{graph_id}/nodes", self.update_nodes),
            web.post(prefix + "/graph/{graph_id}/merge", self.merge_graph),
            web.post(prefix + "/graph/{graph_id}/batch/merge",
                     self.update_merge_graph_batch),
            web.get(prefix + "/graph/{graph_id}/batch", self.list_batches),
            web.post(prefix + "/graph/{graph_id}/batch/{batch_id}",
                     self.commit_batch),
            web.delete(prefix + "/graph/{graph_id}/batch/{batch_id}",
                       self.abort_batch),
            # node specific actions
            web.post(
                prefix +
                "/graph/{graph_id}/node/{node_id}/under/{parent_node_id}",
                self.create_node),
            web.get(prefix + "/graph/{graph_id}/node/{node_id}",
                    self.get_node),
            web.patch(prefix + "/graph/{graph_id}/node/{node_id}",
                      self.update_node),
            web.delete(prefix + "/graph/{graph_id}/node/{node_id}",
                       self.delete_node),
            web.patch(
                prefix + "/graph/{graph_id}/node/{node_id}/section/{section}",
                self.update_node),
            # Subscriptions
            web.get(prefix + "/subscribers", self.list_all_subscriptions),
            web.get(prefix + "/subscribers/for/{event_type}",
                    self.list_subscription_for_event),
            # Subscription
            web.get(prefix + "/subscriber/{subscriber_id}",
                    self.get_subscriber),
            web.put(prefix + "/subscriber/{subscriber_id}",
                    self.update_subscriber),
            web.delete(prefix + "/subscriber/{subscriber_id}",
                       self.delete_subscriber),
            web.post(prefix + "/subscriber/{subscriber_id}/{event_type}",
                     self.add_subscription),
            web.delete(prefix + "/subscriber/{subscriber_id}/{event_type}",
                       self.delete_subscription),
            web.get(prefix + "/subscriber/{subscriber_id}/handle",
                    self.handle_subscribed),
            # CLI
            web.post(prefix + "/cli/evaluate", self.evaluate),
            web.post(prefix + "/cli/execute", self.execute),
            web.get(prefix + "/cli/info", self.cli_info),
            # Event operations
            web.get(prefix + "/events", self.handle_events),
            # Worker operations
            web.get(prefix + "/work/queue", self.handle_work_tasks),
            web.get(prefix + "/work/create", self.create_work),
            web.get(prefix + "/work/list", self.list_work),
            # Serve static files
            web.get(prefix, self.redirect_to_api_doc),
            web.static(prefix + "/static", static_path),
            # metrics
            web.get(prefix + "/metrics", self.metrics),
            # config operations
            web.get(prefix + "/configs", self.list_configs),
            web.put(prefix + "/config/{config_id}", self.put_config),
            web.get(prefix + "/config/{config_id}", self.get_config),
            web.patch(prefix + "/config/{config_id}", self.patch_config),
            web.delete(prefix + "/config/{config_id}", self.delete_config),
            # config model operations
            web.get(prefix + "/configs/validation", self.list_config_models),
            web.get(prefix + "/configs/model", self.get_configs_model),
            web.patch(prefix + "/configs/model", self.update_configs_model),
            web.put(prefix + "/config/{config_id}/validation",
                    self.put_config_validation),
            web.get(prefix + "/config/{config_id}/validation",
                    self.get_config_validation),
            # ca operations
            web.get(prefix + "/ca/cert", self.certificate),
            web.post(prefix + "/ca/sign", self.sign_certificate),
            # system operations
            web.get(prefix + "/system/ping", self.ping),
            web.get(prefix + "/system/ready", self.ready),
            # forwards
            web.get(prefix + "/tsdb", self.forward("/tsdb/")),
            web.get(prefix + "/ui", self.forward("/ui/index.html")),
            web.get(prefix + "/ui/", self.forward("/ui/index.html")),
            *ui_route,
            *tsdb_route,
        ])
        SwaggerFile(
            self.app,
            spec_file=f"{static_path}/api-doc.yaml",
            swagger_ui_settings=SwaggerUiSettings(path=prefix + "/api-doc",
                                                  layout="BaseLayout",
                                                  docExpansion="none"),
        )

    async def start(self) -> None:
        pass

    async def stop(self) -> None:
        if not self.in_shutdown:
            self.in_shutdown = True
            for ws_id in list(self.websocket_handler):
                await clean_ws_handler(ws_id, self.websocket_handler)
            if self.session:
                await self.session.close()

    @staticmethod
    def forward(to: str) -> Callable[[Request], Awaitable[StreamResponse]]:
        async def forward_to(_: Request) -> StreamResponse:
            return web.HTTPFound(to)

        return forward_to

    @staticmethod
    async def ping(_: Request) -> StreamResponse:
        return web.HTTPOk(text="pong", content_type="text/plain")

    @staticmethod
    async def ready(_: Request) -> StreamResponse:
        return web.HTTPOk(text="ok")

    async def list_configs(self, request: Request) -> StreamResponse:
        return await self.stream_response_from_gen(
            request, self.config_handler.list_config_ids())

    async def get_config(self, request: Request) -> StreamResponse:
        config_id = ConfigId(request.match_info["config_id"])
        accept = request.headers.get("accept", "application/json")
        not_found = HTTPNotFound(text="No config with this id")
        if accept == "application/yaml":
            yml = await self.config_handler.config_yaml(config_id)
            return web.Response(
                body=yml.encode("utf-8"),
                content_type="application/yaml") if yml else not_found
        else:
            config = await self.config_handler.get_config(config_id)
            if config:
                headers = {"Resoto-Config-Revision": config.revision}
                return await single_result(request, config.config, headers)
            else:
                return not_found

    async def put_config(self, request: Request) -> StreamResponse:
        config_id = ConfigId(request.match_info["config_id"])
        validate = request.query.get("validate", "true").lower() != "false"
        config = await self.json_from_request(request)
        result = await self.config_handler.put_config(
            ConfigEntity(config_id, config), validate)
        headers = {"Resoto-Config-Revision": result.revision}
        return await single_result(request, result.config, headers)

    async def patch_config(self, request: Request) -> StreamResponse:
        config_id = ConfigId(request.match_info["config_id"])
        patch = await self.json_from_request(request)
        updated = await self.config_handler.patch_config(
            ConfigEntity(config_id, patch))
        headers = {"Resoto-Config-Revision": updated.revision}
        return await single_result(request, updated.config, headers)

    async def delete_config(self, request: Request) -> StreamResponse:
        config_id = ConfigId(request.match_info["config_id"])
        await self.config_handler.delete_config(config_id)
        return HTTPNoContent()

    async def list_config_models(self, request: Request) -> StreamResponse:
        return await self.stream_response_from_gen(
            request, self.config_handler.list_config_validation_ids())

    async def get_config_validation(self, request: Request) -> StreamResponse:
        config_id = request.match_info["config_id"]
        model = await self.config_handler.get_config_validation(config_id)
        return await single_result(request,
                                   to_js(model)) if model else HTTPNotFound(
                                       text="No model for this config.")

    async def get_configs_model(self, request: Request) -> StreamResponse:
        model = await self.config_handler.get_configs_model()
        return await single_result(request, to_js(model))

    async def update_configs_model(self, request: Request) -> StreamResponse:
        js = await self.json_from_request(request)
        kinds: List[Kind] = from_js(js, List[Kind])
        model = await self.config_handler.update_configs_model(kinds)
        return await single_result(request, to_js(model))

    async def put_config_validation(self, request: Request) -> StreamResponse:
        config_id = request.match_info["config_id"]
        js = await self.json_from_request(request)
        js["id"] = config_id
        config_model = from_js(js, ConfigValidation)
        model = await self.config_handler.put_config_validation(config_model)
        return await single_result(request, to_js(model))

    async def certificate(self, _: Request) -> StreamResponse:
        cert, fingerprint = self.cert_handler.authority_certificate
        headers = {
            "SHA256-Fingerprint": fingerprint,
            "Content-Disposition": 'attachment; filename="resoto_root_ca.pem"',
        }
        if self.config.args.psk:
            headers["Authorization"] = "Bearer " + encode_jwt(
                {"sha256_fingerprint": fingerprint}, self.config.args.psk)
        return HTTPOk(headers=headers,
                      body=cert,
                      content_type="application/x-pem-file")

    async def sign_certificate(self, request: Request) -> StreamResponse:
        csr_bytes = await request.content.read()
        cert, fingerprint = self.cert_handler.sign(csr_bytes)
        headers = {"SHA256-Fingerprint": fingerprint}
        return HTTPOk(headers=headers,
                      body=cert,
                      content_type="application/x-pem-file")

    @staticmethod
    async def metrics(_: Request) -> StreamResponse:
        resp = web.Response(body=prometheus_client.generate_latest())
        resp.content_type = prometheus_client.CONTENT_TYPE_LATEST
        return resp

    async def list_all_subscriptions(self, request: Request) -> StreamResponse:
        subscribers = await self.subscription_handler.all_subscribers()
        return await single_result(request, to_json(subscribers))

    async def get_subscriber(self, request: Request) -> StreamResponse:
        subscriber_id = SubscriberId(request.match_info["subscriber_id"])
        subscriber = await self.subscription_handler.get_subscriber(
            subscriber_id)
        return self.optional_json(subscriber,
                                  f"No subscriber with id {subscriber_id}")

    async def list_subscription_for_event(self,
                                          request: Request) -> StreamResponse:
        event_type = request.match_info["event_type"]
        subscribers = await self.subscription_handler.list_subscriber_for(
            event_type)
        return await single_result(request, to_json(subscribers))

    async def update_subscriber(self, request: Request) -> StreamResponse:
        subscriber_id = SubscriberId(request.match_info["subscriber_id"])
        body = await self.json_from_request(request)
        subscriptions = from_js(body, List[Subscription])
        sub = await self.subscription_handler.update_subscriptions(
            subscriber_id, subscriptions)
        return await single_result(request, to_json(sub))

    async def delete_subscriber(self, request: Request) -> StreamResponse:
        subscriber_id = SubscriberId(request.match_info["subscriber_id"])
        await self.subscription_handler.remove_subscriber(subscriber_id)
        return web.HTTPNoContent()

    async def add_subscription(self, request: Request) -> StreamResponse:
        subscriber_id = SubscriberId(request.match_info["subscriber_id"])
        event_type = request.match_info["event_type"]
        timeout = timedelta(seconds=int(request.query.get("timeout", "60")))
        wait_for_completion = request.query.get("wait_for_completion",
                                                "true").lower() != "false"
        sub = await self.subscription_handler.add_subscription(
            subscriber_id, event_type, wait_for_completion, timeout)
        return await single_result(request, to_js(sub))

    async def delete_subscription(self, request: Request) -> StreamResponse:
        subscriber_id = SubscriberId(request.match_info["subscriber_id"])
        event_type = request.match_info["event_type"]
        sub = await self.subscription_handler.remove_subscription(
            subscriber_id, event_type)
        return await single_result(request, to_js(sub))

    async def handle_subscribed(self, request: Request) -> StreamResponse:
        subscriber_id = SubscriberId(request.match_info["subscriber_id"])
        subscriber = await self.subscription_handler.get_subscriber(
            subscriber_id)
        if subscriber_id in self.message_bus.active_listener:
            log.info(
                f"There is already a listener for subscriber: {subscriber_id}. Reject."
            )
            return web.HTTPTooManyRequests(
                text="Only one connection per subscriber is allowed!")
        elif subscriber and subscriber.subscriptions:
            pending = await self.workflow_handler.list_all_pending_actions_for(
                subscriber)
            return await self.listen_to_events(
                request, subscriber_id, list(subscriber.subscriptions.keys()),
                pending)
        else:
            return web.HTTPNotFound(
                text=
                f"No subscriber with this id: {subscriber_id} or no subscriptions"
            )

    async def redirect_to_api_doc(self, request: Request) -> StreamResponse:
        raise web.HTTPFound("api-doc")

    async def handle_events(self, request: Request) -> StreamResponse:
        show = request.query["show"].split(
            ",") if "show" in request.query else ["*"]
        return await self.listen_to_events(request,
                                           SubscriberId(str(uuid.uuid1())),
                                           show)

    async def listen_to_events(
        self,
        request: Request,
        listener_id: SubscriberId,
        event_types: List[str],
        initial_messages: Optional[Sequence[Message]] = None,
    ) -> WebSocketResponse:
        async def handle_message(msg: str) -> None:
            js = json.loads(msg)
            if "data" in js:
                js["data"]["subscriber_id"] = listener_id
            message: Message = from_js(js, Message)
            if isinstance(message, Action):
                raise AttributeError(
                    "Actors should not emit action messages. ")
            elif isinstance(message, ActionDone):
                await self.workflow_handler.handle_action_done(message)
            elif isinstance(message, ActionError):
                await self.workflow_handler.handle_action_error(message)
            else:
                await self.message_bus.emit(message)

        return await accept_websocket(
            request,
            handle_incoming=handle_message,
            outgoing_context=partial(self.message_bus.subscribe, listener_id,
                                     event_types),
            websocket_handler=self.websocket_handler,
            initial_messages=initial_messages,
        )

    async def handle_work_tasks(self, request: Request) -> WebSocketResponse:
        worker_id = WorkerId(uuid_str())
        task_param = request.query.get("task")
        if not task_param:
            raise AttributeError(
                "A worker needs to define at least one task that it can perform"
            )
        attrs = {
            k: re.split("\\s*,\\s*", v)
            for k, v in request.query.items() if k != "task"
        }
        task_descriptions = [
            WorkerTaskDescription(name, attrs)
            for name in re.split("\\s*,\\s*", task_param)
        ]

        async def handle_message(msg: str) -> None:
            tr = from_js(json.loads(msg), WorkerTaskResult)
            if tr.result == "error":
                error = tr.error if tr.error else "worker signalled error without detailed error message"
                await self.worker_task_queue.error_task(
                    worker_id, tr.task_id, error)
            elif tr.result == "done":
                await self.worker_task_queue.acknowledge_task(
                    worker_id, tr.task_id, tr.data)
            else:
                log.info(f"Do not understand this message: {msg}")

        def task_json(task: WorkerTask) -> str:
            return to_js_str(task.to_json())

        return await accept_websocket(
            request,
            handle_incoming=handle_message,
            outgoing_context=partial(self.worker_task_queue.attach, worker_id,
                                     task_descriptions),
            websocket_handler=self.websocket_handler,
            outgoing_fn=task_json,
        )

    async def create_work(self, request: Request) -> StreamResponse:
        attrs = {k: v for k, v in request.query.items() if k != "task"}
        future = asyncio.get_event_loop().create_future()
        task = WorkerTask(TaskId(uuid_str()), "test", attrs, {
            "some": "data",
            "foo": "bla"
        }, future, timedelta(seconds=3))
        await self.worker_task_queue.add_task(task)
        await future
        return web.HTTPOk()

    async def list_work(self, request: Request) -> StreamResponse:
        def wt_to_js(ip: WorkerTaskInProgress) -> Json:
            return {
                "task": ip.task.to_json(),
                "worker": ip.worker.worker_id,
                "retry_counter": ip.retry_counter,
                "deadline": to_json(ip.deadline),
            }

        return web.json_response([
            wt_to_js(ot)
            for ot in self.worker_task_queue.outstanding_tasks.values()
        ])

    async def model_uml(self, request: Request) -> StreamResponse:
        output = request.query.get("output", "svg")
        show = request.query["show"].split(
            ",") if "show" in request.query else None
        hide = request.query["hide"].split(
            ",") if "hide" in request.query else None
        with_inheritance = request.query.get("with_inheritance",
                                             "true") != "false"
        with_base_classes = request.query.get("with_base_classes",
                                              "true") != "false"
        with_subclasses = request.query.get("with_subclasses",
                                            "false") != "false"
        dependency = set(request.query["dependency"].split(
            ",")) if "dependency" in request.query else None
        with_predecessors = request.query.get("with_predecessors",
                                              "false") != "false"
        with_successors = request.query.get("with_successors",
                                            "false") != "false"
        with_properties = request.query.get("with_properties",
                                            "true") != "false"
        link_classes = request.query.get("link_classes", "false") != "false"
        result = await self.model_handler.uml_image(
            output=output,
            show_packages=show,
            hide_packages=hide,
            with_inheritance=with_inheritance,
            with_base_classes=with_base_classes,
            with_subclasses=with_subclasses,
            dependency_edges=dependency,
            with_predecessors=with_predecessors,
            with_successors=with_successors,
            with_properties=with_properties,
            link_classes=link_classes,
        )
        response = web.StreamResponse()
        mt = {"svg": "image/svg+xml", "png": "image/png"}
        response.headers["Content-Type"] = mt[output]
        await response.prepare(request)
        await response.write_eof(result)
        return response

    async def get_model(self, request: Request) -> StreamResponse:
        md = await self.model_handler.load_model()
        return await single_result(request, to_js(md.kinds.values()))

    async def update_model(self, request: Request) -> StreamResponse:
        js = await self.json_from_request(request)
        kinds: List[Kind] = from_js(js, List[Kind])
        model = await self.model_handler.update_model(kinds)
        return await single_result(request, to_js(model))

    async def get_node(self, request: Request) -> StreamResponse:
        graph_id = request.match_info.get("graph_id", "resoto")
        node_id = request.match_info.get("node_id", "root")
        graph = self.db.get_graph_db(graph_id)
        model = await self.model_handler.load_model()
        node = await graph.get_node(model, node_id)
        if node is None:
            return web.HTTPNotFound(
                text=f"No such node with id {node_id} in graph {graph_id}")
        else:
            return await single_result(request, node)

    async def create_node(self, request: Request) -> StreamResponse:
        graph_id = request.match_info.get("graph_id", "resoto")
        node_id = request.match_info.get("node_id", "some_existing")
        parent_node_id = request.match_info.get("parent_node_id", "root")
        graph = self.db.get_graph_db(graph_id)
        item = await self.json_from_request(request)
        md = await self.model_handler.load_model()
        node = await graph.create_node(md, node_id, item, parent_node_id)
        return await single_result(request, node)

    async def update_node(self, request: Request) -> StreamResponse:
        graph_id = request.match_info.get("graph_id", "resoto")
        node_id = request.match_info.get("node_id", "some_existing")
        section = section_of(request)
        graph = self.db.get_graph_db(graph_id)
        patch = await self.json_from_request(request)
        md = await self.model_handler.load_model()
        node = await graph.update_node(md, node_id, patch, False, section)
        return await single_result(request, node)

    async def delete_node(self, request: Request) -> StreamResponse:
        graph_id = request.match_info.get("graph_id", "resoto")
        node_id = request.match_info.get("node_id", "some_existing")
        if node_id == "root":
            raise AttributeError("Root node can not be deleted!")
        graph = self.db.get_graph_db(graph_id)
        await graph.delete_node(node_id)
        return web.HTTPNoContent()

    async def update_nodes(self, request: Request) -> StreamResponse:
        graph_id = request.match_info.get("graph_id", "resoto")
        allowed = {*Section.content, "id", "revision"}
        updates: Dict[str, Json] = {}
        async for elem in self.to_json_generator(request):
            keys = set(elem.keys())
            assert keys.issubset(
                allowed), f"Invalid json. Allowed keys are: {allowed}"
            assert "id" in elem, f"No id given for element {elem}"
            assert keys.intersection(
                Section.content), f"No update provided for element {elem}"
            uid = elem["id"]
            assert uid not in updates, f"Only one update allowed per id! {elem}"
            del elem["id"]
            updates[uid] = elem
        db = self.db.get_graph_db(graph_id)
        model = await self.model_handler.load_model()
        result_gen = db.update_nodes(model, updates)
        return await self.stream_response_from_gen(request, result_gen)

    async def list_graphs(self, request: Request) -> StreamResponse:
        graphs = await self.db.list_graphs()
        return await single_result(request, graphs)

    async def create_graph(self, request: Request) -> StreamResponse:
        graph_id = request.match_info.get("graph_id", "resoto")
        if "_" in graph_id:
            raise AttributeError("Graph name should not have underscores!")
        graph = await self.db.create_graph(graph_id)
        model = await self.model_handler.load_model()
        root = await graph.get_node(model, "root")
        return web.json_response(root)

    async def merge_graph(self, request: Request) -> StreamResponse:
        log.info("Received merge_graph request")
        graph_id = request.match_info.get("graph_id", "resoto")
        task_id: Optional[TaskId] = None
        if tid := request.headers.get("Resoto-Worker-Task-Id"):
            task_id = TaskId(tid)
        db = self.db.get_graph_db(graph_id)
        it = self.to_line_generator(request)
        info = await merge_graph_process(
            db, self.event_sender, self.config, it,
            self.config.graph_update.merge_max_wait_time(), None, task_id)
        return web.json_response(to_js(info))
Example No. 24
async def update_many(self, elements: List[T]) -> None:
    for elem in elements:
        key = self.key_fn(elem)
        self.items[key] = to_js(elem)
Example No. 25
async def update(self, t: T) -> T:
    self.items[self.key_fn(t)] = to_js(t)
    return t
Example No. 26
async def put_config_validation(self, cfg: ConfigValidation) -> ConfigValidation:
    async with self.session.put(self.base_path + f"/config/{cfg.id}/validation", json=to_js(cfg)) as response:
        return from_js(await response.json(), ConfigValidation)
Example No. 27
def roundtrip(obj: Any) -> None:
    js = to_js(obj)
    again = from_js(js, type(obj))
    assert DeepDiff(obj, again) == {}, f"Json: {js} serialized as {again}"
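A hypothetical usage of this helper, reusing the ConfigEntity construction already shown in Example No. 18 (illustrative only; the test name is invented):

def test_config_entity_roundtrip_via_helper() -> None:
    # same value as in Example No. 18, checked through the generic helper instead
    roundtrip(ConfigEntity(ConfigId("test"), {"test": 1}, "test"))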
Example No. 28
async def get_config_validation(self, request: Request) -> StreamResponse:
    config_id = request.match_info["config_id"]
    model = await self.config_handler.get_config_validation(config_id)
    if model:
        return await single_result(request, to_js(model))
    else:
        return HTTPNotFound(text="No model for this config.")
Example No. 29
def to_json(obj: BaseResource) -> Json:
    return {"kind": obj.kind(), **to_js(obj)}
Example No. 30
async def get_configs_model(self, request: Request) -> StreamResponse:
    model = await self.config_handler.get_configs_model()
    return await single_result(request, to_js(model))