Example #1
0
 def to_json(o: Workflow, **_: object) -> Json:
     """Serialize a Workflow into its Json representation.

     The environment entry is only emitted when the workflow defines one.
     """
     js: Json = {
         "id": o.id,
         "name": o.name,
         "steps": to_json(o.steps),
         "triggers": to_json(o.triggers),
         "on_surpass": to_js(o.on_surpass),
     }
     if o.environment:
         js["environment"] = o.environment
     return js
Example #2
0
 def wt_to_js(ip: WorkerTaskInProgress) -> Json:
     """Render an in-progress worker task as Json."""
     return dict(
         task=ip.task.to_json(),
         worker=ip.worker.worker_id,
         retry_counter=ip.retry_counter,
         deadline=to_json(ip.deadline),
     )
Example #3
0
 async def on_start() -> None:
     """Start all application services and emit system lifecycle events.

     Services are started sequentially; the order here matters, since later
     services depend on earlier ones being available.
     """
     # queue must be created inside an async function!
     cli_deps.extend(forked_tasks=Queue())
     await db.start()
     await event_sender.start()
     await subscriptions.start()
     await scheduler.start()
     await worker_task_queue.start()
     await event_emitter.start()
     await cli.start()
     await task_handler.start()
     await api.start()
     # NOTE(review): `created` is defined outside this view — presumably set
     # during database setup when the system is bootstrapped the first time.
     if created:
         await event_sender.core_event(CoreEvent.SystemInstalled)
     # report basic host/runtime information with every start
     await event_sender.core_event(
         CoreEvent.SystemStarted,
         {
             "version": version(),
             "created_at": to_json(system_data.created_at),
             "system": platform.system(),
             "platform": platform.platform(),
             "inside_docker": info.inside_docker,
         },
         cpu_count=info.cpus,
         mem_total=info.mem_total,
         mem_available=info.mem_available,
     )
Example #4
0
 def to_json(o: Job, **_: object) -> Json:
     """Serialize a Job into its Json representation.

     Environment and wait information are optional and only included
     when present on the job.
     """
     js: Json = {
         "id": o.id,
         "name": o.name,
         "command": to_js(o.command),
         "trigger": to_js(o.trigger),
         "timeout": to_json(o.timeout),
         "active": o.active,
     }
     if o.environment:
         js["environment"] = o.environment
     if o.wait:
         wait_trigger, wait_timeout = o.wait
         js["wait_trigger"] = to_js(wait_trigger)
         js["wait_timeout"] = to_json(wait_timeout)
     return js
Example #5
0
 async def update_subscriber(self, request: Request) -> StreamResponse:
     """Replace the subscriptions of the subscriber addressed in the path."""
     subscriber_id = request.match_info["subscriber_id"]
     body = await self.json_from_request(request)
     updated = await self.subscription_handler.update_subscriptions(
         subscriber_id, from_js(body, List[Subscription]))
     return await single_result(request, to_json(updated))
Example #6
0
 def line_to_js(line: ParsedCommandLine) -> Json:
     """Convert a parsed command line into its Json form."""
     executed = []
     for part in line.executable_commands:
         executed.append({"cmd": part.command.name, "arg": part.arg})
     return {
         "parsed": to_json(line.parsed_commands.commands),
         "execute": executed,
         "env": line.parsed_commands.env,
     }
Example #7
0
def graph_access() -> GraphAccess:
    """Build a small fixture graph: four foo nodes with default and delete edges."""
    g = MultiDiGraph()

    def connect(start: str, end: str, kind: str) -> None:
        # a MultiDiGraph needs an explicit key to distinguish parallel edges
        g.add_edge(start, end, GraphAccess.edge_key(start, end, kind), edge_type=kind)

    nodes = [("1", "a"), ("2", "b"), ("3", "c"), ("4", "d")]
    for version, (node_id, name) in enumerate(nodes, start=1):
        g.add_node(
            node_id,
            reported=to_json(FooTuple(node_id)),
            desired={"name": name},
            metadata={"version": version},
            kinds=["foo"],
        )
    for start, end in [("1", "2"), ("1", "3"), ("2", "3"), ("2", "4"), ("3", "4")]:
        connect(start, end, EdgeType.default)
    for start, end in [("1", "2"), ("1", "3"), ("1", "4")]:
        connect(start, end, EdgeType.delete)
    return GraphAccess(g)
Example #8
0
async def respond_json(
        gen: AsyncIterator[JsonElement]) -> AsyncGenerator[str, None]:
    """Stream the elements of gen as one JSON array.

    The opening bracket is emitted immediately, a comma before every
    element but the first, and the closing bracket once gen is exhausted.
    """
    yield "["
    emitted = 0
    async for element in gen:
        # serialize before yielding the separator, so a failing element
        # does not leave a dangling comma in the output
        js = json.dumps(to_json(element))
        if emitted:
            yield ","
        yield js
        emitted += 1
    yield "]"
Example #9
0
 async def update_merge_graph_batch(self,
                                    request: Request) -> StreamResponse:
     """Merge an uploaded sub-graph as a batch update and report the result."""
     log.info("Received put_sub_graph_batch request")
     graph_db = self.db.get_graph_db(request.match_info.get("graph_id", "resoto"))
     # fall back to a random 12 letter id if the client did not provide one
     random_id = "".join(SystemRandom().choice(string.ascii_letters)
                         for _ in range(12))
     batch_id = request.query.get("batch_id", random_id)
     lines = self.to_line_generator(request)
     info = await merge_graph_process(graph_db, self.event_sender, self.args,
                                      lines, self.merge_max_wait_time, batch_id)
     return web.json_response(to_json(info), headers={"BatchId": batch_id})
Example #10
0
async def respond_yaml(
        gen: AsyncIterator[JsonElement]) -> AsyncGenerator[str, None]:
    """Stream the elements of gen as YAML documents separated by ---."""
    first = True
    async for element in gen:
        doc = yaml.dump(to_json(element),
                        default_flow_style=False,
                        sort_keys=False)
        if not first:
            yield "---"
        yield doc
        first = False
Example #11
0
def test_access_node() -> None:
    """A node added to the graph is retrievable with hash and reported section."""
    graph = MultiDiGraph()
    graph.add_node("1", reported=to_json(FooTuple(a="1")))
    access: GraphAccess = GraphAccess(graph)
    elem: Json = node(access, "1")  # type: ignore
    expected_reported = {
        "a": "1",
        "b": 0,
        "c": [],
        "d": "foo",
        "e": {"a": 12, "b": 32},
        "f": "2021-03-29",
        "g": 1.234567,
        "kind": "foo",
    }
    assert elem["hash"] == "153c1a5c002f6213a95383f33b63aa18b8ed6939f57418fb0f27312576f0cea4"
    assert elem["reported"] == expected_reported
    # unknown node ids resolve to None
    assert access.node("2") is None
Example #12
0
async def respond_text(
        gen: AsyncIterator[JsonElement]) -> AsyncGenerator[str, None]:
    """Render the elements of gen as human readable text.

    Dict/list elements are emitted as YAML documents separated by "---";
    all other elements are emitted via str(). A query timeout is turned
    into a helpful hint instead of propagating the error.
    """
    def filter_attrs(js: Json) -> Json:
        # NOTE(review): del_value_in_path appears to mutate js in place —
        # result is the same object, returned for convenience.
        result: Json = js
        for path in plain_text_blacklist:
            del_value_in_path(js, path)
        return result

    def to_result(js: JsonElement) -> JsonElement:
        # if js is a node, the resulting content should be filtered
        return filter_attrs(js) if is_node(js) else js  # type: ignore

    try:
        flag = False
        sep = "---"
        async for item in gen:
            js = to_json(item)
            if isinstance(js, (dict, list)):
                # separator only between documents, not before the first one
                if flag:
                    yield sep
                yml = yaml.dump(to_result(js),
                                default_flow_style=False,
                                sort_keys=False)
                yield yml
            else:
                # scalar values are printed directly without yaml formatting
                yield str(js)
            flag = True
    except QueryTookToLongError:
        yield ("\n\n---------------------------------------------------\n"
               "Query took too long.\n"
               "Try one of the following:\n"
               "- refine your query\n"
               "- add a limit to your query\n"
               "- define a longer timeout via env var search_timeout\n"
               "  e.g. $> search_timeout=60s query all\n"
               "---------------------------------------------------\n\n")
Example #13
0
async def respond_ndjson(
        gen: AsyncIterator[JsonElement]) -> AsyncGenerator[str, None]:
    """Stream each element of gen as a single JSON document (ndjson style)."""
    async for element in gen:
        yield json.dumps(to_json(element), check_circular=False)
Example #14
0
 async def list_all_subscriptions(self, request: Request) -> StreamResponse:
     """Return all known subscribers as a single result."""
     all_subscribers = await self.subscription_handler.all_subscribers()
     return await single_result(request, to_json(all_subscribers))
Example #15
0
def roundtrip(obj: Any, clazz: Type[object]) -> None:
    """Assert that obj survives a to_json/from_js round trip unchanged.

    Serializes obj to json, parses it back as clazz, and requires the
    result to have the same concrete type and no structural difference.
    """
    js = to_json(obj)
    again = from_js(js, clazz)
    assert type(obj) == type(again)
    assert DeepDiff(obj, again) == {}, f"Json: {js} serialized as {again}"
Example #16
0
    async def execute_parsed(
            self, request: Request, command: str,
            parsed: List[ParsedCommandLine]) -> StreamResponse:
        """Execute already parsed command lines and stream the results.

        A single command line is answered as a plain (streamed) response;
        multiple command lines are answered as one multipart/mixed response
        with one part per line. If any command has unmet requirements, a
        424 json response listing them is returned instead.
        """
        # make sure, all requirements are fulfilled
        not_met_requirements = [
            not_met for line in parsed for not_met in line.unmet_requirements
        ]
        # what is the accepted content type
        # only required for multipart requests
        boundary = "----cli"
        mp_response = web.StreamResponse(
            status=200,
            reason="OK",
            headers={"Content-Type": f"multipart/mixed;boundary={boundary}"})

        async def list_or_gen(
                current: ParsedCommandLine) -> Tuple[Optional[int], Stream]:
            # Execute one command line; for plain text clients append an
            # implicit `list` step, unless the last command already shapes
            # its own output (OutputTransformer / PreserveOutputFormat).
            maybe_count, out_gen = await current.execute()
            if (request.headers.get("accept") == "text/plain"
                    and current.executable_commands and
                    not isinstance(current.executable_commands[-1].command,
                                   (OutputTransformer, PreserveOutputFormat))):
                out_gen = await ListCommand(self.cli.dependencies
                                            ).parse(ctx=current.ctx
                                                    ).flow(out_gen)

            return maybe_count, out_gen

        if not_met_requirements:
            # 424 Failed Dependency: tell the client what is missing
            requirements = [
                req for line in parsed for cmd in line.executable_commands
                for req in cmd.action.required
            ]
            data = {
                "command": command,
                "env": dict(request.query),
                "required": to_json(requirements)
            }
            return web.json_response(data, status=424)
        elif len(parsed) == 1:
            first_result = parsed[0]
            count, generator = await list_or_gen(first_result)
            # flat the results from 0 or 1
            async with generator.stream() as streamer:
                gen = await force_gen(streamer)
                if first_result.produces.json:
                    return await self.stream_response_from_gen(
                        request, gen, count)
                elif first_result.produces.file_path:
                    await mp_response.prepare(request)
                    await Api.multi_file_response(parsed, gen, boundary,
                                                  mp_response)
                    return mp_response
                else:
                    raise AttributeError(
                        f"Can not handle type: {first_result.produces}")
        elif len(parsed) > 1:
            # multiple lines -> one multipart response, one part per line
            await mp_response.prepare(request)
            for single in parsed:
                count, generator = await list_or_gen(single)
                async with generator.stream() as streamer:
                    gen = await force_gen(streamer)
                    if single.produces.json:
                        with MultipartWriter(repr(single.produces),
                                             boundary) as mp:
                            content_type, result_stream = await result_binary_gen(
                                request, gen)
                            mp.append_payload(
                                AsyncIterablePayload(result_stream,
                                                     content_type=content_type,
                                                     headers=single.envelope))
                            await mp.write(mp_response, close_boundary=True)
                    elif single.produces.file_path:
                        await Api.multi_file_response(parsed, gen, boundary,
                                                      mp_response)
                    else:
                        raise AttributeError(
                            f"Can not handle type: {single.produces}")
            await mp_response.write_eof()
            return mp_response
        else:
            raise AttributeError("No command could be parsed!")
Example #17
0
    async def update_merge_graph_batch(self,
                                       request: Request) -> StreamResponse:
        """Merge an uploaded sub-graph as a batch update and report the result."""
        log.info("Received put_sub_graph_batch request")
        graph_id = request.match_info.get("graph_id", "resoto")
        # a worker may announce the task this import belongs to via header
        task_id: Optional[TaskId] = None
        if tid := request.headers.get("Resoto-Worker-Task-Id"):
            task_id = TaskId(tid)
        graph_db = self.db.get_graph_db(graph_id)
        # fall back to a random 12 letter batch id if none is provided
        random_id = "".join(SystemRandom().choice(string.ascii_letters)
                            for _ in range(12))
        batch_id = request.query.get("batch_id", random_id)
        lines = self.to_line_generator(request)
        info = await merge_graph_process(
            graph_db, self.event_sender, self.config, lines,
            self.config.graph_update.merge_max_wait_time(), batch_id, task_id)
        return web.json_response(to_json(info), headers={"BatchId": batch_id})

    async def list_batches(self, request: Request) -> StreamResponse:
        """List all batch updates currently in progress for the graph."""
        graph_id = request.match_info.get("graph_id", "resoto")
        updates = await self.db.get_graph_db(graph_id).list_in_progress_updates()
        # only batch updates are of interest here
        return web.json_response([u for u in updates if u.get("is_batch")])

    async def commit_batch(self, request: Request) -> StreamResponse:
        """Commit a previously started batch update."""
        graph_id = request.match_info.get("graph_id", "resoto")
        batch_id = request.match_info.get("batch_id", "some_existing")
        await self.db.get_graph_db(graph_id).commit_batch_update(batch_id)
        return web.HTTPOk(body="Batch committed.")
Example #18
0
 def optional_json(o: Any, hint: str) -> StreamResponse:
     """Return o as a json response, or a 404 carrying hint if o is falsy."""
     if not o:
         return web.HTTPNotFound(text=hint)
     return web.json_response(to_json(o))
Example #19
0
 async def list_subscription_for_event(self,
                                       request: Request) -> StreamResponse:
     """List all subscribers registered for the event type given in the path."""
     subscribers = await self.subscription_handler.list_subscriber_for(
         request.match_info["event_type"])
     return await single_result(request, to_json(subscribers))