Example #1
async def test_cli(core_client: ApiClient) -> None:
    # make sure we have a clean slate
    with suppress(Exception):
        await core_client.delete_graph(g)
    await core_client.create_graph(g)
    await core_client.merge_graph(g, create_graph("test"))

    # evaluate search with count
    result = await core_client.cli_evaluate(g, "search all | count kind")
    assert len(result) == 1
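    # each entry pairs the parsed command line with the commands that will actually be executed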
    parsed, to_execute = result[0]
    assert len(parsed.commands) == 2
    assert (parsed.commands[0].cmd, parsed.commands[1].cmd) == ("search",
                                                                "count")
    assert len(to_execute) == 2
    assert (to_execute[0].cmd, to_execute[1].cmd) == ("execute_search",
                                                      "aggregate_to_count")

    # execute search with count
    executed = await core_client.cli_execute(
        g, "search is(foo) or is(bla) | count kind")
    assert executed == [
        "cloud: 1", "foo: 11", "bla: 100", "total matched: 112",
        "total unmatched: 0"
    ]

    # list all cli commands
    info = await core_client.cli_info()
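    # the expected command count is tied to this core version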
    assert len(info.commands) == 33
Example #2
async def test_merge_process(event_sender: AnalyticsEventSender,
                             graph_db: ArangoGraphDB,
                             foo_kinds: List[Kind]) -> None:
    # set the start method explicitly (main does the same)
    set_start_method("spawn")

    # wipe any existing data
    await graph_db.wipe()
    # store the model in the db so it can be loaded by the subprocess
    graph_db.db.collection("model").insert_many([to_js(a) for a in foo_kinds])
    # build the config the subprocess will use
    config = empty_config([
        "--graphdb-username", "test", "--graphdb-password", "test",
        "--graphdb-database", "test"
    ])
    # create sample graph data to insert
    graph = create_graph("test")

    await outer_edge_db(graph_db.db,
                        "deferred_outer_edges").create_update_schema()

    async def iterator() -> AsyncGenerator[bytes, None]:
        for node in graph.nodes():
            yield bytes(json.dumps(graph.nodes[node]), "utf-8")
        for from_node, to_node, data in graph.edges(data=True):
            yield bytes(
                json.dumps({
                    "from": from_node,
                    "to": to_node,
                    "edge_type": data["edge_type"]
                }), "utf-8")
        yield bytes(
            json.dumps({
                "from_selector": {
                    "node_id": "id_123"
                },
                "to_selector": {
                    "node_id": "id_456"
                },
                "edge_type": "delete"
            }),
            "utf-8",
        )

    result = await merge_graph_process(graph_db, event_sender, config,
                                       iterator(), timedelta(seconds=30), None,
                                       TaskId("test_task_123"))
    assert result == GraphUpdate(112, 1, 0, 212, 0, 0)
    elem = graph_db.db.collection("deferred_outer_edges").all().next()
    assert elem["_key"] == "test_task_123"
    assert elem["task_id"] == "test_task_123"
    assert elem["edges"][0] == {
        "from_node": "id_123",
        "to_node": "id_456",
        "edge_type": "delete"
    }
Example #3
async def test_merge_process(
    event_sender: AnalyticsEventSender, graph_db: ArangoGraphDB, foo_kinds: List[Kind]
) -> None:
    # set the start method explicitly (main does the same)
    set_start_method("spawn")

    # wipe any existing data
    await graph_db.wipe()
    # store the model in the db so it can be loaded by the subprocess
    graph_db.db.collection("model").insert_many([to_js(a) for a in foo_kinds])
    # define the args the subprocess will parse
    args = parse_args(["--graphdb-username", "test", "--graphdb-password", "test", "--graphdb-database", "test"])
    # create sample graph data to insert
    graph = create_graph("test")

    async def iterator() -> AsyncGenerator[bytes, None]:
        for node in graph.nodes():
            yield bytes(json.dumps(graph.nodes[node]), "utf-8")
        for from_node, to_node, data in graph.edges(data=True):
            yield bytes(json.dumps({"from": from_node, "to": to_node, "edge_type": data["edge_type"]}), "utf-8")

    result = await merge_graph_process(graph_db, event_sender, args, iterator(), timedelta(seconds=30), None)
    assert result == GraphUpdate(112, 1, 0, 212, 0, 0)
Example #4
def test_graph_api(core_client: ApiClient) -> None:
    # make sure we have a clean slate
    with suppress(Exception):
        core_client.delete_graph(g)

    # create a new graph
    graph = AccessJson(core_client.create_graph(g))
    assert graph.id == "root"
    assert graph.reported.kind == "graph_root"

    # list all graphs
    graphs = core_client.list_graphs()
    assert g in graphs

    # get one specific graph
    graph: AccessJson = AccessJson(core_client.get_graph(g))  # type: ignore
    assert graph.id == "root"
    assert graph.reported.kind == "graph_root"

    # wipe the data in the graph
    assert core_client.delete_graph(g, truncate=True) == "Graph truncated."
    assert g in core_client.list_graphs()

    # create a node in the graph
    uid = rnd_str()
    node = AccessJson(core_client.create_node("root", uid, {"identifier": uid, "kind": "child", "name": "max"}, g))
    assert node.id == uid
    assert node.reported.name == "max"

    # update a node in the graph
    node = AccessJson(core_client.patch_node(uid, {"name": "moritz"}, "reported", g))
    assert node.id == uid
    assert node.reported.name == "moritz"

    # get the node
    node = AccessJson(core_client.get_node(uid, g))
    assert node.id == uid
    assert node.reported.name == "moritz"

    # delete the node
    core_client.delete_node(uid, g)
    with pytest.raises(AttributeError):
        # the node cannot be found anymore
        core_client.get_node(uid, g)

    # merge a complete graph
    merged = core_client.merge_graph(graph_to_json(create_graph("test")), g)
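    # 112 nodes and 212 edges are created, one existing node (the root) is updated (GraphUpdate field order assumed)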
    assert merged == rc.GraphUpdate(112, 1, 0, 212, 0, 0)

    # batch graph update and commit
    batch1_id, batch1_info = core_client.add_to_batch(graph_to_json(create_graph("hello")), "batch1", g)
    assert batch1_info == rc.GraphUpdate(0, 100, 0, 0, 0, 0)
    assert batch1_id == "batch1"
    batch_infos = AccessJson.wrap_list(core_client.list_batches(g))
    assert len(batch_infos) == 1
    # assert batch_infos[0].id == batch1_id
    assert batch_infos[0].affected_nodes == ["collector"]  # replace node
    assert batch_infos[0].is_batch is True
    core_client.commit_batch(batch1_id, g)

    # batch graph update and abort
    batch2_id, batch2_info = core_client.add_to_batch(graph_to_json(create_graph("bonjour")), "batch2", g)
    assert batch2_info == rc.GraphUpdate(0, 100, 0, 0, 0, 0)
    assert batch2_id == "batch2"
    core_client.abort_batch(batch2_id, g)

    # update nodes
    update = [{"id": node["id"], "reported": {"name": "bruce"}} for _, node in create_graph("foo").nodes(data=True)]
    updated_nodes = core_client.patch_nodes(update, g)
    assert len(updated_nodes) == 113
    for n in updated_nodes:
        assert n.get("reported", {}).get("name") == "bruce"

    # create the raw search
    raw = core_client.search_graph_raw('id("3")', g)
    assert raw == {
        "query": "LET filter0 = (FOR m0 in graphtest FILTER m0._key == @b0  RETURN m0) "
        'FOR result in filter0 RETURN UNSET(result, ["flat"])',
        "bind_vars": {"b0": "3"},
    }

    # estimate the search
    cost = core_client.search_graph_explain('id("3")', g)
    assert cost.full_collection_scan is False
    assert cost.rating == rc.EstimatedQueryCostRating.simple

    # search list
    result_list = list(core_client.search_list('id("3") -[0:]->', graph=g))
    assert len(result_list) == 11  # one parent node and 10 child nodes
    assert result_list[0].get("id") == "3"  # first node is the parent node

    # search graph
    result_graph = list(core_client.search_graph('id("3") -[0:]->', graph=g))
    assert len(result_graph) == 21  # 11 nodes + 10 edges
    assert result_graph[0].get("id") == "3"  # first node is the parent node

    # aggregate
    result_aggregate = core_client.search_aggregate("aggregate(reported.kind as kind: sum(1) as count): all", g)
    assert {r["group"]["kind"]: r["count"] for r in result_aggregate} == {
        "bla": 100,
        "cloud": 1,
        "foo": 11,
        "graph_root": 1,
    }

    # delete the graph
    assert core_client.delete_graph(g) == "Graph deleted."
    assert g not in core_client.list_graphs()