def _create_identity_delta():
    """Build a delta wrapping the identity rule with empty instances.

    Returns a dict with the identity `Rule` under "rule" and empty
    "lhs_instance"/"rhs_instance" mappings.
    """
    return {
        "rule": Rule.identity_rule(),
        "lhs_instance": {},
        "rhs_instance": {}
    }
def _refine_rule_hierarchy(hierarchy, rule_hierarchy, lhs_instances):
    """Refine all rules of a rule hierarchy in place.

    Three phases, visible below:

    1. Every rule in ``rule_hierarchy["rules"]`` is refined against the
       graph it rewrites (``Rule.refine``), producing new LHS instances.
    2. The (lhs, p, rhs) homomorphisms between rules are completed for
       LHS nodes introduced by the refinement (the existing homomorphism
       dicts are mutated in place).
    3. Graphs related to the rewritten ones but missing from the rule
       hierarchy get identity-like rules (``Rule(p=l, lhs=l)``), built
       by pullback for ancestors and by image factorization for
       descendants, together with the connecting homomorphisms.

    Parameters
    ----------
    hierarchy : object
        Graph hierarchy; interface used here: ``get_graph``,
        ``get_typing``, ``graphs``, ``typings``, ``get_ancestors``,
        ``get_descendants``, ``successors``, ``predecessors``,
        ``shortest_path`` (project type — interface inferred from usage).
    rule_hierarchy : dict
        With keys ``"rules"`` (graph id -> Rule) and
        ``"rule_homomorphisms"`` ((source, target) -> (lhs_h, p_h,
        rhs_h)). NOTE: mutated in place (updated at the end and the
        homomorphism dicts are extended during phase 2).
    lhs_instances : dict
        Graph id -> instance of the rule's LHS in that graph.

    Returns
    -------
    dict
        Graph id -> refined LHS instance (for original and newly added
        rules).
    """
    new_lhs_instances = {}

    new_rules = {}
    new_rule_homomorphisms = {}

    # Phase 1: refine each rule against the graph it rewrites.
    for graph, rule in rule_hierarchy["rules"].items():
        # refine rule
        new_lhs_instance = rule.refine(
            hierarchy.get_graph(graph), lhs_instances[graph])
        new_lhs_instances[graph] = new_lhs_instance

    # Phase 2: update rule homomorphisms for LHS nodes added by the
    # refinement (nodes of the source LHS not yet mapped by lhs_h).
    for (source, target), (lhs_h, p_h, rhs_h) in rule_hierarchy[
            "rule_homomorphisms"].items():
        typing = hierarchy.get_typing(source, target)
        source_rule = rule_hierarchy["rules"][source]
        target_rule = rule_hierarchy["rules"][target]
        for node in source_rule.lhs.nodes():
            if node not in lhs_h.keys():
                # Map the new LHS node through the graph-level typing
                # and back into the target rule's LHS.
                source_node = new_lhs_instances[source][node]
                target_node = typing[source_node]
                target_lhs_node = keys_by_value(
                    new_lhs_instances[target], target_node)[0]
                lhs_h[node] = target_lhs_node
                if node in source_rule.p_lhs.values():
                    # The node is preserved in P: extend p_h and rhs_h
                    # consistently with p_lhs / p_rhs.
                    source_p_node = keys_by_value(
                        source_rule.p_lhs, node)[0]
                    target_p_node = keys_by_value(
                        target_rule.p_lhs, node)[0]
                    p_h[source_p_node] = target_p_node
                    source_rhs_node = source_rule.p_rhs[source_p_node]
                    target_rhs_node = target_rule.p_rhs[target_p_node]
                    rhs_h[source_rhs_node] = target_rhs_node

    if len(rule_hierarchy["rules"]) == 0:
        # Empty rule hierarchy: give every graph the identity rule and
        # every typing an empty homomorphism triple.
        for graph in hierarchy.graphs():
            rule_hierarchy["rules"][graph] = Rule.identity_rule()
            new_lhs_instances[graph] = dict()
        for (s, t) in hierarchy.typings():
            rule_hierarchy["rule_homomorphisms"][(s, t)] = (
                dict(), dict(), dict())
    else:
        # Phase 3: extend the rule hierarchy to related graphs.
        for graph, rule in rule_hierarchy["rules"].items():
            # add identity rules where needed
            # to preserve the info on p/rhs_typing
            # add ancestors that are not included in rule hierarchy
            for ancestor, typing in hierarchy.get_ancestors(graph).items():
                if ancestor not in rule_hierarchy["rules"] and\
                   ancestor not in new_rules:
                    # Find a typing of ancestor by the graph
                    l_pred, l_pred_pred, l_pred_l_graph = pullback(
                        hierarchy.get_graph(ancestor), rule.lhs,
                        hierarchy.get_graph(graph), typing,
                        new_lhs_instances[graph])
                    # Identity rule on the pulled-back pattern.
                    new_rules[ancestor] = Rule(p=l_pred, lhs=l_pred)
                    new_lhs_instances[ancestor] = l_pred_pred
                    # RHS counterpart of the pullback projection.
                    r_pred_r_graph = {
                        v: rule.p_rhs[k]
                        for k, v in l_pred_l_graph.items()
                    }
                    for successor in hierarchy.successors(ancestor):
                        if successor in rule_hierarchy["rules"]:
                            if successor == graph:
                                new_rule_homomorphisms[
                                    (ancestor, graph)] = (
                                        l_pred_l_graph,
                                        l_pred_l_graph,
                                        r_pred_r_graph)
                            else:
                                # Compose homomorphisms along the path
                                # graph -> ... -> successor, then
                                # prepend the pullback projection.
                                path = hierarchy.shortest_path(
                                    graph, successor)
                                lhs_h, p_h, rhs_h = rule_hierarchy[
                                    "rule_homomorphisms"][
                                        (path[0], path[1])]
                                for i in range(2, len(path)):
                                    new_lhs_h, new_p_h, new_rhs_h =\
                                        rule_hierarchy[
                                            "rule_homomorphisms"][
                                                (path[i - 1], path[i])]
                                    lhs_h = compose(lhs_h, new_lhs_h)
                                    p_h = compose(p_h, new_p_h)
                                    rhs_h = compose(rhs_h, new_rhs_h)
                                new_rule_homomorphisms[
                                    (ancestor, successor)] = (
                                        compose(l_pred_l_graph, lhs_h),
                                        compose(l_pred_l_graph, p_h),
                                        compose(r_pred_r_graph, rhs_h))
                        if successor in new_rules:
                            # Both ends carry identity-like rules: the
                            # LHS map doubles as p_h and rhs_h.
                            lhs_h = {
                                k: keys_by_value(
                                    new_lhs_instances[successor],
                                    hierarchy.get_typing(
                                        ancestor, successor)[v])[0]
                                for k, v in new_lhs_instances[
                                    ancestor].items()
                            }
                            new_rule_homomorphisms[
                                (ancestor, successor)] = (
                                    lhs_h, lhs_h, lhs_h)
                    for predecessor in hierarchy.predecessors(ancestor):
                        if predecessor in rule_hierarchy["rules"] or\
                           predecessor in new_rules:
                            lhs_h = {
                                k: keys_by_value(
                                    new_lhs_instances[ancestor],
                                    hierarchy.get_typing(
                                        predecessor, ancestor)[v])[0]
                                for k, v in new_lhs_instances[
                                    predecessor].items()
                            }
                            new_rule_homomorphisms[
                                (predecessor, ancestor)] = (
                                    lhs_h, lhs_h, lhs_h)
            for descendant, typing in hierarchy.get_descendants(
                    graph).items():
                if descendant not in rule_hierarchy["rules"] and\
                   descendant not in new_rules:
                    # Project the LHS into the descendant by image
                    # factorization of the composed instance/typing.
                    l_suc, l_graph_l_suc, l_suc_suc = image_factorization(
                        rule.lhs, hierarchy.get_graph(descendant),
                        compose(
                            new_lhs_instances[graph],
                            typing))
                    new_rules[descendant] = Rule(p=l_suc, lhs=l_suc)
                    new_lhs_instances[descendant] = l_suc_suc
                    # P-level counterpart of the LHS projection.
                    p_graph_p_suc = {
                        k: l_graph_l_suc[v]
                        for k, v in rule.p_lhs.items()
                    }
                    for predecessor in hierarchy.predecessors(descendant):
                        if predecessor in rule_hierarchy["rules"]:
                            if predecessor == graph:
                                new_rule_homomorphisms[
                                    (predecessor, descendant)] = (
                                        l_graph_l_suc,
                                        p_graph_p_suc,
                                        p_graph_p_suc)
                            else:
                                # Compose along predecessor -> ... ->
                                # graph, then append the projection.
                                path = hierarchy.shortest_path(
                                    predecessor, graph)
                                lhs_h, p_h, rhs_h = rule_hierarchy[
                                    "rule_homomorphisms"][
                                        (path[0], path[1])]
                                for i in range(2, len(path)):
                                    new_lhs_h, new_p_h, new_rhs_h =\
                                        rule_hierarchy[
                                            "rule_homomorphisms"][
                                                (path[i - 1], path[i])]
                                    lhs_h = compose(lhs_h, new_lhs_h)
                                    p_h = compose(p_h, new_p_h)
                                    rhs_h = compose(rhs_h, new_rhs_h)
                                new_rule_homomorphisms[
                                    (predecessor, descendant)] = (
                                        compose(lhs_h, l_graph_l_suc),
                                        compose(p_h, p_graph_p_suc),
                                        compose(rhs_h, p_graph_p_suc))
                        if predecessor in new_rules:
                            lhs_h = {
                                k: keys_by_value(
                                    new_lhs_instances[descendant],
                                    hierarchy.get_typing(
                                        predecessor, descendant)[v])[0]
                                for k, v in new_lhs_instances[
                                    predecessor].items()
                            }
                            new_rule_homomorphisms[
                                (predecessor, descendant)] = (
                                    lhs_h, lhs_h, lhs_h)
                    for successor in hierarchy.successors(descendant):
                        if successor in rule_hierarchy["rules"] or\
                           successor in new_rules:
                            lhs_h = {
                                k: keys_by_value(
                                    new_lhs_instances[successor],
                                    hierarchy.get_typing(
                                        descendant, successor)[v])[0]
                                for k, v in new_lhs_instances[
                                    descendant].items()
                            }
                            new_rule_homomorphisms[
                                (descendant, successor)] = (
                                    lhs_h, lhs_h, lhs_h)

    # Commit the newly created rules/homomorphisms into the hierarchy.
    rule_hierarchy["rules"].update(new_rules)
    rule_hierarchy["rule_homomorphisms"].update(new_rule_homomorphisms)

    return new_lhs_instances
def get_rule_liftings(tx, graph_id, rule, instance, p_typing=None):
    """Execute the query finding rule liftings.

    Builds a Cypher query that, for the given instance of ``rule.lhs``
    in the graph ``graph_id``, collects all nodes/edges of ancestor
    graphs typed by the instance (via ``-[:typing*1..]->`` paths), then
    reconstructs for each such ancestor graph a lifted rule (an
    identity-like rule obtained by pullback with ``rule.p``).

    Parameters
    ----------
    tx : transaction object exposing ``run`` — presumably a Neo4j
        transaction; TODO confirm against callers.
    graph_id : node label of the rewritten graph, interpolated into the
        query.
    rule : Rule whose ``lhs``/``p``/``p_lhs`` are lifted.
    instance : dict, LHS node -> node id in the rewritten graph.
    p_typing : optional dict, graph id -> (L_G node -> allowed P nodes);
        restricts which P_G instances are kept (see "Remove controlled
        things" below).

    Returns
    -------
    dict
        graph id -> {"rule", "instance", "l_g_l", "p_g_p"}.
    """
    if p_typing is None:
        p_typing = {}
    liftings = {}
    if len(rule.lhs.nodes()) > 0:
        # One query variable per LHS node (named after the node itself).
        lhs_vars = {
            n: n for n in rule.lhs.nodes()}
        match_instance_vars = {lhs_vars[k]: v for k, v in instance.items()}

        # Match nodes
        query = "// Match nodes the instance of the rewritten graph \n"
        query += "MATCH {}".format(
            ", ".join([
                "({}:{} {{id: '{}'}})".format(k, graph_id, v)
                for k, v in match_instance_vars.items()
            ])
        )
        query += "\n\n"

        carry_vars = list(lhs_vars.values())
        # For each instance node, collect every node typed by it
        # (anywhere up the typing chain) into <var>_dict.
        for k, v in lhs_vars.items():
            query += (
                "OPTIONAL MATCH (n)-[:typing*1..]->({})\n".format(v) +
                "WITH {} \n".format(
                    ", ".join(carry_vars + [
                        "collect({{type:'node', origin: {}.id, id: n.id, graph:labels(n)[0], attrs: properties(n)}}) as {}_dict\n".format(
                            v, v)])
                )
            )
            carry_vars.append("{}_dict".format(v))
        # Match edges
        for (u, v) in rule.lhs.edges():
            edge_var = "{}_{}".format(lhs_vars[u], lhs_vars[v])
            # Edges between nodes typed by the matched instance nodes.
            query += "OPTIONAL MATCH ({}_instance)-[{}:edge]->({}_instance)\n".format(
                lhs_vars[u], edge_var, lhs_vars[v])
            query += "WHERE ({})-[:typing*1..]->({}) AND ({})-[:typing*1..]->({})\n".format(
                "{}_instance".format(lhs_vars[u]), lhs_vars[u],
                "{}_instance".format(lhs_vars[v]), lhs_vars[v])
            query += (
                "WITH {} \n".format(
                    ", ".join(carry_vars + [
                        "collect({{type: 'edge', source: {}.id, target: {}.id, attrs: properties({}), graph:labels({})[0]}}) as {}\n".format(
                            "{}_instance".format(lhs_vars[u]),
                            "{}_instance".format(lhs_vars[v]),
                            edge_var,
                            "{}_instance".format(lhs_vars[u]),
                            edge_var)
                    ])
                )
            )
            carry_vars.append(edge_var)
        query += "RETURN {}".format(
            ", ".join(
                ["{}_dict as {}".format(v, v) for v in lhs_vars.values()] +
                ["{}_{}".format(lhs_vars[u], lhs_vars[v])
                 for u, v in rule.lhs.edges()]))

        result = tx.run(query)
        record = result.single()
        # l_g_ls: graph id -> (lifted LHS node -> original LHS node).
        l_g_ls = {}
        lhs_nodes = {}
        lhs_edges = {}
        # Group the collected records by the graph they belong to.
        for k, v in record.items():
            if len(v) > 0:
                if v[0]["type"] == "node":
                    for el in v:
                        if el["graph"] not in lhs_nodes:
                            lhs_nodes[el["graph"]] = []
                            l_g_ls[el["graph"]] = {}
                        l_g_ls[el["graph"]][el["id"]] = keys_by_value(
                            instance, el["origin"])[0]
                        # compute attr intersection
                        attrs = attrs_intersection(
                            generic.convert_props_to_attrs(el["attrs"]),
                            get_node(
                                rule.lhs,
                                l_g_ls[el["graph"]][el["id"]]))
                        lhs_nodes[el["graph"]].append((el["id"], attrs))
                else:
                    for el in v:
                        if el["graph"] not in lhs_edges:
                            lhs_edges[el["graph"]] = []
                        # compute attr intersection
                        attrs = attrs_intersection(
                            generic.convert_props_to_attrs(el["attrs"]),
                            get_edge(
                                rule.lhs,
                                l_g_ls[el["graph"]][el["source"]],
                                l_g_ls[el["graph"]][el["target"]]))
                        lhs_edges[el["graph"]].append(
                            (el["source"], el["target"], attrs)
                        )

        # Build one lifted rule per ancestor graph found above.
        for graph, nodes in lhs_nodes.items():
            lhs = nx.DiGraph()
            add_nodes_from(lhs, nodes)
            if graph in lhs_edges:
                add_edges_from(
                    lhs, lhs_edges[graph])

            p, p_lhs, p_g_p = pullback(
                lhs, rule.p, rule.lhs, l_g_ls[graph], rule.p_lhs)
            # The lifted LHS is already an instance of itself.
            l_g_g = {n[0]: n[0] for n in nodes}

            # Remove controlled things from P_G
            if graph in p_typing.keys():
                l_g_factorization = {
                    keys_by_value(l_g_g, k)[0]: v
                    for k, v in p_typing[graph].items()
                }
                p_g_nodes_to_remove = set()
                for n in p.nodes():
                    l_g_node = p_lhs[n]
                    # If corresponding L_G node is specified in
                    # the controlling relation, remove all
                    # the instances of P nodes not mentioned
                    # in this relations
                    if l_g_node in l_g_factorization.keys():
                        p_nodes = l_g_factorization[l_g_node]
                        if p_g_p[n] not in p_nodes:
                            del p_g_p[n]
                            del p_lhs[n]
                            p_g_nodes_to_remove.add(n)

                for n in p_g_nodes_to_remove:
                    p.remove_node(n)

            liftings[graph] = {
                "rule": Rule(p=p, lhs=lhs, p_lhs=p_lhs),
                "instance": l_g_g,
                "l_g_l": l_g_ls[graph],
                "p_g_p": p_g_p
            }
    else:
        # Empty LHS: every ancestor graph gets the identity rule with
        # empty instances.
        query = generic.ancestors_query(graph_id, "graph", "homomorphism")
        result = tx.run(query)
        ancestors = [record["ancestor"] for record in result]
        for a in ancestors:
            liftings[a] = {
                "rule": Rule.identity_rule(),
                "instance": {},
                "l_g_l": {},
                "p_g_p": {}
            }

    return liftings