def pullback(b, c, d, b_d, c_d, inplace=False):
    """Find the pullback from b -> d <- c.

    Given h1 : B -> D; h2 : C -> D returns A, rh1, rh2
    with rh1 : A -> B; rh2 : A -> C and A the pullback.
    """
    if inplace is True:
        a = b
    else:
        a = type(b)()

    # Check homomorphisms
    check_homomorphism(b, d, b_d)
    check_homomorphism(c, d, c_d)

    hom1 = {}
    hom2 = {}

    f = b_d
    g = c_d

    for n1 in b.nodes():
        for n2 in c.nodes():
            if f[n1] == g[n2]:
                new_attrs = merge_attributes(b.node[n1],
                                             c.node[n2],
                                             'intersection')
                if n1 not in a.nodes():
                    add_node(a, n1, new_attrs)
                    hom1[n1] = n1
                    hom2[n1] = n2
                else:
                    i = 1
                    new_name = str(n1) + str(i)
                    while new_name in a.nodes():
                        i += 1
                        new_name = str(n1) + str(i)
                    # if n2 not in a.nodes():
                    add_node(a, new_name, new_attrs)
                    hom1[new_name] = n1
                    hom2[new_name] = n2

    for n1 in a.nodes():
        for n2 in a.nodes():
            if (hom1[n1], hom1[n2]) in b.edges() or \
               ((not a.is_directed()) and
                    (hom1[n2], hom1[n1]) in b.edges()):
                if (hom2[n1], hom2[n2]) in c.edges() or \
                   ((not a.is_directed()) and
                        (hom2[n2], hom2[n1]) in c.edges()):
                    add_edge(a, n1, n2)
                    set_edge(
                        a, n1, n2,
                        merge_attributes(
                            get_edge(b, hom1[n1], hom1[n2]),
                            get_edge(c, hom2[n1], hom2[n2]),
                            'intersection'))
    check_homomorphism(a, b, hom1)
    check_homomorphism(a, c, hom2)
    return (a, hom1, hom2)
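# A minimal usage sketch for `pullback` (illustrative only, not part of the
# library): graphs B and C both map into D, and the pullback pairs up the
# nodes of B and C that land on the same node of D, keeping the intersection
# of their attributes. Assumes a NetworkX version that still exposes the
# legacy `Graph.node` view used by the primitives above; all node names are
# hypothetical.
def _pullback_example():
    import networkx as nx

    b = nx.DiGraph()
    b.add_nodes_from(["b1", "b2"])
    b.add_edge("b1", "b2")

    c = nx.DiGraph()
    c.add_nodes_from(["c1", "c2"])
    c.add_edge("c1", "c2")

    d = nx.DiGraph()
    d.add_nodes_from(["d1", "d2"])
    d.add_edge("d1", "d2")

    b_d = {"b1": "d1", "b2": "d2"}
    c_d = {"c1": "d1", "c2": "d2"}

    a, a_b, a_c = pullback(b, c, d, b_d, c_d)
    # `a` has one node per compatible (B, C) pair, here "b1" and "b2",
    # together with the edge ("b1", "b2"); `a_b` and `a_c` are the
    # projection homomorphisms A -> B and A -> C.
    return a, a_b, a_c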
def pushout_from_relation(g1, g2, relation, inplace=False):
    """Find the pushout from a relation."""
    left_dict = left_relation_dict(relation)
    right_dict = right_relation_dict(relation)

    if inplace is True:
        g12 = g1
    else:
        g12 = copy.deepcopy(g1)

    g1_g12 = id_of(g12.nodes())
    g2_g12 = dict()

    for node in g1.nodes():
        if node in left_dict.keys():
            for g2_node in left_dict[node]:
                g2_g12[g2_node] = node

    for node in g2.nodes():
        if node not in right_dict.keys():
            add_node(g12, node, g2.node[node])
            g2_g12[node] = node
        elif len(right_dict[node]) == 1:
            node_attrs_diff = dict_sub(
                g2.node[node],
                g1.node[list(right_dict[node])[0]])
            add_node_attrs(
                g12, list(right_dict[node])[0], node_attrs_diff)
        elif len(right_dict[node]) > 1:
            new_name = merge_nodes(g12, right_dict[node])
            for g1_node in right_dict[node]:
                g1_g12[g1_node] = new_name
            g2_g12[node] = new_name
            node_attrs_diff = dict_sub(
                g2.node[node],
                g12.node[new_name])
            add_node_attrs(g12, new_name, node_attrs_diff)

    for u, v in g2.edges():
        if (g2_g12[u], g2_g12[v]) not in g12.edges():
            add_edge(g12, g2_g12[u], g2_g12[v], get_edge(g2, u, v))
        else:
            edge_attrs_diff = dict_sub(
                g2.edge[u][v],
                g12.edge[g2_g12[u]][g2_g12[v]])
            add_edge_attrs(g12, g2_g12[u], g2_g12[v], edge_attrs_diff)

    return (g12, g1_g12, g2_g12)
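# A small illustrative sketch for `pushout_from_relation` (not from the
# library's tests). It assumes the relation is given as an iterable of
# (g1_node, g2_node) pairs, which is the shape that `left_relation_dict` /
# `right_relation_dict` appear to consume; graphs and node names are
# hypothetical, and the same legacy NetworkX assumption as above applies.
def _pushout_from_relation_example():
    import networkx as nx

    g1 = nx.DiGraph()
    g1.add_nodes_from(["a", "b"])
    g1.add_edge("a", "b")

    g2 = nx.DiGraph()
    g2.add_nodes_from(["x", "y"])
    g2.add_edge("x", "y")

    # Relate "a" in g1 with "x" in g2; "b" and "y" stay unrelated.
    relation = {("a", "x")}

    g12, g1_g12, g2_g12 = pushout_from_relation(g1, g2, relation)
    # g12 glues g1 and g2 along the related pair: "a" and "x" are identified
    # (kept under the name "a"), "y" is added as a fresh node, and the edges
    # ("a", "b") and ("a", "y") are present in the result.
    return g12, g1_g12, g2_g12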
def test_refinement(self):
    graph = NXGraph()
    prim.add_nodes_from(graph, [
        ("a", {"name": "Bob"}),
        ("b", {"name": "Jane"}),
        ("c", {"name": "Alice"}),
        ("d", {"name": "Joe"}),
    ])
    prim.add_edges_from(graph, [
        ("a", "a", {"type": "friends"}),
        ("a", "b", {"type": "enemies"}),
        ("c", "a", {"type": "colleages"}),
        ("d", "a", {"type": "siblings"})
    ])

    pattern = NXGraph()
    pattern.add_nodes_from(["x", "y"])
    pattern.add_edges_from([("y", "x")])

    instance = {"x": "a", "y": "d"}

    # Remove node side-effects
    rule = Rule.from_transform(NXGraph.copy(pattern))
    rule.inject_remove_node("x")

    new_instance = rule.refine(graph, instance)
    assert (new_instance == {"x": "a", "y": "d", "b": "b", "c": "c"})
    assert (prim.get_node(rule.lhs, "x") == prim.get_node(graph, "a"))
    assert (
        prim.get_edge(rule.lhs, "x", "b") ==
        prim.get_edge(graph, "a", "b"))
    assert (
        prim.get_edge(rule.lhs, "c", "x") ==
        prim.get_edge(graph, "c", "a"))

    # Remove edge side-effects
    rule = Rule.from_transform(NXGraph.copy(pattern))
    rule.inject_remove_edge("y", "x")

    new_instance = rule.refine(graph, instance)
    assert (
        prim.get_edge(rule.lhs, "y", "x") ==
        prim.get_edge(graph, "d", "a"))

    # Merge side-effects
    rule = Rule.from_transform(NXGraph.copy(pattern))
    rule.inject_merge_nodes(["x", "y"])

    new_instance = rule.refine(graph, instance)
    assert (new_instance == {"x": "a", "y": "d", "b": "b", "c": "c"})
    assert (rule.lhs.get_node("x") == graph.get_node("a"))
    assert (rule.lhs.get_node("y") == graph.get_node("d"))
    assert (rule.lhs.get_edge("y", "x") == graph.get_edge("d", "a"))

    # Combined side-effects
    # Ex1: Remove cloned edge + merge with some node
    graph.remove_edge("a", "a")
    pattern.add_node("z")
    pattern.add_edge("x", "z")
    instance["z"] = "b"

    rule = Rule.from_transform(NXGraph.copy(pattern))
    p_node, _ = rule.inject_clone_node("x")
    rule.inject_remove_node("z")
    rule.inject_remove_edge("y", p_node)
    rule.inject_merge_nodes([p_node, "y"])

    new_instance = rule.refine(graph, instance)
    assert (new_instance == {"x": "a", "y": "d", "z": "b", "c": "c"})
    assert (prim.get_node(rule.lhs, "x") == prim.get_node(graph, "a"))
    assert (prim.get_node(rule.lhs, "y") == prim.get_node(graph, "d"))
    assert (
        prim.get_edge(rule.lhs, "y", "x") ==
        prim.get_edge(graph, "d", "a"))

    # test with rule inversion
    backup = NXGraph.copy(graph)
    rhs_g = graph.rewrite(rule, new_instance)
    inverted = rule.get_inverted_rule()
    rhs_gg = graph.rewrite(inverted, rhs_g)
    # print(rhs_gg)
    old_node_labels = {
        v: new_instance[k]
        for k, v in rhs_gg.items()
    }
    graph.relabel_nodes(old_node_labels)
    assert (backup == graph)
def get_rule_projections(tx, hierarchy, graph_id, rule, instance,
                         rhs_typing=None):
    """Execute the query finding rule projections."""
    if rhs_typing is None:
        rhs_typing = {}

    projections = {}

    if rule.is_relaxing():
        if len(rule.lhs.nodes()) > 0:
            lhs_instance = {
                n: instance[n] for n in rule.lhs.nodes()
            }
            lhs_vars = {
                n: n for n in rule.lhs.nodes()}
            match_instance_vars = {
                v: lhs_instance[k] for k, v in lhs_vars.items()
            }

            # Match nodes
            query = "// Match nodes of the instance of the rewritten graph \n"
            query += "MATCH {}".format(
                ", ".join([
                    "({}:{} {{id: '{}'}})".format(k, graph_id, v)
                    for k, v in match_instance_vars.items()
                ])
            )
            query += "\n\n"

            carry_vars = list(lhs_vars.values())
            for k, v in lhs_vars.items():
                query += (
                    "OPTIONAL MATCH (n)<-[:typing*1..]-({})\n".format(v) +
                    "WITH {} \n".format(
                        ", ".join(
                            carry_vars +
                            ["collect(DISTINCT {{type:'node', origin: {}.id, id: n.id, graph:labels(n)[0], attrs: properties(n)}}) as {}_dict\n".format(
                                v, v)])
                    )
                )
                carry_vars.append("{}_dict".format(v))

            # Match edges
            for (u, v) in rule.p.edges():
                edge_var = "{}_{}".format(lhs_vars[u], lhs_vars[v])
                query += "OPTIONAL MATCH ({}_instance)-[{}:edge]->({}_instance)\n".format(
                    lhs_vars[u], edge_var, lhs_vars[v])
                query += "WHERE ({})<-[:typing*1..]-({}) AND ({})<-[:typing*1..]-({})\n".format(
                    "{}_instance".format(lhs_vars[u]), lhs_vars[u],
                    "{}_instance".format(lhs_vars[v]), lhs_vars[v])
                query += (
                    "WITH {} \n".format(
                        ", ".join(carry_vars + [
                            "collect({{type: 'edge', source: {}.id, target: {}.id, graph:labels({})[0], attrs: properties({})}}) as {}\n".format(
                                "{}_instance".format(lhs_vars[u]),
                                "{}_instance".format(lhs_vars[v]),
                                "{}_instance".format(lhs_vars[u]),
                                edge_var,
                                edge_var)
                        ])
                    )
                )
                carry_vars.append(edge_var)

            query += "RETURN {}".format(
                ", ".join(
                    ["{}_dict as {}".format(v, v) for v in lhs_vars.values()] +
                    ["{}_{}".format(lhs_vars[u], lhs_vars[v])
                     for u, v in rule.p.edges()]))

            result = tx.run(query)
            record = result.single()

            l_l_ts = {}
            l_nodes = {}
            l_edges = {}
            for k, v in record.items():
                if len(v) > 0:
                    if v[0]["type"] == "node":
                        for el in v:
                            l_node = keys_by_value(instance, el["origin"])[0]
                            if el["graph"] not in l_nodes:
                                l_nodes[el["graph"]] = {}
                                l_l_ts[el["graph"]] = {}
                            if el["id"] not in l_nodes[el["graph"]]:
                                l_nodes[el["graph"]][el["id"]] = {}
                            l_nodes[el["graph"]][el["id"]] = attrs_union(
                                l_nodes[el["graph"]][el["id"]],
                                attrs_intersection(
                                    generic.convert_props_to_attrs(el["attrs"]),
                                    get_node(rule.lhs, l_node)))
                            l_l_ts[el["graph"]][l_node] = el["id"]
                    else:
                        for el in v:
                            l_sources = keys_by_value(
                                l_l_ts[el["graph"]], el["source"])
                            l_targets = keys_by_value(
                                l_l_ts[el["graph"]], el["target"])
                            for l_source in l_sources:
                                for l_target in l_targets:
                                    if exists_edge(
                                            rule.l, l_source, l_target):
                                        if el["graph"] not in l_edges:
                                            l_edges[el["graph"]] = {}
                                        if (el["source"], el["target"]) not in l_edges[el["graph"]]:
                                            l_edges[el["graph"]][(el["source"], el["target"])] = {}
                                        l_edges[el["graph"]][(el["source"], el["target"])] = attrs_union(
                                            l_edges[el["graph"]][(el["source"], el["target"])],
                                            attrs_intersection(
                                                generic.convert_props_to_attrs(el["attrs"]),
                                                get_edge(rule.lhs, l_source, l_target)))

        for graph, typing in hierarchy.get_descendants(graph_id).items():
            if graph in l_nodes:
                nodes = l_nodes[graph]
            else:
                nodes = {}
            if graph in l_edges:
                edges = l_edges[graph]
            else:
                edges = {}

            l = nx.DiGraph()
            add_nodes_from(l, [(k, v) for k, v in nodes.items()])
            if graph in l_edges:
                add_edges_from(
                    l, [(s, t, v) for (s, t), v in edges.items()])

            rhs, p_rhs, r_r_t = pushout(
                rule.p, l, rule.rhs,
                compose(rule.p_lhs, l_l_ts[graph]),
                rule.p_rhs)

            l_t_t = {n: n for n in nodes}

            # Modify P_T and R_T according to the controlling
            # relation rhs_typing
            if graph in rhs_typing.keys():
                r_t_factorization = {
                    r_r_t[k]: v
                    for k, v in rhs_typing[graph].items()
                }
                added_t_nodes = set()
                for n in rhs.nodes():
                    if n in r_t_factorization.keys():
                        # If the corresponding R_T node is specified in
                        # the controlling relation, add the nodes of T
                        # that type it to P
                        t_nodes = r_t_factorization[n]
                        for t_node in t_nodes:
                            if t_node not in l_t_t.values() and\
                               t_node not in added_t_nodes:
                                new_p_node = generate_new_id(
                                    l.nodes(), t_node)
                                l.add_node(new_p_node)
                                added_t_nodes.add(t_node)
                                p_rhs[new_p_node] = n
                                l_t_t[new_p_node] = t_node
                            else:
                                p_rhs[keys_by_value(l_t_t, t_node)[0]] = n

            projections[graph] = {
                "rule": Rule(p=l, rhs=rhs, p_rhs=p_rhs),
                "instance": l_t_t,
                "l_l_t": l_l_ts[graph],
                "p_p_t": {k: l_l_ts[graph][v]
                          for k, v in rule.p_lhs.items()},
                "r_r_t": r_r_t
            }

    return projections
def get_rule_liftings(tx, graph_id, rule, instance, p_typing=None):
    """Execute the query finding rule liftings."""
    if p_typing is None:
        p_typing = {}

    liftings = {}
    if len(rule.lhs.nodes()) > 0:
        lhs_vars = {
            n: n for n in rule.lhs.nodes()}
        match_instance_vars = {lhs_vars[k]: v for k, v in instance.items()}

        # Match nodes
        query = "// Match nodes of the instance of the rewritten graph \n"
        query += "MATCH {}".format(
            ", ".join([
                "({}:{} {{id: '{}'}})".format(k, graph_id, v)
                for k, v in match_instance_vars.items()
            ])
        )
        query += "\n\n"

        carry_vars = list(lhs_vars.values())
        for k, v in lhs_vars.items():
            query += (
                "OPTIONAL MATCH (n)-[:typing*1..]->({})\n".format(v) +
                "WITH {} \n".format(
                    ", ".join(carry_vars + [
                        "collect({{type:'node', origin: {}.id, id: n.id, graph:labels(n)[0], attrs: properties(n)}}) as {}_dict\n".format(
                            v, v)])
                )
            )
            carry_vars.append("{}_dict".format(v))

        # Match edges
        for (u, v) in rule.lhs.edges():
            edge_var = "{}_{}".format(lhs_vars[u], lhs_vars[v])
            query += "OPTIONAL MATCH ({}_instance)-[{}:edge]->({}_instance)\n".format(
                lhs_vars[u], edge_var, lhs_vars[v])
            query += "WHERE ({})-[:typing*1..]->({}) AND ({})-[:typing*1..]->({})\n".format(
                "{}_instance".format(lhs_vars[u]), lhs_vars[u],
                "{}_instance".format(lhs_vars[v]), lhs_vars[v])
            query += (
                "WITH {} \n".format(
                    ", ".join(carry_vars + [
                        "collect({{type: 'edge', source: {}.id, target: {}.id, attrs: properties({}), graph:labels({})[0]}}) as {}\n".format(
                            "{}_instance".format(lhs_vars[u]),
                            "{}_instance".format(lhs_vars[v]),
                            edge_var,
                            "{}_instance".format(lhs_vars[u]),
                            edge_var)
                    ])
                )
            )
            carry_vars.append(edge_var)

        query += "RETURN {}".format(
            ", ".join(
                ["{}_dict as {}".format(v, v) for v in lhs_vars.values()] +
                ["{}_{}".format(lhs_vars[u], lhs_vars[v])
                 for u, v in rule.lhs.edges()]))

        result = tx.run(query)
        record = result.single()

        l_g_ls = {}
        lhs_nodes = {}
        lhs_edges = {}
        for k, v in record.items():
            if len(v) > 0:
                if v[0]["type"] == "node":
                    for el in v:
                        if el["graph"] not in lhs_nodes:
                            lhs_nodes[el["graph"]] = []
                            l_g_ls[el["graph"]] = {}
                        l_g_ls[el["graph"]][el["id"]] = keys_by_value(
                            instance, el["origin"])[0]
                        # compute attr intersection
                        attrs = attrs_intersection(
                            generic.convert_props_to_attrs(el["attrs"]),
                            get_node(rule.lhs, l_g_ls[el["graph"]][el["id"]]))
                        lhs_nodes[el["graph"]].append((el["id"], attrs))
                else:
                    for el in v:
                        if el["graph"] not in lhs_edges:
                            lhs_edges[el["graph"]] = []
                        # compute attr intersection
                        attrs = attrs_intersection(
                            generic.convert_props_to_attrs(el["attrs"]),
                            get_edge(
                                rule.lhs,
                                l_g_ls[el["graph"]][el["source"]],
                                l_g_ls[el["graph"]][el["target"]]))
                        lhs_edges[el["graph"]].append(
                            (el["source"], el["target"], attrs)
                        )

        for graph, nodes in lhs_nodes.items():
            lhs = nx.DiGraph()
            add_nodes_from(lhs, nodes)
            if graph in lhs_edges:
                add_edges_from(
                    lhs, lhs_edges[graph])

            p, p_lhs, p_g_p = pullback(
                lhs, rule.p, rule.lhs, l_g_ls[graph], rule.p_lhs)

            l_g_g = {n[0]: n[0] for n in nodes}

            # Remove controlled things from P_G
            if graph in p_typing.keys():
                l_g_factorization = {
                    keys_by_value(l_g_g, k)[0]: v
                    for k, v in p_typing[graph].items()
                }
                p_g_nodes_to_remove = set()
                for n in p.nodes():
                    l_g_node = p_lhs[n]
                    # If the corresponding L_G node is specified in
                    # the controlling relation, remove all
                    # the instances of P nodes not mentioned
                    # in this relation
                    if l_g_node in l_g_factorization.keys():
                        p_nodes = l_g_factorization[l_g_node]
                        if p_g_p[n] not in p_nodes:
                            del p_g_p[n]
                            del p_lhs[n]
                            p_g_nodes_to_remove.add(n)

                for n in p_g_nodes_to_remove:
                    p.remove_node(n)

            liftings[graph] = {
                "rule": Rule(p=p, lhs=lhs, p_lhs=p_lhs),
                "instance": l_g_g,
                "l_g_l": l_g_ls[graph],
                "p_g_p": p_g_p
            }
    else:
        query = generic.ancestors_query(graph_id, "graph", "homomorphism")
        result = tx.run(query)
        ancestors = [record["ancestor"] for record in result]
        for a in ancestors:
            liftings[a] = {
                "rule": Rule.identity_rule(),
                "instance": {},
                "l_g_l": {},
                "p_g_p": {}
            }

    return liftings
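# A hypothetical usage sketch for the lifting query (not part of the library):
# the URI, the credentials and the graph id "action_graph" are illustrative.
# It assumes the neo4j Python driver, whose `session.read_transaction`
# supplies the transaction object expected as `tx` above.
def _get_rule_liftings_example(rule, instance):
    from neo4j import GraphDatabase

    driver = GraphDatabase.driver(
        "bolt://localhost:7687", auth=("neo4j", "neo4j"))
    with driver.session() as session:
        # The driver passes the open transaction as the first argument.
        liftings = session.read_transaction(
            get_rule_liftings, "action_graph", rule, instance)
    driver.close()
    return liftings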
def pullback_complement(a, b, d, a_b, b_d, inplace=False):
    """Find the final pullback complement from a->b->d.

    Makes changes to d inplace.
    """
    check_homomorphism(a, b, a_b, total=True)
    check_homomorphism(b, d, b_d, total=True)

    if not is_monic(b_d):
        raise InvalidHomomorphism(
            "Second homomorphism is not monic, "
            "cannot find final pullback complement!"
        )

    if inplace is True:
        c = d
    else:
        c = copy.deepcopy(d)

    a_c = dict()
    c_d = id_of(c.nodes())

    # Remove/clone nodes
    for b_node in b.nodes():
        a_keys = keys_by_value(a_b, b_node)
        # Remove nodes
        if len(a_keys) == 0:
            remove_node(c, b_d[b_node])
            del c_d[b_d[b_node]]
        # Keep nodes
        elif len(a_keys) == 1:
            a_c[a_keys[0]] = b_d[b_node]
        # Clone nodes
        else:
            i = 1
            for k in a_keys:
                if i == 1:
                    a_c[k] = b_d[b_node]
                    c_d[b_d[b_node]] = b_d[b_node]
                else:
                    new_name = clone_node(c, b_d[b_node])
                    a_c[k] = new_name
                    c_d[new_name] = b_d[b_node]
                i += 1

    # Remove edges
    for (b_n1, b_n2) in b.edges():
        a_keys_1 = keys_by_value(a_b, b_n1)
        a_keys_2 = keys_by_value(a_b, b_n2)
        if len(a_keys_1) > 0 and len(a_keys_2) > 0:
            for k1 in a_keys_1:
                for k2 in a_keys_2:
                    if d.is_directed():
                        if (k1, k2) not in a.edges() and\
                           (a_c[k1], a_c[k2]) in c.edges():
                            remove_edge(c, a_c[k1], a_c[k2])
                    else:
                        if (k1, k2) not in a.edges() and\
                           (k2, k1) not in a.edges():
                            if (a_c[k1], a_c[k2]) in d.edges() or\
                               (a_c[k2], a_c[k1]) in d.edges():
                                remove_edge(c, a_c[k1], a_c[k2])

    # Remove node attrs
    for a_node in a.nodes():
        attrs_to_remove = dict_sub(
            b.node[a_b[a_node]],
            a.node[a_node]
        )
        remove_node_attrs(c, a_c[a_node], attrs_to_remove)
        # removed_node_attrs[a_c[a_node]] = attrs_to_remove

    # Remove edge attrs
    for (n1, n2) in a.edges():
        attrs_to_remove = dict_sub(
            get_edge(b, a_b[n1], a_b[n2]),
            get_edge(a, n1, n2)
        )
        remove_edge_attrs(c, a_c[n1], a_c[n2], attrs_to_remove)
        # removed_edge_attrs[(a_c[n1], a_c[n2])] = attrs_to_remove

    return (c, a_c, c_d)
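# A minimal sketch for the final pullback complement (illustrative graphs,
# hypothetical node names; same legacy NetworkX assumption as above):
# a -> b -> d with b -> d monic. Nodes of d that lie in the image of b but
# have no preimage in a are removed; the rest of d is kept.
def _pullback_complement_example():
    import networkx as nx

    a = nx.DiGraph()
    a.add_nodes_from(["a1"])

    b = nx.DiGraph()
    b.add_nodes_from(["b1", "b2"])
    b.add_edge("b1", "b2")

    d = nx.DiGraph()
    d.add_nodes_from(["d1", "d2", "d3"])
    d.add_edges_from([("d1", "d2"), ("d2", "d3")])

    a_b = {"a1": "b1"}
    b_d = {"b1": "d1", "b2": "d2"}

    c, a_c, c_d = pullback_complement(a, b, d, a_b, b_d)
    # "d2" is removed from c (its b-preimage "b2" has no preimage in a),
    # taking its incident edges with it; "d3" survives untouched because it
    # lies outside the image of b.
    return c, a_c, c_d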
def pushout(a, b, c, a_b, a_c, inplace=False):
    """Find the pushout of the span b <- a -> c."""
    check_homomorphism(a, b, a_b)
    check_homomorphism(a, c, a_c)

    if inplace is True:
        d = b
    else:
        d = copy.deepcopy(b)

    b_d = id_of(b.nodes())
    c_d = dict()

    # Add/merge nodes
    for c_n in c.nodes():
        a_keys = keys_by_value(a_c, c_n)
        # Add nodes
        if len(a_keys) == 0:
            add_node(d, c_n, c.node[c_n])
            c_d[c_n] = c_n
        # Keep nodes
        elif len(a_keys) == 1:
            c_d[a_c[a_keys[0]]] = a_b[a_keys[0]]
        # Merge nodes
        else:
            nodes_to_merge = []
            for k in a_keys:
                nodes_to_merge.append(a_b[k])
            new_name = merge_nodes(d, nodes_to_merge)
            c_d[c_n] = new_name
            for node in nodes_to_merge:
                b_d[node] = new_name

    # Add edges
    for (n1, n2) in c.edges():
        if b.is_directed():
            if (c_d[n1], c_d[n2]) not in d.edges():
                add_edge(
                    d, c_d[n1], c_d[n2],
                    get_edge(c, n1, n2))
        else:
            if (c_d[n1], c_d[n2]) not in d.edges() and\
               (c_d[n2], c_d[n1]) not in d.edges():
                add_edge(
                    d, c_d[n1], c_d[n2],
                    get_edge(c, n1, n2)
                )

    # Add node attrs
    for c_n in c.nodes():
        a_keys = keys_by_value(a_c, c_n)
        # Add attributes to the nodes which stayed invariant
        if len(a_keys) == 1:
            attrs_to_add = dict_sub(
                c.node[c_n],
                a.node[a_keys[0]]
            )
            add_node_attrs(d, c_d[c_n], attrs_to_add)
        # Add attributes to the nodes which were merged
        elif len(a_keys) > 1:
            merged_attrs = {}
            for k in a_keys:
                merged_attrs = merge_attributes(
                    merged_attrs,
                    a.node[k]
                )
            attrs_to_add = dict_sub(c.node[c_n], merged_attrs)
            add_node_attrs(d, c_d[c_n], attrs_to_add)

    # Add edge attrs
    for (n1, n2) in c.edges():
        d_n1 = c_d[n1]
        d_n2 = c_d[n2]
        if d.is_directed():
            attrs_to_add = dict_sub(
                get_edge(c, n1, n2),
                get_edge(d, d_n1, d_n2)
            )
            add_edge_attrs(
                d, c_d[n1], c_d[n2],
                attrs_to_add
            )
        else:
            attrs_to_add = dict_sub(
                get_edge(c, n1, n2),
                get_edge(d, d_n1, d_n2)
            )
            add_edge_attrs(
                d, c_d[n1], c_d[n2],
                attrs_to_add
            )

    return (d, b_d, c_d)
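# A minimal sketch for `pushout` (illustrative graphs, hypothetical node
# names; same legacy NetworkX assumption as above): b and c are glued along
# their common part a. A single a-node maps to "b1" in b and "c1" in c, so
# those two nodes are identified in the result, while "c2" comes in as a
# fresh node.
def _pushout_example():
    import networkx as nx

    a = nx.DiGraph()
    a.add_nodes_from(["a1"])

    b = nx.DiGraph()
    b.add_nodes_from(["b1", "b2"])
    b.add_edge("b1", "b2")

    c = nx.DiGraph()
    c.add_nodes_from(["c1", "c2"])
    c.add_edge("c1", "c2")

    a_b = {"a1": "b1"}
    a_c = {"a1": "c1"}

    d, b_d, c_d = pushout(a, b, c, a_b, a_c)
    # d contains "b1" (now also the image of "c1"), "b2" and "c2",
    # with the edges ("b1", "b2") and ("b1", "c2").
    return d, b_d, c_d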