def paste_nodes(hie, top, graph_id, parent_path, nodes, mouse_x, mouse_y):
    """paste the selected nodes from graph at parent_path to graph_id"""
    path_list = [s for s in parent_path.split("/") if s and not s.isspace()]
    other_id = child_from_path(hie, top, path_list)
    gr = hie.node[graph_id].graph
    other_gr = hie.node[other_id].graph
    old_to_new = {}

    # check that all copied nodes exist in the graph
    for node in nodes:
        if node not in other_gr:
            raise ValueError(
                "copied node {} does not exist anymore".format(node))

    if hie.has_edge(graph_id, other_id):
        mapping = hie.edge[graph_id][other_id].mapping
        for node in nodes:
            n_id = prim.unique_node_id(gr, node)
            prim.add_node(gr, n_id, other_gr.node[node])
            old_to_new[node] = n_id
            mapping[n_id] = node
        for (source, target) in other_gr.subgraph(nodes).edges():
            prim.add_edge(gr, old_to_new[source], old_to_new[target],
                          other_gr.edge[source][target])
    else:
        # check that all necessary typings are there
        necessary_typings = [typing for typing in hie.successors(graph_id)]
        # if hie.edge[graph_id][typing].total]
        # until UI can handle partial typings
        typings = [typing for typing in hie.successors(graph_id)
                   if typing in hie.successors(other_id)]
        for typing in necessary_typings:
            if typing not in typings:
                raise ValueError(
                    "copied nodes not typed by {}".format(typing))
            for node in nodes:
                if node not in hie.edge[other_id][typing].mapping:
                    raise ValueError("copied node {} is not typed by {}"
                                     .format(node, typing))
        for node in nodes:
            node_id = prim.unique_node_id(gr, node)
            old_to_new[node] = node_id
            prim.add_node(gr, node_id, other_gr.node[node])
            for typing in typings:
                other_mapping = hie.edge[other_id][typing].mapping
                if node in other_mapping:
                    hie.edge[graph_id][typing].mapping[old_to_new[node]] =\
                        other_mapping[node]
        for (source, target) in other_gr.subgraph(nodes).edges():
            prim.add_edge(gr, old_to_new[source], old_to_new[target],
                          other_gr.edge[source][target])

    if "positions" in hie.node[other_id].attrs:
        if "positions" not in hie.node[graph_id].attrs:
            hie.node[graph_id].attrs["positions"] = {}
        positions_old = hie.node[other_id].attrs["positions"]
        positions_new = hie.node[graph_id].attrs["positions"]
        add_positions(mouse_x, mouse_y, positions_old, positions_new,
                      old_to_new)
def _add_node_lhs(self, node_id, attrs=None):
    if node_id not in self.lhs.nodes():
        primitives.add_node(self.lhs, node_id, attrs)

        # add the corresponding node to the preserved part
        new_p_node_id = node_id
        if new_p_node_id in self.p.nodes():
            new_p_node_id = primitives.unique_node_id(self.p, new_p_node_id)
        primitives.add_node(self.p, new_p_node_id, attrs)
        self.p_lhs[new_p_node_id] = node_id

        # add the corresponding node to the right-hand side
        new_rhs_node_id = node_id
        if new_rhs_node_id in self.rhs.nodes():
            new_rhs_node_id = primitives.unique_node_id(
                self.rhs, new_rhs_node_id)
        primitives.add_node(self.rhs, new_rhs_node_id, attrs)
        self.p_rhs[new_p_node_id] = new_rhs_node_id
    else:
        raise RuleError(
            "Node '%s' already exists in the left-hand side "
            "of the rule" % node_id)
def merge_graphs(hie, g_id, name1, name2, mapping, new_name):
    """merge two graphs based on an identity relation between their nodes.

    We first build a span from the given relation. The new graph is then
    computed as the pushout.
    """
    new_name = get_valid_name(hie, g_id, new_name)
    id1 = child_from_name(hie, g_id, name1)
    id2 = child_from_name(hie, g_id, name2)
    g1 = hie.node[id1].graph
    g2 = hie.node[id2].graph

    # build the span from the relation
    if hie.directed:
        g0 = nx.DiGraph()
    else:
        g0 = nx.Graph()
    left_mapping = {}
    right_mapping = {}
    for (n1, n2) in mapping:
        new_node = unique_node_id(g0, n2)
        prim.add_node(g0, new_node)
        left_mapping[new_node] = n1
        right_mapping[new_node] = n2

    # compute the pushout
    (new_graph, g1_new_graph, g2_new_graph) = \
        pushout(g0, g1, g2, left_mapping, right_mapping)
    new_id = hie.unique_graph_id(new_name)
    new_attrs = _new_merged_graph_attrs(hie, id1, id2, new_name)
    hie.add_graph(new_id, new_graph, new_attrs)

    # recover the typings of the new pushout graph
    g1_typings = {t: hie.edge[id1][t] for t in hie.successors(id1)}
    g2_typings = {t: hie.edge[id2][t] for t in hie.successors(id2)}
    new_typings = typings_of_pushout(g1, g2, new_graph, g1_new_graph,
                                     g2_new_graph, g1_typings, g2_typings)
    for (typ_id, (typ_mapping, typ_total)) in new_typings.items():
        hie.add_typing(new_id, typ_id, typ_mapping, total=typ_total)

    # recover the typings of children by the new pushout graph
    new_id1 = _copy_graph(hie, new_id)
    for child in all_children(hie, id1):
        hie.add_edge(child, new_id1)
        tmp_typ = Typing(g1_new_graph,
                         total=hie.edge[child][id1].all_total())
        hie.edge[child][new_id1] = tmp_typ * hie.edge[child][id1]
    new_id2 = _copy_graph(hie, new_id)
    for child in all_children(hie, id2):
        hie.add_edge(child, new_id2)
        tmp_typ = Typing(g2_new_graph,
                         total=hie.edge[child][id2].all_total())
        hie.edge[child][new_id2] = tmp_typ * hie.edge[child][id2]
    _merge_hierarchy(hie, hie, new_id, new_id1)
    _merge_hierarchy(hie, hie, new_id, new_id2)
    hie.remove_node(new_id1)
    hie.remove_node(new_id2)
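# Standalone sketch of the "build the span from the relation" step of
# merge_graphs above, using plain networkx and a local helper in place of
# the library's unique_node_id/prim.add_node (assumed behaviour, not the
# actual implementation).
import networkx as nx


def _span_from_relation(relation, directed=True):
    """Build a span graph g0 with one node per related pair of nodes."""
    g0 = nx.DiGraph() if directed else nx.Graph()
    left_mapping = {}
    right_mapping = {}
    for (n1, n2) in relation:
        new_node = n2
        while new_node in g0:            # mimic unique_node_id
            new_node = "{}_1".format(new_node)
        g0.add_node(new_node)
        left_mapping[new_node] = n1      # homomorphism g0 -> g1
        right_mapping[new_node] = n2     # homomorphism g0 -> g2
    return g0, left_mapping, right_mapping


# example: identify node "a" of g1 with node "x" of g2
# _span_from_relation([("a", "x")])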
def link_components(hie, g_id, comp1, comp2, kami_id):
    """link two components together with brk, bnd"""
    typing = hie.edge[g_id][kami_id].mapping
    graph = hie.node[g_id].graph
    print(graph)
    bnd_name = unique_node_id(graph, "bnd %s-%s" % (comp1, comp2))
    typing[bnd_name] = "bnd"
    add_node(graph, bnd_name)
    # brk_name = unique_node_id(graph, "brk")
    # add_node(graph, brk_name)
    # typing[brk_name] = "brk"
    # loc1 = unique_node_id(graph, "loc")
    # add_node(graph, loc1)
    # typing[loc1] = "locus"
    # loc2 = unique_node_id(graph, "loc")
    # add_node(graph, loc2)
    # typing[loc2] = "locus"
    add_edge(graph, comp1, bnd_name)
    add_edge(graph, comp2, bnd_name)
    # add_edge(graph, loc1, comp1)
    # add_edge(graph, loc1, bnd_name)
    # add_edge(graph, loc1, brk_name)
    # add_edge(graph, loc2, comp2)
    # add_edge(graph, loc2, bnd_name)
    # add_edge(graph, loc2, brk_name)

    if "positions" in hie.node[g_id].attrs:
        positions = hie.node[g_id].attrs["positions"]
        if comp1 in positions.keys():
            xpos1 = positions[comp1].get("x", 0)
            ypos1 = positions[comp1].get("y", 0)
        else:
            (xpos1, ypos1) = (0, 0)
        if comp2 in positions.keys():
            xpos2 = positions[comp2].get("x", 0)
            ypos2 = positions[comp2].get("y", 0)
        else:
            (xpos2, ypos2) = (0, 0)
        difx = xpos2 - xpos1
        dify = ypos2 - ypos1
        if (difx, dify) != (0, 0):
            distance = sqrt(difx * difx + dify * dify)
            vect = (difx / distance, dify / distance)
            # positions[loc1] = {"x": xpos1+vect[0]*distance/3,
            #                    "y": ypos1+vect[1]*distance/3}
            # positions[loc2] = {"x": xpos1+vect[0]*distance/3*2,
            #                    "y": ypos1+vect[1]*distance/3*2}
            positions[bnd_name] = {
                "x": (xpos1 + vect[0] * distance / 2),  # + vect[1]*60
                "y": (ypos1 + vect[1] * distance / 2)   # -
            }
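# Minimal sketch of the position computation used in link_components: the
# new "bnd" node is placed at the midpoint of the segment between the two
# components (coordinates default to (0, 0) when unknown). Helper name and
# signature are illustrative only.
from math import sqrt


def _midpoint_position(pos1, pos2):
    """Return the {"x": ..., "y": ...} midpoint of two position dicts."""
    x1, y1 = pos1.get("x", 0), pos1.get("y", 0)
    x2, y2 = pos2.get("x", 0), pos2.get("y", 0)
    difx, dify = x2 - x1, y2 - y1
    if (difx, dify) == (0, 0):
        return None
    distance = sqrt(difx * difx + dify * dify)
    vect = (difx / distance, dify / distance)
    return {"x": x1 + vect[0] * distance / 2,
            "y": y1 + vect[1] * distance / 2}


# _midpoint_position({"x": 0, "y": 0}, {"x": 4, "y": 2})
# == {"x": 2.0, "y": 1.0}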
def add_node(hie, g_id, parent, node_id, node_type, new_name=False):
    """add a node to a graph in the hierarchy"""
    # if parent is not None and node_type is None:
    #     raise ValueError("node {} must have a type".format(node_id))
    if isinstance(hie.node[g_id], GraphNode):
        if node_id in hie.node[g_id].graph.nodes():
            if new_name:
                node_id = prim.unique_node_id(hie.node[g_id].graph, node_id)
            else:
                raise ValueError(
                    "node {} already exists in graph".format(node_id))
        # check that we have sufficient typings
        for typing in hie.successors(g_id):
            if hie.edge[g_id][typing].total:
                if typing != parent or (typing == parent and
                                        node_type is None):
                    raise ValueError(
                        "new node must be typed by {}".format(typing))
        prim.add_node(hie.node[g_id].graph, node_id)
        if parent is not None and node_type is not None:
            hie.edge[g_id][parent].mapping[node_id] = node_type
        return node_id
    elif isinstance(hie.node[g_id], RuleNode):
        tmp_rule = copy.deepcopy(hie.node[g_id].rule)
        tmp_rule.add_node(node_id)
        typings = [(hie.node[typing].graph, hie.edge[g_id][typing])
                   for _, typing in hie.out_edges(g_id)
                   if typing != parent]
        # tmp_parent_typing = copy.deepcopy(hie.edge[g_id][parent])
        # tmp_parent_typing.rhs_mapping[node_id] = node_type
        if parent is not None and node_type is not None:
            parent_typing = copy.deepcopy(hie.edge[g_id][parent])
            parent_typing.rhs_mapping[node_id] = node_type
            typings.append((hie.node[parent].graph, parent_typing))
        check_rule_typings(typings, tmp_rule)
        hie.node[g_id].rule = tmp_rule
        # parent_typing is only defined when both parent and node_type
        # are given
        if parent is not None and node_type is not None:
            hie.edge[g_id][parent] = parent_typing
        return node_id
    else:
        raise ValueError("node is neither a rule nor a graph")
def remove_conflict(hie, ag_id, mm_id, locus, suffix=None):
    """duplicates a locus in order to remove conflicts"""
    ag_gr = hie.node[ag_id].graph
    ag_mm = hie.get_typing(ag_id, mm_id)
    nuggets = [nug for nug in tree.get_children_id_by_node(hie, ag_id, locus)
               if hie.node[nug].attrs["type"] == "nugget"]

    # Do not merge nodes that are not valid,
    # as they are removed from the bottom graph before the pushout
    not_valid = [locus]

    def valid_pullback_node(a, b, c, d, a_b, a_c, b_d, c_d, n):
        a_d = union_mappings(compose_homomorphisms(b_d, a_b),
                             compose_homomorphisms(c_d, a_c))
        return n not in a_d or a_d[n] not in not_valid

    (pp, pp_ag) = multi_pullback_pushout(
        ag_gr,
        [(hie.node[nug].graph, hie.get_typing(nug, ag_id))
         for nug in nuggets],
        valid_pullback_node)
    adj_nodes = [suc for suc in ag_gr.successors(locus)] + [locus]
    lhs = ag_gr.subgraph(adj_nodes)
    new_pp = pp.subgraph(reverse_image(pp_ag, adj_nodes))

    # add regions and agents that do not appear in any nuggets to the
    # preserved part, so we can remove edges from the locus to them
    to_add = {suc for suc in ag_gr.successors(locus)
              if ag_mm[suc] in ["region", "agent"]} - set(pp_ag.values())
    for node in to_add:
        node_id = unique_node_id(new_pp, node)
        add_node(new_pp, node_id)
        pp_ag[node_id] = node
    newpp_lhs = restrict_mapping(new_pp.nodes(), pp_ag)

    # merge loci from the preserved part that are linked to the same
    # other loci
    def linked_to(loc):
        """loc being a locus from new_pp, returns the ag loci linked to loc"""
        adj_acts = {pp_ag[act] for act in new_pp.successors(loc)
                    if ag_mm[pp_ag[act]] not in ["region", "agent"]}
        return {other_loc for act in adj_acts
                for other_loc in ag_gr.predecessors(act)
                if other_loc != locus}

    # compute equivalence classes of loci
    loci = [pploc for pploc in new_pp if pp_ag[pploc] == locus]
    classes = [{pploc} for pploc in loci]
    partial_eq = [{loc1, loc2} for loc1 in loci for loc2 in loci
                  if loc1 != loc2 and linked_to(loc1) & linked_to(loc2)]
    for eq in partial_eq:
        classes = merge_classes(eq, classes)

    eq_gr = nx.DiGraph()
    newpp_eq = {}
    for i, cl in enumerate(classes):
        eq_gr.add_node(i)
        for node in cl:
            newpp_eq[node] = i
    (new_pp, newpp_lhs) = pushout_from_partial_mapping(
        new_pp, eq_gr, newpp_eq, newpp_lhs, {})
    lhs_ag = id_of(lhs)
    rhs = copy.deepcopy(new_pp)
    rule = Rule(new_pp, lhs, rhs, newpp_lhs)
    if suffix is None:
        apply_rule_on_parent_inplace(hie, ag_id, rule, lhs_ag)
    else:
        raise ValueError("TODO? rewrite not in place")
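# Sketch of the mapping restriction used in remove_conflict and
# unfold_locus (assumed behaviour of restrict_mapping, not its actual
# implementation): keep only the pairs whose key lies in the given node set.
def _restrict_mapping_sketch(nodes, mapping):
    """Restrict a dict-encoded homomorphism to a set of source nodes."""
    return {n: mapping[n] for n in nodes if n in mapping}


# _restrict_mapping_sketch(["a"], {"a": 1, "b": 2}) == {"a": 1}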
def unfold_locus(hie, ag_id, mm_id, locus, suffix=None):
    """duplicate a locus that is shared between agents"""
    ag_gr = hie.node[ag_id].graph
    ag_mm = hie.get_typing(ag_id, mm_id)
    nuggets = [nug for nug in tree.get_children_id_by_node(hie, ag_id, locus)
               if hie.node[nug].attrs["type"] == "nugget"]

    # Do not merge nodes that are not valid,
    # as they are removed from the bottom graph before the pushout
    not_valid = [locus] + [node for node in ag_gr[locus]
                           if ag_mm[node] not in ["region", "agent"]]

    def valid_pullback_node(a, b, c, d, a_b, a_c, b_d, c_d, n):
        a_d = union_mappings(compose_homomorphisms(b_d, a_b),
                             compose_homomorphisms(c_d, a_c))
        return n not in a_d or a_d[n] not in not_valid

    (pp, pp_ag) = multi_pullback_pushout(
        ag_gr,
        [(hie.node[nug].graph, hie.get_typing(nug, ag_id))
         for nug in nuggets],
        valid_pullback_node)
    adj_nodes = [suc for suc in ag_gr.successors(locus)] + [locus]
    lhs = ag_gr.subgraph(adj_nodes)
    new_pp = pp.subgraph(reverse_image(pp_ag, adj_nodes))

    # add regions and agents that do not appear in any nuggets to the
    # preserved part, so we can remove edges from the locus to them
    to_add = {suc for suc in ag_gr.successors(locus)
              if ag_mm[suc] in ["region", "agent"]} - set(pp_ag.values())
    for node in to_add:
        node_id = unique_node_id(new_pp, node)
        add_node(new_pp, node_id)
        pp_ag[node_id] = node
    newpp_lhs = restrict_mapping(new_pp.nodes(), pp_ag)

    # merge loci that have a shared successor component
    def common_comp(loc1, loc2):
        comps1 = {c for c in new_pp.successors(loc1)
                  if ag_mm[pp_ag[c]] in ["region", "agent"]}
        comps2 = {c for c in new_pp.successors(loc2)
                  if ag_mm[pp_ag[c]] in ["region", "agent"]}
        return comps1 & comps2

    # compute equivalence classes of loci
    loci = [pploc for pploc in new_pp if pp_ag[pploc] == locus]
    classes = [{pploc} for pploc in loci]
    partial_eq = [{loc1, loc2} for (loc1, loc2) in combinations(loci, 2)
                  if loc1 != loc2 and common_comp(loc1, loc2)]
    for eq in partial_eq:
        classes = merge_classes(eq, classes)

    # compute equivalence classes of action nodes
    def equiv_acts(act1, act2):
        def equiv_loci(locs1, locs2):
            if len(locs1) != 1:
                raise ValueError(
                    "should have exactly one locus next to action")
            if len(locs2) != 1:
                raise ValueError(
                    "should have exactly one locus next to action")
            return any(set(locs1) | set(locs2) <= cl for cl in classes)
        return (pp_ag[act1] == pp_ag[act2] and
                equiv_loci(new_pp.predecessors(act1),
                           new_pp.predecessors(act2)))

    actions = [act for act in new_pp
               if ag_mm[pp_ag[act]] in ["is_bnd", "bnd", "is_free", "brk"]]
    action_classes = [{act} for act in actions]
    for (act1, act2) in combinations(actions, 2):
        if equiv_acts(act1, act2):
            action_classes = merge_classes({act1, act2}, action_classes)

    eq_gr = nx.DiGraph()
    newpp_eq = {}
    for i, cl in enumerate(classes + action_classes):
        eq_gr.add_node(i)
        for node in cl:
            newpp_eq[node] = i
    (new_pp, newpp_lhs) = pushout_from_partial_mapping(
        new_pp, eq_gr, newpp_eq, newpp_lhs, {})
    lhs_ag = id_of(lhs)
    rhs = copy.deepcopy(new_pp)
    rule = Rule(new_pp, lhs, rhs, newpp_lhs)
    if suffix is None:
        apply_rule_on_parent_inplace(hie, ag_id, rule, lhs_ag)
    else:
        raise ValueError("TODO? rewrite not in place")
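# Sketch of the equivalence-class merging used in remove_conflict and
# unfold_locus (assumed behaviour of merge_classes, not its actual
# implementation): every class that intersects the given pair is united
# into a single class, the others are kept unchanged.
def _merge_classes_sketch(eq, classes):
    """Merge all classes meeting `eq` into one; return the new partition."""
    merged = set(eq)
    untouched = []
    for cl in classes:
        if cl & eq:
            merged |= cl
        else:
            untouched.append(cl)
    return untouched + [merged]


# _merge_classes_sketch({1, 2}, [{1}, {2}, {3}]) == [{3}, {1, 2}]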
def unfold_nugget(hie, nug_id, ag_id, mm_id, test=False):
    """unfold a nugget with conflicts to create multiple nuggets"""
    nug_gr = copy.deepcopy(hie.node[nug_id].graph)
    mm_typing = copy.deepcopy(hie.get_typing(nug_id, mm_id))
    ag_typing = copy.deepcopy(hie.get_typing(nug_id, ag_id))

    # create one new locus for each agent, region or residue linked to
    # a locus
    new_ports = {}  # new_ports remembers the locus/state each port comes from
    old_ports = []
    non_comp_neighbors = {}
    for node in nug_gr.nodes():
        # move the state test to explicit "is_equal" nodes
        if mm_typing[node] == "state" and "val" in nug_gr.node[node]:
            for val in nug_gr.node[node]["val"]:
                id_prefix = "{}_{}".format(val, node)
                test_id = unique_node_id(nug_gr, id_prefix)
                add_node(nug_gr, test_id, {"val": val})
                mm_typing[test_id] = "is_equal"
                add_edge(nug_gr, test_id, node)

                # for testing
                if test:
                    ag = hie.node[ag_id].graph
                    ag_test_id = unique_node_id(ag, id_prefix)
                    add_node(ag, ag_test_id, {"val": val})
                    add_edge(ag, ag_test_id, ag_typing[node])
                    hie.edge[ag_id][mm_id].mapping[ag_test_id] = "is_equal"
                    real_nugget = hie.node[nug_id].graph
                    old_test_id = unique_node_id(real_nugget, id_prefix)
                    add_node(real_nugget, old_test_id, {"val": val})
                    add_edge(real_nugget, old_test_id, node)
                    hie.edge[nug_id][ag_id].mapping[old_test_id] = ag_test_id

        if mm_typing[node] in ["locus", "state"]:
            comp_neighbors = [comp for comp in nug_gr.successors(node)
                              if mm_typing[comp] in
                              ["agent", "region", "residue"]]
            other_neighbors = [other for other in
                               (nug_gr.successors(node) +
                                nug_gr.predecessors(node))
                               if other not in comp_neighbors]
            old_ports.append(node)
            for comp in comp_neighbors:
                id_prefix = "{}_{}".format(node, comp)
                port_id = unique_node_id(nug_gr, id_prefix)
                add_node(nug_gr, port_id)
                mm_typing[port_id] = mm_typing[node]
                ag_typing[port_id] = ag_typing[node]
                new_ports[port_id] = node
                add_edge(nug_gr, port_id, comp)
                for other in other_neighbors:
                    if mm_typing[other] in ["mod", "is_equal"]:
                        add_edge(nug_gr, other, port_id)
                    else:
                        add_edge(nug_gr, port_id, other)
                non_comp_neighbors[port_id] = set(other_neighbors)

    # remove the old loci, which are potentially shared between
    # agents/regions/residues
    for port in old_ports:
        remove_node(nug_gr, port)
        del mm_typing[port]
        del ag_typing[port]

    # associate the component nodes (agent, region, residue) to the ports
    components = {}
    for port in new_ports:
        components[port] = _agents_of_components(nug_gr, mm_typing, port)

    def _nonconflicting(port1, action_node1, port2, action_node2):
        typ1 = mm_typing[action_node1]
        typ2 = mm_typing[action_node2]
        if port1 == port2:
            if typ1 == typ2:
                return False
            if mm_typing[port1] == "state":
                return True
            if {typ1, typ2} & {"is_free", "is_bnd"}:
                return False
            different_loci = set(nug_gr.predecessors(action_node1)) !=\
                set(nug_gr.predecessors(action_node2))
            return different_loci
        elif action_node1 != action_node2:
            return True
        elif typ1 in ["mod", "is_equal", "is_free"]:
            return False
        else:
            return new_ports[port1] != new_ports[port2]

    def replace(node):
        """identify is_equal and mod nodes with same values"""
        if mm_typing[node] == "is_equal":
            return ("is_equal", str(nug_gr.node[node]["val"]))
        if mm_typing[node] == "mod":
            return ("mod", str(nug_gr.node[node]["val"]))
        return node

    def reduce_subsets(set_list):
        return set_list

    def subset_up_to_equivalence(set1, set2):
        set1 = {frozenset(map(replace, s)) for s in set1}
        set2 = {frozenset(map(replace, s)) for s in set2}
        return set1.issubset(set2)

    def replace2(node):
        """identify is_equal and mod nodes with same values and successors"""
        if mm_typing[node] == "is_equal":
            return ("is_equal", str(nug_gr.node[node]["val"]),
                    frozenset(nug_gr.successors(node)))
        if mm_typing[node] == "mod":
            return ("mod", str(nug_gr.node[node]["val"]),
                    frozenset(nug_gr.successors(node)))
        return node

    def _equivalent_actions(act1, act2, edge_list):
        l1 = [(port, replace(node)) for (port, node) in edge_list
              if node == act1]
        l2 = [(port, replace(node)) for (port, node) in edge_list
              if node == act2]
        return l1 == l2

    def _equivalent_edge(p1, a1, p2, a2):
        return p1 == p2 and replace2(a1) == replace2(a2)

    def _valid_subsets(memo_dict, set_list):
        """build non conflicting sets of sets of nodes"""
        if set_list == []:
            return [[]]
        memo_key = frozenset(set_list)
        if memo_key in memo_dict:
            return memo_dict[memo_key]
        (port, a_node) = set_list[0]
        conflicting_edges = [(port2, a_node2)
                             for (port2, a_node2) in set_list[1:]
                             if not _nonconflicting(port, a_node,
                                                    port2, a_node2)]
        nonconflicting_sets =\
            [(port2, a_node2) for (port2, a_node2) in set_list[1:]
             if _nonconflicting(port, a_node, port2, a_node2)]
        equivalent_edges = [
            (p2, n2) for (p2, n2) in set_list
            if p2 == port and _equivalent_actions(a_node, n2, set_list)]
        new_set_list = [
            (p2, n2) for (p2, n2) in set_list[1:]
            if p2 != port or not _equivalent_actions(a_node, n2, set_list)]
        cond1 = (len([node for (_, node) in set_list[1:]
                      if node == a_node]) == 0 and
                 all(replace(n2) == replace(a_node)
                     for (p2, n2) in set_list[1:] if p2 == port))
        if nonconflicting_sets == new_set_list or cond1:
            memo_dict[memo_key] =\
                [sub + [(port, a_node)]
                 for sub in _valid_subsets(memo_dict, nonconflicting_sets)]
            return memo_dict[memo_key]
        else:
            without_current_edge = _valid_subsets(memo_dict, new_set_list)

            def conflict_with_removed_edges(edge_list):
                return all(
                    any(not _nonconflicting(p1, a_node1, p2, a_node2)
                        for (p2, a_node2) in edge_list)
                    for (p1, a_node1) in equivalent_edges)

            # with_conflict = list(filter(conflict_with_current_edge,
            #                             without_current_edge))
            with_conflict = list(filter(conflict_with_removed_edges,
                                        without_current_edge))
            memo_dict[memo_key] =\
                with_conflict +\
                [sub + [(port, a_node)]
                 for sub in _valid_subsets(memo_dict, nonconflicting_sets)]
            return memo_dict[memo_key]

    def _complete_subsets(set_list):
        print(set_list)
        return [components[port] | {a_node} for (port, a_node) in set_list]

    def _remove_uncomplete_actions(set_list):
        """remove actions and tests that are not connected to enough
        components"""
        labels = {node: 0 for node in nug_gr.nodes()}
        for nodes in set_list:
            for node in nodes:
                labels[node] += 1
        to_remove = set()
        for node in nug_gr.nodes():
            if (mm_typing[node] in ["bnd", "brk", "is_bnd"] and
                    labels[node] < 2):
                to_remove.add(node)
            if (mm_typing[node] in ["is_free", "mod", "is_equal"] and
                    labels[node] < 1):
                to_remove.add(node)
        return [nodes for nodes in set_list if not nodes & to_remove]

    port_action_list = [(port, a_node)
                        for (port, a_nodes) in non_comp_neighbors.items()
                        for a_node in a_nodes]

    # build globally non conflicting subsets and remove the incomplete actions
    memo_dict = {}
    valid_ncss = {frozenset(map(frozenset,
                                _remove_uncomplete_actions(
                                    _complete_subsets(set_list))))
                  for set_list in _valid_subsets(memo_dict, port_action_list)}
    maximal_valid_ncss = valid_ncss

    # add the nodes that were not considered at all
    # because they are not connected to a locus or state
    nodes_with_ports = set.union(
        set.union(*(list(non_comp_neighbors.values()) + [set()])),
        set.union(*(list(components.values()) + [set()])))
    nodes_without_ports = set(nug_gr.nodes()) - nodes_with_ports

    # build the nuggets and add them to the hierarchy
    # as children of the old one for testing
    def _graph_of_ncs(ncs):
        sub_graphs = [(subgraph(nug_gr, nodes),
                       {node: node for node in nodes})
                      for nodes in ncs]
        sub_graphs.append((subgraph(nug_gr, nodes_without_ports),
                           {node: node for node in nodes_without_ports}))
        return multi_pullback_pushout(nug_gr, sub_graphs)

    valid_graphs = map(_graph_of_ncs, maximal_valid_ncss)
    new_nuggets = []
    for (new_nugget, new_typing) in valid_graphs:
        if test:
            typing_by_old_nugget = {}
            for node in new_nugget.nodes():
                if new_typing[node] in hie.node[nug_id].graph.nodes():
                    typing_by_old_nugget[node] = new_typing[node]
                else:
                    typing_by_old_nugget[node] = new_ports[new_typing[node]]
            new_nuggets.append((new_nugget, typing_by_old_nugget))
        else:
            new_ag_typing = compose_homomorphisms(ag_typing, new_typing)
            new_mm_typing = compose_homomorphisms(mm_typing, new_typing)
            new_nuggets.append((new_nugget, new_ag_typing, new_mm_typing))
    return new_nuggets
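# Sketch of the homomorphism composition used in unfold_nugget and the
# pullback predicates above (assumed convention, inferred from the calls
# compose_homomorphisms(b_d, a_b) and compose_homomorphisms(ag_typing,
# new_typing): the second mapping is applied first).
def _compose_homomorphisms_sketch(g, f):
    """Return the mapping x -> g[f[x]], defined wherever both apply."""
    return {x: g[y] for (x, y) in f.items() if y in g}


# _compose_homomorphisms_sketch({"b": "c"}, {"a": "b"}) == {"a": "c"}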
def inject_clone_node(self, n, new_node_id=None):
    """Inject cloning of a node by the rule.

    This procedure clones `n` in the preserved part and the
    right-hand side.

    Parameters
    ----------
    n : hashable
        Node from `lhs` to clone
    new_node_id : hashable
        Id for the clone

    Returns
    -------
    p_new_node_id : hashable
        Id of the new clone node in the preserved part
    rhs_new_node_id : hashable
        Id of the new clone node in the right-hand side

    Raises
    ------
    RuleError
        If the node to clone is already being removed by the rule
        or if a node with the specified clone id already exists in p.
    """
    p_nodes = keys_by_value(self.p_lhs, n)
    if len(p_nodes) == 0:
        raise RuleError(
            "Cannot inject cloning: node '%s' is already "
            "being removed by the rule, revert its removal "
            "first" % n)
    else:
        if new_node_id is not None and new_node_id in self.p.nodes():
            raise RuleError(
                "Node with id '%s' already exists in the "
                "preserved part!" % new_node_id)
        some_p_node = p_nodes[0]
        p_new_node_id = primitives.clone_node(
            self.p, some_p_node, new_node_id)
        self.p_lhs[p_new_node_id] = n

        # add the clone to the rhs, generating a new id for it if needed
        rhs_new_node_id = p_new_node_id
        if rhs_new_node_id in self.rhs.nodes():
            rhs_new_node_id = primitives.unique_node_id(
                self.rhs, rhs_new_node_id)
        primitives.add_node(
            self.rhs, rhs_new_node_id, self.p.node[p_new_node_id])
        self.p_rhs[p_new_node_id] = rhs_new_node_id

        # reconnect the new rhs node with the necessary edges
        for pred in self.p.predecessors(p_new_node_id):
            if (self.p_rhs[pred], rhs_new_node_id) not in self.rhs.edges():
                primitives.add_edge(
                    self.rhs, self.p_rhs[pred], rhs_new_node_id,
                    self.p.edge[pred][p_new_node_id])
        for suc in self.p.successors(p_new_node_id):
            if (rhs_new_node_id, self.p_rhs[suc]) not in self.rhs.edges():
                primitives.add_edge(
                    self.rhs, rhs_new_node_id, self.p_rhs[suc],
                    self.p.edge[p_new_node_id][suc])
        return (p_new_node_id, rhs_new_node_id)
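# Standalone sketch of the cloning step performed by inject_clone_node on a
# plain networkx 1.x DiGraph (assumed behaviour of primitives.clone_node,
# not its actual implementation): the clone receives a copy of the node's
# attributes and of all its incident edges. Helper name is illustrative.
import networkx as nx


def _clone_node_sketch(graph, node, clone_id):
    """Add `clone_id` as a copy of `node`, duplicating its incident edges."""
    graph.add_node(clone_id, **graph.node[node])
    for pred in graph.predecessors(node):
        graph.add_edge(pred, clone_id, **graph.edge[pred][node])
    for suc in graph.successors(node):
        graph.add_edge(clone_id, suc, **graph.edge[node][suc])
    return clone_id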