Example #1
def has_no_cycle(chain: Chain):
    graph, _ = chain_as_nx_graph(chain)
    cycled = list(simple_cycles(graph))
    if len(cycled) > 0:
        raise ValueError(f'{ERROR_PREFIX} Chain has cycles')

    return True
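
The check above relies on the fact that networkx.simple_cycles yields nothing for an acyclic graph, so materializing it into a list gives an empty list exactly when the graph is a DAG. A minimal stand-alone sketch of the same pattern (the helper name assert_acyclic and the toy graph are made up for illustration):

import networkx as nx

def assert_acyclic(graph: nx.DiGraph) -> bool:
    # simple_cycles enumerates the elementary cycles of a directed graph;
    # an empty result means the graph is acyclic
    cycles = list(nx.simple_cycles(graph))
    if cycles:
        raise ValueError(f'Graph has cycles: {cycles}')
    return True

g = nx.DiGraph([(1, 2), (2, 3)])
assert assert_acyclic(g)   # acyclic: returns True
g.add_edge(3, 1)           # closes the loop 1 -> 2 -> 3 -> 1
# assert_acyclic(g) would now raise ValueError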
Example #2
def main():
    filename = '/home/ankursarda/Projects/graph-analytics/networkx/data/sample_data4.adj'  #sys.argv[1]
    g = ds.getDirectedData(filename)
    try:
        res = cyc.simple_cycles(g)
        print(list(res))
    except nx.exception.NetworkXNoCycle:
        print("[]")
Example #3
def cndp_abstract(ndp):
    from .connection import get_connection_multigraph

    G = get_connection_multigraph(ndp.get_connections())
    cycles = list(simple_cycles(G))
    if len(cycles) > 0:
        logger.debug('cndp_abstract: %d cycles' % len(cycles))
    if not cycles:
        return dpgraph_making_sure_no_reps(ndp.context)
    else:
        return cndp_abstract_loop2(ndp)
Example #4
def would_introduce_cycle(context, f, r):
    if f.dp == r.dp:
        return True
    from mocdp.comp.connection import get_connection_multigraph
    from networkx.algorithms.cycles import simple_cycles

    connections1 = list(context.connections)
    con = Connection(r.dp, r.s, f.dp, f.s)
    connections2 = connections1 + [con]

    G1 = get_connection_multigraph(connections1)
    G2 = get_connection_multigraph(connections2)
    cycles1 = list(simple_cycles(G1))
    cycles2 = list(simple_cycles(G2))
    c1 = len(cycles1)
    c2 = len(cycles2)
    #     if c1 != c2:
    #         print G2.edges()
    #         print('c1: %s' % cycles1)
    #         print('c2: %s' % cycles2)

    return c1 != c2
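
The before/after comparison of cycle counts can be reproduced on a bare networkx multigraph without any of the mocdp machinery; a rough sketch under that assumption (the helper below is illustrative, not the library function above):

import networkx as nx

def adding_edge_creates_cycle(g: nx.MultiDiGraph, u, v) -> bool:
    # compare the number of elementary cycles with and without the candidate edge
    before = len(list(nx.simple_cycles(g)))
    g2 = g.copy()
    g2.add_edge(u, v)
    after = len(list(nx.simple_cycles(g2)))
    return after != before

g = nx.MultiDiGraph([("a", "b"), ("b", "c")])
print(adding_edge_creates_cycle(g, "c", "a"))  # True: closes a -> b -> c -> a
print(adding_edge_creates_cycle(g, "a", "c"))  # False: graph stays acyclic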
Example #5
def render_graph():
    global nodes_count
    global matrix
    i = 0
    g = nx.DiGraph()
    for node in range(nodes_count):
        g.add_node(node)
    for element in matrix:
        if element == 1 and i // nodes_count != i % nodes_count:
            g.add_edge(i // nodes_count, i % nodes_count)
        i += 1
    cycles = list(simple_cycles(g))
    print(cycles)

    for cycle in cycles:
        cycle = "-".join(str(e) for e in cycle)
        ttk.Label(root, text=f"{cycle}").grid()
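
The loop above reads matrix as a flattened adjacency matrix: element i encodes the edge from row i // nodes_count to column i % nodes_count, and the extra comparison skips self-loops. Assuming the matrix is available as a NumPy array, the same graph can be built more directly; a small sketch (the 3x3 matrix is invented):

import networkx as nx
import numpy as np

adjacency = np.array([[0, 1, 0],
                      [0, 0, 1],
                      [1, 0, 0]])  # directed cycle 0 -> 1 -> 2 -> 0

g = nx.from_numpy_array(adjacency, create_using=nx.DiGraph)
g.remove_edges_from(list(nx.selfloop_edges(g)))  # mirrors the i // nodes_count != i % nodes_count check
print(list(nx.simple_cycles(g)))                 # one cycle through all three nodes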
Example #6
    def _recalc(self):
        """
        Traverses the graph, computing the maximum path lengths into and
        out of the central vertex.
        """
        path_down = nx.single_source_shortest_path_length(self.nx_graph, self.pov_id)
        path_up = dict(nx.single_target_shortest_path_length(self.nx_graph, self.pov_id))

        self.levels_down = max(path_down.values())
        self.levels_up = max(path_up.values())
        # find vertices with no successors
        for node in self.nx_graph.nodes:
            self.nx_graph.nodes[node]["is_blind"] = (not list(self.nx_graph.successors(node)))
        # find cycles that pass through the point-of-view vertex
        cycles = simple_cycles(self.nx_graph)
        cycles = list(filter(lambda cycle: self.pov_id in cycle, cycles))
        for node in set([node for cycle in cycles for node in cycle]):
            self[node]["in_cycle"] = True
Example #7
def statistics(graph, reduce=True, verb=True):
    stats = dict()

    # perform transitive reduction
    if reduce:
        from networkx.algorithms.dag import transitive_reduction
        reduced = transitive_reduction(graph)
        stats['reduced'] = reduced

        # get the extra edges that we honestly don't need
        superfluous_edges = graph.edges() - reduced.edges()
        stats['superfluous_edges'] = superfluous_edges
        # if verb:
        # print(superfluous_edges)
    else:
        stats['reduced'] = graph

    # find any cycles
    from networkx.algorithms.cycles import simple_cycles
    cycles = list(simple_cycles(stats['reduced']))
    if verb and len(cycles) > 0:
        print("Found {0} cycles".format(len(cycles)))
    elif verb:
        print("Graph is cycle-free")
    stats['cycles'] = cycles

    # calculate connected components
    # from networkx.algorithms.components import connected_components
    components = list(nx.weakly_connected_components(stats['reduced']))
    if verb:
        print("{0} connected component{1}".format(
            len(components), "" if len(components) == 1 else "s"))
    stats['components'] = components

    # return all the stats we calculated
    return stats
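
One caveat with the function above: networkx.transitive_reduction requires a directed acyclic graph and raises NetworkXError otherwise, so with reduce=True the later cycle check can only ever report a cycle-free graph. A minimal sketch of the reduction-and-statistics idea on a toy DAG (graph and values are illustrative):

import networkx as nx

g = nx.DiGraph([("a", "b"), ("b", "c"), ("a", "c")])  # a -> c is implied by a -> b -> c

reduced = nx.transitive_reduction(g)
print(g.edges() - reduced.edges())                    # {('a', 'c')}: the superfluous edge
print(list(nx.simple_cycles(reduced)))                # []: a DAG by construction
print(list(nx.weakly_connected_components(reduced)))  # [{'a', 'b', 'c'}]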
Example #8
def _find_cycles(g):
    # XXX this function needs some serious refactoring
    # (duplicated fragments of code, cryptic variable names etc.)
    C = []
    cycle = []
    a = {node: list(g.successors(node)) for node in g.nodes()}
    # removing self-loops (we may also try g.nodes_with_selfloops)
    for k, v in a.items():
        if k in v:
            v.remove(k)
    len_old = len(a)
    while len(a) > 0:
        empty_rows = []
        nodes_to_remove = []
        for node, succ in a.items():
            # if there are no successors
            if len(succ) == 0:
                # mark this node for removal
                empty_rows.append(node)
                # and traverse remaining lists of successors
                for i, j in a.items():
                    # looking for previously marked node
                    if node in j:
                        nodes_to_remove.append((i, node))
        # actual nodes removal
        # 'l' == (row, index to remove)
        for l in nodes_to_remove:
            a[l[0]].remove(l[1])
        for l in empty_rows:
            a.pop(l)
        # if we run out of nodes to remove, then stop
        if len(a) == len_old:
            break
        else:
            len_old = len(a)
    # if there are cycles
    if len(a) > 0:
        # check if sequence of removed indices creates a cycle
        cycles = [set(c) for c in simple_cycles(g) if len(c) > 1]
        # pick arbitrary row/element for removal (e.g. first one)
        len_old = len(a)
        while True:
            # check if any combination of C's elements creates a cycle
            if len(C) > 1:
                c = []
                for m in range(2, len(C) + 1):
                    c += [comb for comb in it.combinations(C, m)]
                for n in c:
                    if set(n) in cycles:
                        cycle = list(n)
                        break
            if cycle:
                break
            empty_rows = []
            nodes_to_remove = []
            # if we've just started or there are no changes
            if len(a) == len_old:
                if len(a) == 1:
                    # this shouldn't happen
                    return []
                # take the first element and remove it
                next(iter(a.values())).pop()
            # check for empty rows and remove them
            for node, succ in a.items():
                if len(succ) == 0:
                    empty_rows.append(node)
                    for i, j in a.items():
                        if node in j:
                            nodes_to_remove.append((i, node))
            for l in nodes_to_remove:
                a[l[0]].remove(l[1])
            for l in empty_rows:
                C.append(l)
                a.pop(l)
                len_old = len(a)
    return cycle
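
As the XXX comment concedes, most of the elimination bookkeeping above re-derives information that simple_cycles already provides. If all that is needed is "some non-trivial cycle, or an empty list", a much shorter version is possible; a sketch of that simplification (it deliberately ignores the removal-order preference encoded in the original):

import networkx as nx

def find_one_cycle(g: nx.DiGraph):
    # return the first elementary cycle longer than a self-loop, or []
    for cycle in nx.simple_cycles(g):
        if len(cycle) > 1:
            return cycle
    return []

g = nx.DiGraph([(1, 1), (1, 2), (2, 3), (3, 1)])
print(find_one_cycle(g))  # e.g. [1, 2, 3]; the self-loop on 1 is skipped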
Example #9
    def is_recursive_predicate(self, predicate_name):
        for cyclic_predicates in simple_cycles(
                self.get_predicate_deps_graph()):
            if predicate_name in cyclic_predicates:
                return True
        return False
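
The same membership test applies to any dependency graph: an entity is (mutually) recursive exactly when it lies on a cycle of the dependency graph. A self-contained sketch with an invented predicate-dependency graph:

import networkx as nx

# p calls q and q calls p (mutual recursion); r calls q but is not on any cycle
deps = nx.DiGraph([("p", "q"), ("q", "p"), ("r", "q")])

def is_recursive(predicate_name: str) -> bool:
    return any(predicate_name in cycle for cycle in nx.simple_cycles(deps))

print(is_recursive("p"))  # True
print(is_recursive("r"))  # False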
Example #10
def cndp_makecanonical(ndp, name_inner_muxed='_inner_muxed', s_muxed='_muxed'):
    """ 
        Returns a composite with only one ndp, called <named_inner_muxed>.
        If there were cycles, then this will also have a signal caled s_muxed
        and there will be one connection to it.
        
        raises DPSemanticErrorNotConnected
    """

    assert isinstance(ndp, CompositeNamedDP), type(ndp)

    try:
        ndp.check_fully_connected()
    except NotConnected as e:
        msg = 'Cannot put in canonical form because not all subproblems are connected.'
        raise_wrapped(DPSemanticErrorNotConnected, e, msg, compact=True)

    fnames = ndp.get_fnames()
    rnames = ndp.get_rnames()

    # First, we flatten it
    ndp = cndp_flatten(ndp)

    assert ndp.get_fnames() == fnames
    assert ndp.get_rnames() == rnames

    # then we compact it (take the product of edges)
    # Note also that this calls abstract() on the children;
    # however, because we flattened before, this is redundant,
    # as every DP is a SimpleDP
    ndp = ndp.compact()
    assert ndp.get_fnames() == fnames
    assert ndp.get_rnames() == rnames

    # Check that we have some cycles
    G = get_connection_multigraph(ndp.get_connections())
    cycles = list(simple_cycles(G))
    if not cycles:
        ndp_inner = ndp
        cycles_names = []
    else:

        # then we choose which edges to remove
        connections_to_cut = choose_connections_to_cut(
            connections=ndp.get_connections(), name2dp=ndp.get_name2ndp())

        connections_to_cut = list(connections_to_cut)
        n = len(connections_to_cut)
        cycles_names = list(['cut%d' % _ for _ in range(n)])
        ndp_inner = cndp_create_one_without_some_connections(
            ndp, connections_to_cut, cycles_names)

    assert ndp_inner.get_fnames() == fnames + cycles_names
    assert ndp_inner.get_rnames() == rnames + cycles_names

    if cycles_names:
        ndp_inner_muxed = add_muxes(ndp_inner,
                                    cs=cycles_names,
                                    s_muxed=s_muxed)
        mux_signal = s_muxed
        assert ndp_inner_muxed.get_fnames() == fnames + [mux_signal]
        assert ndp_inner_muxed.get_rnames() == rnames + [mux_signal]
    else:
        ndp_inner_muxed = ndp_inner

    name2ndp = {}
    name2ndp[name_inner_muxed] = ndp_inner_muxed
    connections = []

    connect_functions_to_outside(name2ndp,
                                 connections,
                                 ndp_name=name_inner_muxed,
                                 fnames=fnames)
    connect_resources_to_outside(name2ndp,
                                 connections,
                                 ndp_name=name_inner_muxed,
                                 rnames=rnames)

    if cycles_names:
        connections.append(
            Connection(name_inner_muxed, mux_signal, name_inner_muxed,
                       mux_signal))

    outer = CompositeNamedDP.from_parts(name2ndp=name2ndp,
                                        connections=connections,
                                        fnames=fnames,
                                        rnames=rnames)
    return outer
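
The cycle-breaking step above (pick connections to cut so the rest is acyclic, then feed them back through a mux) is mocdp-specific, but the graph-level idea can be sketched generically: keep cutting one edge from a remaining cycle until simple_cycles finds none. The helper below is a hypothetical greedy illustration, not choose_connections_to_cut:

import networkx as nx

def greedy_edges_to_cut(g: nx.MultiDiGraph):
    # remove one edge per remaining cycle until the graph is acyclic
    g = g.copy()
    cut = []
    while True:
        cycle = next(nx.simple_cycles(g), None)
        if cycle is None:
            return cut                          # no cycles left
        u, v = cycle[0], cycle[1 % len(cycle)]  # consecutive nodes on the cycle
        g.remove_edge(u, v)
        cut.append((u, v))

g = nx.MultiDiGraph([("a", "b"), ("b", "a"), ("b", "c"), ("c", "b")])
print(greedy_edges_to_cut(g))  # e.g. [('a', 'b'), ('b', 'c')]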
Example #11
def clone_combinatorial(
    record_set: List[SeqRecord],
    enzymes: List[RestrictionType],
    include: List[str] = None,
    min_count: int = -1,
    linear: bool = True,
) -> List[Tuple[List[SeqRecord], List[SeqRecord]]]:
    """Parse a single list of SeqRecords to find all circularizable plasmids.

    Turn each SeqRecord's post-digest seqs into a graph where the nodes are
    the overhangs and the edges are the linear fragments
    post-digest/catalyzing with BsaI/BpiI.

    Args:
        record_set: single record set that might circularize
        enzymes: list of enzymes to digest the input records with

    Keyword Args:
        include: feature names used to filter assemblies
        min_count: minimum number of SeqRecords for an assembly to be considered
        linear: whether the individual SeqRecords are assumed to be linear

    Returns:
        A list of tuples with:
            1. plasmids that will form
            2. SeqRecords that went into each formed plasmid
    """

    graph = nx.MultiDiGraph()

    seen_seqs: Set[str] = set()  # stored set of input seqs (not new combinations)
    for record in record_set:
        seen_seqs.add(str(record.seq + record.seq).upper())
        seen_seqs.add(
            str((record.seq + record.seq).reverse_complement().upper()))

        for left, frag, right in _catalyze(record, enzymes, linear):
            graph.add_node(left)
            graph.add_node(right)
            graph.add_edge(left, right, frag=frag)

    try:  # find all circularizable cycles
        cycles = simple_cycles(graph)
    except NetworkXNoCycle:
        return []

    # get the fragments, enzymes back out of the cycle
    ids_to_fragments: Dict[str, List[SeqRecord]] = defaultdict(list)
    ids_to_plasmids: Dict[str, List[SeqRecord]] = defaultdict(list)
    for cycle in cycles:
        # filter for the minimum number of SeqRecords
        if min_count > 0 and len(cycle) < min_count:
            continue

        combinations = CombinatorialBins()
        for i, overhang in enumerate(cycle):
            next_overhang = cycle[(i + 1) % len(cycle)]
            record_bin = []
            for out_edge in graph.out_edges(keys=True):
                src, dest, index = out_edge
                if src != overhang or dest != next_overhang:
                    continue
                record_bin.append(graph.edges[src, dest, index]["frag"])
            combinations.append(record_bin)

        for fragments in combinations:
            # create the composite plasmid
            plasmid = SeqRecord(Seq("", IUPACUnambiguousDNA()))
            for fragment in fragments:
                plasmid += fragment.upper()

            # make sure it's not just a re-ligation of insert + backbone
            plasmid_seq = str(plasmid.seq)
            if any(plasmid_seq in seq for seq in seen_seqs):
                continue

            # filter for plasmids that have an 'include' feature
            if not _has_features(plasmid, include):
                continue

            # re-order the fragments to try and match the input order
            fragments = _reorder_fragments(record_set, fragments)

            seen_seqs.add(str(plasmid.seq + plasmid.seq))
            seen_seqs.add(str(
                (plasmid.seq + plasmid.seq).reverse_complement()))

            # make a unique id for the fragments
            fragments_id = _hash_fragments(fragments)
            ids_to_fragments[fragments_id] = fragments
            ids_to_plasmids[fragments_id].append(plasmid)

    plasmids_and_fragments: List[Tuple[List[SeqRecord], List[SeqRecord]]] = []
    for ids, fragments in ids_to_fragments.items():
        plasmids = ids_to_plasmids[ids]
        for i, plasmid in enumerate(plasmids):
            plasmid.id = "+".join(f.id for f in fragments
                                  if f.id != "<unknown id>")
            plasmid.description = f"cloned from {', '.join(str(e) for e in enzymes)}"

            if len(plasmids) > 1:
                plasmid.id += f"({i + 1})"
        plasmids_and_fragments.append((plasmids, fragments))
    return plasmids_and_fragments
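
The graph pattern at the heart of clone_combinatorial is: store each digested fragment on an edge of a MultiDiGraph keyed by its two overhangs, then walk every cycle from simple_cycles and, for each consecutive overhang pair, gather all parallel edges as alternative fragments. Stripped of the Biopython specifics, that looks roughly like this (overhang sequences and fragment names are invented):

import networkx as nx

graph = nx.MultiDiGraph()
# two alternative inserts share the same AATT -> GGCC overhang pair
graph.add_edge("AATT", "GGCC", frag="insert_1")
graph.add_edge("AATT", "GGCC", frag="insert_2")
graph.add_edge("GGCC", "AATT", frag="backbone")

for cycle in nx.simple_cycles(graph):
    for i, overhang in enumerate(cycle):
        next_overhang = cycle[(i + 1) % len(cycle)]
        # every parallel edge between the two overhangs is one candidate fragment
        frag_bin = [data["frag"]
                    for data in graph.get_edge_data(overhang, next_overhang).values()]
        print(overhang, "->", next_overhang, frag_bin)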