Example #1
    def test_unorderable_nodes(self):
        """Tests that computing the longest path does not depend on
        nodes being orderable.

        For more information, see issue #1989.

        """
        # TODO In Python 3, instances of the `object` class are
        # unorderable by default, so we wouldn't need to define our own
        # class here, we could just instantiate an instance of the
        # `object` class. However, we still support Python 2; when
        # support for Python 2 is dropped, this test can be simplified
        # by replacing `Unorderable()` by `object()`.
        class Unorderable(object):
            def __lt__(self, other):
                error_msg = "< not supported between instances of " \
                    "{} and {}".format(type(self).__name__, type(other).__name__)
                raise TypeError(error_msg)

        # Create the directed path graph on four nodes in a diamond shape,
        # with nodes represented as (unorderable) Python objects.
        nodes = [Unorderable() for n in range(4)]
        G = nx.DiGraph()
        G.add_edge(nodes[0], nodes[1])
        G.add_edge(nodes[0], nodes[2])
        G.add_edge(nodes[2], nodes[3])
        G.add_edge(nodes[1], nodes[3])

        # this will raise TypeError if the implementation tries to order the nodes
        nx.dag_longest_path(G)
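
As the TODO comment notes, plain object() instances are already unorderable in Python 3, so the test could be simplified; a minimal sketch of that simplification (hypothetical test name, Python 3 only):

import networkx as nx

def test_unorderable_nodes_py3():
    # object() instances cannot be compared with < in Python 3,
    # so no custom Unorderable class is needed.
    G = nx.DiGraph()
    nodes = [object() for _ in range(4)]
    G.add_edge(nodes[0], nodes[1])
    G.add_edge(nodes[0], nodes[2])
    G.add_edge(nodes[2], nodes[3])
    G.add_edge(nodes[1], nodes[3])
    nx.dag_longest_path(G)  # must not raise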
Example #2
    def test_unweighted(self):
        edges = [(1, 2), (2, 3), (2, 4), (3, 5), (5, 6), (5, 7)]
        G = nx.DiGraph(edges)
        assert_equal(nx.dag_longest_path(G), [1, 2, 3, 5, 6])

        edges = [(1, 2), (2, 3), (3, 4), (4, 5), (1, 3), (1, 5), (3, 5)]
        G = nx.DiGraph(edges)
        assert_equal(nx.dag_longest_path(G), [1, 2, 3, 4, 5])
Example #3
    def test_unorderable_nodes(self):
        """Tests that computing the longest path does not depend on
        nodes being orderable.

        For more information, see issue #1989.

        """
        # TODO In Python 3, instances of the `object` class are
        # unorderable by default, so we wouldn't need to define our own
        # class here, we could just instantiate an instance of the
        # `object` class. However, we still support Python 2; when
        # support for Python 2 is dropped, this test can be simplified
        # by replacing `Unorderable()` by `object()`.
        class Unorderable(object):

            def __le__(self, other):
                raise NotImplementedError

            def __ge__(self, other):
                raise NotImplementedError

        # Create the directed path graph on four nodes, with nodes
        # represented as (unorderable) Python objects.
        nodes = [Unorderable() for n in range(4)]
        G = nx.DiGraph()
        G.add_edges_from(pairwise(nodes))
        path = list(nx.dag_longest_path(G))
        assert_equal(path, nodes)
Example #4
def mpsd(G):
    """
    Calculate the midpoint scaling dimension of a DAG

    Parameters
    ----------

    G : Networkx DiGraph
    """
    if G.number_of_edges() == 0:
        return 0.
    LP = nx.dag_longest_path(G)
    if len(LP) < 5:
        return 0.
    u, v = LP[0], LP[-1]

    I = dag.interval(G, u, v)

    # easy method - just check every item on the midpoint
    # hard method - start at the middle and check to see where value first drops
    # easy is implemented here for now
    max_N_min = 0
    max_intervals = None
    for i, w in enumerate(LP):
        intervals = sub_interval_sizes(I, u, v, w)
        N_min = min(intervals)
        if N_min > max_N_min:
            max_N_min = N_min
            max_intervals = intervals

    I_total = I.number_of_nodes()
    sub_I_total = sum(max_intervals) - 1.  # midpoint appears twice
    D = np.log2(I_total / sub_I_total)
    return (D + 1)
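
dag.interval and sub_interval_sizes above are project-specific helpers that are not shown; a possible sketch, assuming the interval [u, v] is the set of nodes lying on some directed path from u to v and that sub_interval_sizes returns the node counts of the sub-intervals [u, w] and [w, v]:

import networkx as nx

def interval(G, u, v):
    # Nodes reachable from u that can also reach v, plus the endpoints.
    nodes = (nx.descendants(G, u) & nx.ancestors(G, v)) | {u, v}
    return G.subgraph(nodes)

def sub_interval_sizes(I, u, v, w):
    # Sizes of the two sub-intervals split at the candidate midpoint w;
    # w is counted in both, which is why mpsd subtracts 1 from the sum.
    return [interval(I, u, w).number_of_nodes(),
            interval(I, w, v).number_of_nodes()]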
Example #5
File: dag.py  Project: ProgVal/networkx
def dag_longest_path_length(G, weight='weight', default_weight=1):
    """Returns the longest path length in a DAG

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed acyclic graph (DAG)

    weight : string, optional
        Edge data key to use for weight

    default_weight : int, optional
        The weight of edges that do not have a weight attribute

    Returns
    -------
    int
        Longest path length

    Raises
    ------
    NetworkXNotImplemented
        If `G` is not directed

    See also
    --------
    dag_longest_path
    """
    path = nx.dag_longest_path(G, weight, default_weight)
    path_length = 0
    for (u, v) in pairwise(path):
        path_length += G[u][v].get(weight, default_weight)

    return path_length
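
A brief usage sketch of the weighted variant above, on a small hypothetical DAG:

import networkx as nx

G = nx.DiGraph()
G.add_weighted_edges_from([(1, 2, 5), (2, 3, 1), (1, 3, 2)])
print(nx.dag_longest_path(G))         # [1, 2, 3]
print(nx.dag_longest_path_length(G))  # 6  (5 + 1 beats the direct edge of weight 2)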
Example #6
def main():
    G = nx.DiGraph()  # G is a directed graph
    # build the graph from its edges
    G.add_weighted_edges_from([(1,2,2.0),(1,3,1.0),(2,3,3.0),(2,4,3.0),(3,5,1.0),(4,6,2.0),(5,4,2.0),(5,6,5.0)])
    for i in G.edges():
        # print i[0], i[1]
        G[i[0]][i[1]]["color"] = "black"
    # G[1][2]["color"] = "red"
    maiorCaminho = nx.dag_longest_path(G)
    print maiorCaminho
    for i in range(1, len(maiorCaminho)):
        G[maiorCaminho[i-1]][maiorCaminho[i]]["color"] = "red"
    desenhaGrafo(G, "grafo-3.png")
Example #7
def ftrace_callgraph_dot(ftracefile):
        callgraph=nx.DiGraph()
        ftracef=open(ftracefile)
        for l in ftracef:
                ltok=l.split(":")
                callgraphedge=ltok[1]
                callgraphedgetok=callgraphedge.split("<-")
                callgraph.add_edge(callgraphedgetok[1], callgraphedgetok[0])
        write_dot(callgraph,"CyclomaticComplexitySparkMapReducer.ftrace_callgraph.dot")
        sorted_pagerank_nxg=sorted(nx.pagerank(callgraph).items(),key=operator.itemgetter(1), reverse=True)
        print "Most active kernel code - PageRank of call graph:",sorted_pagerank_nxg
        sorted_degreecentral_nxg=sorted(nx.degree_centrality(callgraph).items(),key=operator.itemgetter(1), reverse=True)
        print "Most active kernel code - Degree centrality of call graph:",sorted_degreecentral_nxg
	print "Simple Cycles in call graph:"
	for cycle in nx.simple_cycles(callgraph):
		print "Cycle:",cycle
	print "Longest Path (callstack) in call graph:",nx.dag_longest_path(callgraph)
Example #8
File: dag.py  Project: 4c656554/networkx
def dag_longest_path_length(G):
    """Returns the longest path length in a DAG

    Parameters
    ----------
    G : NetworkX DiGraph
        Graph

    Returns
    -------
    path_length : int
        Longest path length

    Raises
    ------
    NetworkXNotImplemented
        If G is not directed

    See also
    --------
    dag_longest_path
    """
    path_length = len(nx.dag_longest_path(G)) - 1
    return path_length
Example #9
 def build_dag_scaffold(self, g):
     scaffold_order = []
     scaffold_seq = ''
     #Determine path in DAG
     path = nx.dag_longest_path(g)
     #Add sequence and edge data to scaffold_order
     for n in path:
         seq = g.node[n]['seq']
         try:
             s = next(g.successors_iter(n))
             dist = g[n][s]['D']
             scaffold_order.append(seq)
             scaffold_order.append(dist)
         except:
             scaffold_order.append(seq)
     #Convert gaps in scaffold_order into strings of Ns
     for i in range(len(scaffold_order)):
         value = scaffold_order[i]
         if type(value) is int and value > 0:
             scaffold_order[i] = 'N' * value
     #Build scaffold_seq and merge overlaps
     while len(scaffold_order) > 0:
         merge_seq = ''
         item = scaffold_order.pop()
         if type(item) is str:
             scaffold_seq = item + scaffold_seq
         elif type(item) is int and item == 0:
             merge_seq = scaffold_order.pop()
             scaffold_seq = merge_seq + scaffold_seq
         elif type(item) is int and item < 0:
             merge_seq = scaffold_order.pop()
             scaffold_seq = self.merge_seqs(merge_seq, scaffold_seq, item)
         else:
             sys.exit('FATAL ERROR: Unknown scaffold building operation!')
     #Report assembled scaffold sequence
     self.scaffolds.add(scaffold_seq)
Example #10
def longest_path(G):
    '''
    used for merging raw reads into superead
    '''
    path = nx.dag_longest_path(G)  # ['1','3','2']
    return path
Example #11
def main():
    global seqDict
    global rna_gap
    global g
    global args
    global scaffolding_type_1
    global scaffolding_type_2

    """
    Parsing arguments and initiating variables
    """

    if args.d:
        logging.basicConfig(format='%(asctime)s::%(levelname)s::%(message)s',filename=args.log, level=logging.DEBUG)
    elif args.v:
        logging.basicConfig(format='%(asctime)s::%(levelname)s::%(message)s',filename=args.log, level=logging.INFO)
    else:
        logging.basicConfig(format='%(asctime)s::%(levelname)s::%(message)s',filename=args.log)
    rna_gap=int(args.rna_gap)
    logging.info("Begin the analysis with Scaff2link version "+version)
    mkdir_p(args.outDir)
    # the arguments are available as args.fasta / args.phylo / args.rna etc.; each returns the value as a string
    g=nx.MultiDiGraph()
    ## Adding all vertexes to the graph

    """
    Reading fasta
    """

    seqDict=dict()
    for record in SeqIO.parse(args.fasta, "fasta"):
        seqDict[record.id]=record.seq
        g.add_node(record.id,
        length=len(record.seq),
        log10Len=math.log10(len(record.seq)),
        strand='')
    logging.info("Number of nodes parsed: "+str(len(g.nodes)))

    """
    Adding edges from Ragout :
    """

    logging.info("Parsing edges from synteny information using ragout: "+args.phylo)
    scaffolding_type_1="synteny"
    current_scaffold=""
    fromName=""
    gap_length=0
    with open(args.phylo) as f:
        for line in f:
            line=line.rstrip("\n")
            if line[0] != "#" and line !="":
                elems=line.split("\t")
                if current_scaffold == elems[0]:
                    if elems[4] == "N":
                        gap_length=int(elems[5])
                    else:
                        if not (fromName in g.nodes and elems[5] in g.nodes):
                            logging.critical("Unknown node name when parsing ragout edge from "+fromName+" to "+elems[5])
                            sys.exit(1)
                        g.add_edge(fromName,elems[5],type=scaffolding_type_1,fromStrand=from_orientation,toStrand=elems[8],gap=gap_length, readsCount=-1)
                        g.nodes[fromName]["strand"]=from_orientation
                        g.nodes[elems[5]]["strand"]=elems[8]
                        fromName=elems[5]
                        from_orientation=elems[8]
                else:
                    current_scaffold=elems[0]
                    fromName=elems[5]
                    from_orientation=elems[8]

    """
    Parsing edges from Agouti :
    """

    logging.info("Parsing joint pairs in edges from rna-seq information using agouti: "+args.rna)
    scaffolding_type_2="rna-seq"
    joint_pairs=dict()
    with open(args.rna) as f:
        for line in f:
            line=line.rstrip()
            elems=line.split("\t")
            fromName=elems[1]
            toName=elems[4]
            if args.lib_type[0]=='f':
                FromStrand=elems[3]
            else:
                FromStrand=strand_reverse(elems[3])
            if args.lib_type[1]=='f':
                ToStrand=elems[6]
            else:
                ToStrand=strand_reverse(elems[6])
            key="\t".join([FromStrand,fromName,ToStrand,toName])
            if key not in joint_pairs.keys():
                joint_pairs[key]=1
            else:
                joint_pairs[key]+=1

    """
    Adding parsed edges from Agouti :
    """

    logging.info("Adding rna-seq based edges to the scaffolding graph")
    for key in joint_pairs.keys():
        if joint_pairs[key]>(int(args.min_reads)-1):
            key_list=key.split("\t")
            FromStrand=key_list[0]
            fromName=key_list[1]
            ToStrand=key_list[2]
            toName=key_list[3]
            if not (fromName in g.nodes and toName in g.nodes):
                logging.critical("Unknown node name when parsing agouti edge from "+fromName+" to "+toName)
                sys.exit(1)
            # Here we add the edge, but try to simplify it if it already exist because of Ragout
            numFromTo=g.number_of_edges(fromName,toName)
            add_stranded_edge(fromName,toName,FromStrand,ToStrand,scaffolding_type_2,joint_pairs[key])

    """
    Finalizing and reporting the initial graph (end of step 00)
    """

    logging.info("Scaffolding graph completed, collecting nodes statistics")
    TotalNodes=len(g.nodes)
    ConnectedNodes=TotalNodes
    ConsistantNodes=TotalNodes
    node2remove=list()
    for node in g.nodes:
        if nx.is_isolate(g,node): # WAY TO GET THE DEGREE OF NODE
            #g.remove_node(node)
            node2remove.append(node)
            ConnectedNodes-=1
            ConsistantNodes-=1
        elif g.nodes[node]["strand"]=='.':
            ConsistantNodes-=1
    for node in node2remove:
        g.remove_node(node)
    logging.info("Out of "+str(TotalNodes)+" initial nodes, "+str(ConnectedNodes)+" are connected, and among them "+str(ConsistantNodes)+" are strand consistent")
    if args.v : 
        logging.info("Collecting edges statistics")
        edgeCounter=Counter(list(g.edges))
        t1=0
        t2=0
        t12=0
        edges_list=list(g.edges)
        edges_set=set(g.edges)
        for edge in edgeCounter.keys():
            for key in range(0,edgeCounter[edge]):
                if g[edge[0]][edge[1]][key]["type"]==scaffolding_type_1:
                    t1+=1
                elif g[edge[0]][edge[1]][key]["type"]==scaffolding_type_2:
                    t2+=1
                elif g[edge[0]][edge[1]][key]["type"]==scaffolding_type_1+"_AND_"+scaffolding_type_2:
                    t12+=1
                else:
                    logging.critical("unknown scaffold type")
                    sys.exit(1)
        logging.info("Edges statistics : \n"+
            ";".join([scaffolding_type_1,scaffolding_type_2,scaffolding_type_1+"_AND_"+scaffolding_type_2])+
            "\n"+";".join([str(t1),str(t2),str(t12)]))
    mkdir_p(args.outDir+'/00-complete_graph')
    nx.write_graphml(g, args.outDir+"/00-complete_graph/graph.graphml")

    """
    Step 01 : chain simplification
    """

    logging.info("starting first chain simplification")
    setNodes=set(g.nodes)
    basename="scaff2links_chain_"
    count_name=1
    while len(setNodes)!=0:
        try:
            node=setNodes.pop()
            if g.nodes[node]["strand"]!='.':
                inspectIn=True
                inspectOut=True
                chain=[node]
                nodeIn=node
                nodeOut=node
                logging.debug("Chain simplification started on node:"+node)
                while inspectIn:
                    if len(g.in_edges(nodeOut))==1 :
                        possibleNodeOut=list(g.in_edges(nodeOut))[0][0]
                        if g.has_edge(nodeOut,possibleNodeOut):
                            inspectIn=False
                            logging.debug("STOP")
                        else:
                            logging.debug("IN chain extension: (in)"+nodeOut+"\t(out)"+possibleNodeOut)
                            if (len(g.out_edges(possibleNodeOut))==1) and (g.nodes[possibleNodeOut]["strand"]!='.'):
                                logging.debug("CONTINUE")
                                nodeOut=possibleNodeOut
                                chain.append(nodeOut)
                                setNodes.remove(nodeOut)
                            else : 
                                inspectIn=False
                                logging.debug("STOP")
                    else: inspectIn=False
                while inspectOut:
                    if len(g.out_edges(nodeIn))==1 :
                        possibleNodeIn=list(g.out_edges(nodeIn))[0][1]
                        if g.has_edge(possibleNodeIn,nodeIn):
                            inspectOut=False
                            logging.debug("STOP")
                        else:
                            logging.debug("OUT chain extension: (out)"+nodeIn+"\t(in)"+possibleNodeIn)
                            if (len(g.in_edges(possibleNodeIn))==1) and (g.nodes[possibleNodeIn]["strand"]!='.'):
                                logging.debug("CONTINUE")
                                nodeIn=possibleNodeIn
                                chain.append(nodeIn)
                                setNodes.remove(nodeIn)
                            else: 
                                inspectOut=False
                                logging.debug("STOP")
                    else: inspectOut=False
                name=basename+str(count_name)
                count_name+=1
                def_chain=chain_simplification(chain=chain,start=nodeOut,end=nodeIn,name=name,chain_is_path=False)
                logging.debug(name+"\t"+def_chain)
        except:
            nx.write_graphml(g, args.outDir+"/error.graphml")
            print(traceback.format_exc())
            logging.critical("error during first chain simplification, the current graph have been written")
            sys.exit(1)

    """
    end of step 01 writing...
    """

    mkdir_p(args.outDir+'/01-simplified_graph')
    nx.write_graphml(g, args.outDir+"/01-simplified_graph/graph.graphml")
    node2remove=list()
    for node in g.nodes:
        if nx.is_isolate(g,node): # WAY TO GET THE DEGREE OF NODE
            #g.remove_node(node)
            node2remove.append(node)
    for node in node2remove:
        g.remove_node(node)
    with open(args.outDir+"/01-simplified_graph/scaffolds.fasta", "w") as output_handle:
        for key in seqDict.keys():
            SeqIO.write(SeqRecord(seqDict[key],id=key,description=''), output_handle, "fasta")

    """
    Step 02 : dag simplification
    """

    logging.info("starting dag simplification")
    logging.info("Find remaining bridges")
    g_broken=nx.Graph(g.copy())
    edge2remove=list()
    for e in nx.bridges(g_broken):
        edge2remove.append(e)
    for e_ in edge2remove:
        g_broken.remove_edge(e_[0],e_[1])
    basename="scaff2links_dag_"
    count_name=1
    logging.info("Connected component analysis for DAG")
    for nset in nx.connected_components(g_broken):
        subg=g.subgraph(nset)
        if nx.is_directed_acyclic_graph(subg) and len(list(subg.nodes()))>1:
            lpath=nx.dag_longest_path(subg)
            logging.debug("DAG found :"+str(nset)+"| type="+str(type(nset)))
            if len(list(subg.nodes()))==2:
                logging.warn("A connected component resulting from the graph where all bridges have been removed is of size 2, which is mathematically unexpected")
            if len(lpath)==len(nset):
                logging.debug("DAG with all nodes in the longest path: ("+")->-(".join(lpath)+')')
                # check neighbor
                pos=-1
                start=0
                for node in lpath:
                    pos+=1
                    add_set=set()
                    if node==lpath[0] or pos == start:
                        for e in list(g.in_edges(node)):
                            add_set.add(e[0])
                    if node==lpath[-1]:
                        for e in list(g.out_edges(node)):
                            add_set.add(e[1])
                    if not (set(nx.all_neighbors(g,node)) <= (add_set | nset)):
                        logging.debug("Found cutting node in DAG: "+lpath[pos])
                        for e in list(g.out_edges(node)):
                            add_set.add(e[1])
                        if (set(nx.all_neighbors(g,node)) <= (add_set | nset)) and (pos-start>0):
                            logging.debug("cutted DAG simplification (including the cutting node) at "+lpath[pos]+": ("+")->-(".join([ lpath[x] for x in range(start,pos+1) ] )+')')
                            chain_simplification([ lpath[x] for x in range(start,pos+1) ],lpath[start],lpath[pos],basename+str(count_name),chain_is_path=True)
                            count_name+=1
                            start=pos+1
                            checkStart=False
                        elif pos-start>1:
                            logging.debug("cutted DAG simplification (excluding the cutting node) at "+lpath[pos]+": ("+")->-(".join([ lpath[x] for x in range(start,pos) ] )+')')
                            chain_simplification([ lpath[x] for x in range(start,pos) ],lpath[start],lpath[pos-1],basename+str(count_name),chain_is_path=True)
                            count_name+=1
                            checkStart=True
                        else: checkStart=True
                        if checkStart:
                            start=pos
                            for e in list(g.out_edges(node)):
                                if not e[1] in nset:
                                    start=pos+1
                                    logging.debug("NB: Cutting node excluded as start")
                                    break
                            if start==pos:
                                logging.debug("NB: Cutting node included as start")
                if pos-start>0:
                    logging.debug("Final DAG simplification : ("+")->-(".join([ lpath[x] for x in range(start,pos+1) ] )+')')
                    chain_simplification([ lpath[x] for x in range(start,pos+1) ],lpath[start],lpath[pos],basename+str(count_name),chain_is_path=True)
                    count_name+=1
                start=pos+1

    """
    end of step 02 writing...
    """

    logging.info("Write graph and fasta after DAG simplification")
    mkdir_p(args.outDir+'/02-after_dag_graph')
    nx.write_graphml(g, args.outDir+"/02-after_dag_graph/graph.graphml")
    node2remove=list()
    for node in g.nodes:
        if nx.is_isolate(g,node): # WAY TO GET THE DEGREE OF NODE
            #g.remove_node(node)
            node2remove.append(node)
    for node in node2remove:
        g.remove_node(node)
    with open(args.outDir+"/02-after_dag_graph/scaffolds.fasta", "w") as output_handle:
        for key in seqDict.keys():
            SeqIO.write(SeqRecord(seqDict[key],id=key,description=''), output_handle, "fasta")
Example #12
def prog_24(fname):
    import sys
    sys.setrecursionlimit(10**5)
    f = open(fname)
    n = eval(f.readline().strip())
    graphs = {}
    # f.readline()
    for i in xrange(n):
        vs,es = map(int, f.readline().strip().split())
        graph = nx.DiGraph()
        # graph.add_nodes_from(range(1,vs+1))
        for j in xrange(es):
            e1,e2 = map(int, f.readline().strip().split())
            graph.add_edge(e1,e2)

        graphs[i]=graph

    f.close()

    # def hamilton(G):
    #     F = [(G,[G.nodes()[0]])]
    #     n = G.number_of_nodes()
    #     while F:
    #         graph,path = F.pop()
    #         confs = []
    #         for node in graph.neighbors(path[-1]):
    #             conf_p = path[:]
    #             conf_p.append(node)
    #             conf_g = nx.Graph(graph)
    #             conf_g.remove_node(path[-1])
    #             confs.append((conf_g,conf_p))
    #         for g,p in confs:
    #             if len(p)==n:
    #                 return p
    #             else:
    #                 F.append((g,p))
    #     return None

    with open('result.dat','w') as f:
        f.write('\n')

    for i in xrange(n):
        graph = graphs[i]

        ns = sorted(graph.nodes())
        lpath = nx.dag_longest_path(graph)
        if len(ns) == len(lpath):
            print 1,
            for p in lpath:
                print p,
            print
        else:
            print -1

        with open('result.dat','a') as f:
            if len(ns) == len(lpath):
                f.write(str(1)+'\t')
                for p in lpath:
                    f.write(str(p)+'\t')
                f.write('\n')
            else:
                f.write('-1\n')
Example #13
 def dag_longest_path(self, DAG):
     return nx.dag_longest_path(DAG)
Example #14
 def test_empty(self):
     G = nx.DiGraph()
     assert nx.dag_longest_path(G) == []
Example #15
def blob_parser(net_file, place_file):
    length1 = 27  # 0 to 26
    length2 = 27
    G = nx.DiGraph()
    net_tree = ET.parse(net_file)
    net_root = net_tree.getroot()
    # for child in net_root:
    #     print(child.tag, child.attrib)
    place = open(place_file, 'r')
    place_dict = dict()
    start_dict = False
    while True:
        new_line = place.readline()
        if len(new_line) == 0:
            break
        # print(new_line)
        if new_line[:5] == "Array":
            new_line = new_line.split()
            length1 = int(new_line[2])
            length2 = int(new_line[4])
        if new_line[:2] == "#-":
            # "#-" marks the start of the line that separates unwanted content from wanted content
            start_dict = True
            break
    while start_dict:
        new_line = place.readline()
        if len(new_line) == 0:
            break
        new_line = new_line.split()
        place_dict[new_line[0]] = tuple(new_line[1:])
    place.close()
    add_nodes_recursive(G, net_root, place_dict, 0)
    print("")
    print("Graph G size is " + str(len(G.nodes)))
    # for node in G.nodes:
    #     print(str(node)+" "+str(G.nodes[node]))

    # Adding edges:
    print("\nStarting edge construction")
    in_edge_list = edge_construction(net_root, dict(), G)
    print("IN_EDGE_LIST " + str(in_edge_list))
    print("\n###############\n")
    for coord in in_edge_list:
        # print("coord is "+str(coord))
        for v in in_edge_list[coord]:
            for u in in_edge_list[coord][v]:
                # print(str(u)+" "+str(v))
                # forbidden = {'open', 'top^iReset'}
                if u in G.nodes and v in G.nodes:
                    G.add_edge(u, v, weight=distance_in_graph(G, u, v))

    # Remove flip-flops with in-edges as well as out-edges
    ff_to_remove = set()
    for edge in G.edges:
        if G.nodes[edge[1]]['type'] == "ff" and len(G[edge[1]]) > 0:
            ff_to_remove.add(edge[1])
    for node in ff_to_remove:
        G.remove_node(node)
    # print("Graph G size is "+str(len(G.nodes)))

    longest_path = nx.dag_longest_path(G)
    print("Longest path in original graph is " + str(longest_path))
    for v in longest_path:
        print(str(G.nodes[v]['x']) + " " + str(G.nodes[v]['y']))
    length_longest = nx.dag_longest_path_length(G)
    print("longest path has length " + str(length_longest) + " with " +
          str(len(longest_path)) + " nodes.")
    for i in range(len(longest_path) - 3):
        v1 = longest_path[i]
        v2 = longest_path[i + 1]
        v3 = longest_path[i + 2]
        print("I am starting to move: " + str(v1) + " " + str(v2) + " " +
              str(v3))
        # condition to check whether we want to do the new graph or not
        # if yes:
        d = distance_in_graph(G, v1, v2) + distance_in_graph(G, v2, v3)
        print("v1 v2 are at distance " + str(distance_in_graph(G, v1, v2)))
        print("v3 v2 are at distance " + str(distance_in_graph(G, v2, v3)))

        left = max(min(G.nodes[v1]['x'] - d, G.nodes[v3]['x'] - d), 0)
        right = min(max(G.nodes[v1]['x'] + d, G.nodes[v3]['x'] + d), length1)
        bottom = max(min(G.nodes[v1]['y'] - d, G.nodes[v3]['y'] - d), 0)
        top = min(max(G.nodes[v1]['y'] + d, G.nodes[v3]['y'] + d), length2)
        old_coordinates = (G.nodes[v2]['x'], G.nodes[v2]['y'])
        print(
            str((top - bottom) * (right - left)) +
            " is the total number of points we check")
        for x in range(left, right):
            for y in range(bottom, top):
                #             try x,y as new coordinates if new sum of distances is smaller
                new_v1v2 = abs(G.nodes[v1]['x'] - x) + abs(G.nodes[v1]['y'] -
                                                           y) + 8
                new_v2v3 = abs(G.nodes[v3]['x'] - x) + abs(G.nodes[v3]['y'] -
                                                           y) + 8
                if new_v1v2 + new_v2v3 >= d:
                    continue
                changing_vertices = [
                    node for node in in_edge_list[old_coordinates]
                    if node != v2 and node in G.nodes
                ]
                G.nodes[v2]['x'] = x
                G.nodes[v2]['y'] = y
                G.add_edge(v1, v2, weight=distance_in_graph(G, v1, v2))
                G.add_edge(v2, v3, weight=distance_in_graph(G, v2, v3))
                # changing_vertices = [node for node in in_edge_list[old_coordinates]]
                # G.edges[]
                for u in changing_vertices:
                    update_edges_to_from_u(G, u, x, y)
                longest_path_new = nx.dag_longest_path(G)
                new_longest_length = nx.dag_longest_path_length(G)
                if length_longest > new_longest_length:
                    print("FOUND A BETTER CONNECTION")
                    print("Change all nodes from the cluster in " +
                          str(old_coordinates) + " to " + str((x, y)))
                    new_d = distance_in_graph(G, v1, v2) + distance_in_graph(
                        G, v2, v3)
                    print(
                        "For our triplet, the sum of taxicab distances changed from "
                        + str(d) + " to " + str(new_d) +
                        " [which should be the same as " +
                        str(new_v2v3 + new_v1v2) + "]. ")
                    print("new longest path has length " +
                          str(new_longest_length) + " with " +
                          str(len(longest_path_new)) + " nodes.")
                    print("New longest path is : " + str(longest_path_new))
                    if longest_path_new != longest_path:
                        print("NEW LONGEST PATH IS DIFFERENT!")

                print("\nCompleted " + str((x, y)) + "\n###############\n")
                for u in changing_vertices:
                    update_edges_to_from_u(G, u, old_coordinates[0],
                                           old_coordinates[1])
                G.nodes[v2]['x'] = old_coordinates[0]
                G.nodes[v2]['y'] = old_coordinates[1]
                G.add_edge(v1, v2, weight=distance_in_graph(G, v1, v2))
                G.add_edge(v2, v3, weight=distance_in_graph(G, v2, v3))

    print("no of edges: " + str(len(G.edges)))
Example #16
 def test_empty(self):
     G = nx.DiGraph()
     assert_equal(nx.dag_longest_path(G), [])
Example #17
 def camino_critico(self):
     return {'Actividades': [(attributes['nombre']) for (nodo_inicial, nodo_final, attributes) in
                             self.graph.edges(data=True) if attributes['H_total'] == 0],
             'Nodos': nx.dag_longest_path(self.graph, weight='duracion')}
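
The method above expects edge attributes nombre, duracion and H_total on the project graph; a minimal, hypothetical graph showing the weighted longest-path call it relies on:

import networkx as nx

proj = nx.DiGraph()
proj.add_edge("A", "B", nombre="design", duracion=3, H_total=0)
proj.add_edge("B", "C", nombre="build", duracion=5, H_total=0)
proj.add_edge("B", "D", nombre="docs", duracion=1, H_total=4)
print(nx.dag_longest_path(proj, weight="duracion"))  # ['A', 'B', 'C']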
Example #18
    def _layout(graph, quality=2):
        """
        :param graph:  a networkx.DiGraph object
        :param quality: 0=dirty, 1=draft, 2=good, 3=great, 4=publish
        :return: position dict keyed by node names
        """
        if not nx.is_directed_acyclic_graph(graph):
            raise DataJointError('This layout only works for acyclic graphs')

        # assign depths
        nodes = set(node for node in graph.nodes() if not graph.in_edges(node))  # root
        depth = 0
        depths = {}
        while nodes:
            depths = dict(depths, **dict.fromkeys(nodes, depth))
            nodes = set(edge[1] for edge in graph.out_edges(nodes))
            depth += 1
        # push depth down as far as possible
        updated = True
        while updated:
            updated = False
            for node in graph.nodes():
                if graph.successors(node):
                    m = min(depths[n] for n in graph.successors(node)) - 1
                    updated = updated or m > depths[node]
                    depths[node] = m
        longest_path = nx.dag_longest_path(graph)  # place at x=0

        # assign initial x positions
        x = dict.fromkeys(graph, 0)
        unplaced = set(node for node in graph if node not in longest_path)
        for node in sorted(unplaced, key=graph.degree, reverse=True):
            neighbors = set(nx.all_neighbors(graph, node))
            placed_neighbors = neighbors.difference(unplaced)
            placed_other = set(graph.nodes()).difference(unplaced).difference(neighbors)
            x[node] = (sum(x[n] for n in placed_neighbors) -
                       sum(x[n] for n in placed_other) +
                       0.05*(np.random.ranf()-0.5))/(len(placed_neighbors) + len(placed_other) + 0.01)
            x[node] += 2*(x[node] > 0)-1
            unplaced.remove(node)

        nodes = list(nx.topological_sort(graph))  # materialized so nodes.index() works below
        x = np.array([x[n] for n in nodes])

        intersecting_edge_pairs = list(
            [[nodes.index(n) for n in edge1],
             [nodes.index(n) for n in edge2]]
            for edge1, edge2 in itertools.combinations(graph.edges(), 2)
            if len(set(edge1 + edge2)) == 4 and (
                depths[edge1[1]] > depths[edge2[0]] and
                depths[edge2[1]] > depths[edge1[0]]))
        depths = depth - np.array([depths[n] for n in nodes])

        #  minimize layout cost function (for x-coordinate only)
        A = np.asarray(nx.to_numpy_matrix(graph, dtype=bool))   # adjacency matrix
        A = np.logical_or(A, A.transpose())
        D = np.zeros_like(A,dtype=bool)         # neighbor matrix
        for d in set(depths):
            ix = depths == d
            D[np.outer(ix,ix)]=True
        D = np.logical_xor(D, np.identity(len(nodes), bool))

        def cost(xx):
            xx = np.expand_dims(xx, 1)
            g = xx.transpose()-xx
            h = g**2 + 1e-8
            crossings = sum((xx[edge1[0]][0] > xx[edge2[0]][0]) != (xx[edge1[1]][0] > xx[edge2[1]][0])
                            for edge1, edge2 in intersecting_edge_pairs)
            return crossings*1000 + h[A].sum() + 0.1*h[D].sum() + (1/h[D]).sum()

        def grad(xx):
            xx = np.expand_dims(xx, 1)
            g = xx.transpose()-xx
            h = g**2 + 1e-8
            return -2*((A*g).sum(axis=1) + 0.1*(D*g).sum(axis=1) - (D*g/h**2).sum(axis=1))
        niter = [100, 200, 500, 1000, 3000][quality]
        maxiter = [1, 2, 3, 4, 4][quality]
        x = basinhopping(cost, x, niter=niter, interval=40, T=30, stepsize=1.0, disp=False,
                         minimizer_kwargs=dict(jac=grad, options=dict(maxiter=maxiter))).x
        # normalize coordinates to unit square
        phi = np.pi*20/180   # rotate coordinate slightly
        cs, sn = np.cos(phi), np.sin(phi)
        x, depths = cs*x - sn*depths,  sn*x + cs*depths
        x -= x.min()
        x /= x.max()+0.01
        depths -= depths.min()
        depths = depths/(depths.max()+0.01)
        return {node: (x, y) for node, x, y in zip(nodes, x, depths)}
Example #19
import numpy as np
import matplotlib.pyplot as plt
from pulp import LpMaximize, LpProblem, LpStatus, lpSum, LpVariable, LpMinimize
import networkx as nx
#Task1
print("Task1")
# the adjacency matrix of network graph
networkGraph = np.array([[0, 9, 11, 15, 0, 0, 0, 0], [0, 0, 0, 6, 11, 0, 0, 0],
                         [0, 0, 0, 4, 0, 13, 0, 0], [0, 0, 0, 0, 7, 5, 10, 0],
                         [0, 0, 0, 0, 0, 0, 0, 17], [0, 0, 0, 0, 0, 0, 3, 12],
                         [0, 0, 0, 0, 0, 0, 0, 9], [0, 0, 0, 0, 0, 0, 0, 0]])
# create network graph with using the adjacency matrix
DG = nx.from_numpy_matrix(np.matrix(networkGraph), create_using=nx.DiGraph)
print("Critical route of network graph is :", nx.dag_longest_path(DG))
print("Length of critical route of network graph is:",
      nx.dag_longest_path_length(DG))
# make layout
layout = nx.shell_layout(DG)
# get weight of edges
labels = nx.get_edge_attributes(DG, "weight")
# draw weight of edges
nx.draw_networkx_edge_labels(DG, pos=layout, edge_labels=labels)
# draw network graph
nx.draw(DG, node_color='red', node_size=1000, with_labels=True)
# output picture
plt.show()

# Task2
print("Task2")
attachments = np.array([5, 10, 25, 50])
timeSpent = np.zeros(4)
Example #20
 def test_weighted(self):
     G = nx.DiGraph()
     edges = [(1, 2, -5), (2, 3, 1), (3, 4, 1), (4, 5, 0), (3, 5, 4),
              (1, 6, 2)]
     G.add_weighted_edges_from(edges)
     assert nx.dag_longest_path(G) == [2, 3, 5]
Example #21
 def test_unweighted2(self):
     edges = [(1, 2), (2, 3), (3, 4), (4, 5), (1, 3), (1, 5), (3, 5)]
     G = nx.DiGraph(edges)
     assert nx.dag_longest_path(G) == [1, 2, 3, 4, 5]
Example #22
 def test_unweighted1(self):
     edges = [(1, 2), (2, 3), (2, 4), (3, 5), (5, 6), (3, 7)]
     G = nx.DiGraph(edges)
     assert nx.dag_longest_path(G) == [1, 2, 3, 5, 6]
Example #23
        sys.exit(1)
    fname = sys.argv[1]
    with open(fname) as f:
        graph = nx.DiGraph()
        for line in f:
            tweet = json.loads(line)
            if 'id' in tweet:
                graph.add_node(tweet['id'],
                               tweet=tweet['text'],
                               author=tweet['user']['screen_name'],
                               created_at=tweet['created_at'])
                if tweet['in_reply_to_status_id']:
                    reply_to = tweet['in_reply_to_status_id']
                    if tweet['in_reply_to_status_id'] in graph \
                    and tweet['user']['screen_name'] != graph.node[reply_to]['author']:
                        graph.add_edge(tweet['in_reply_to_status_id'], tweet['id'])
        print(nx.info(graph))

        sorted_replied = sorted(graph.degree_iter(), key=itemgetter(1), reverse=True)
        most_replied_id, replies = sorted_replied[0]
        print("Most replied tweet ({} replies):".format(replies))
        print(graph.node[most_replied_id])

        print("Longest discussion:")
        longest_path = nx.dag_longest_path(graph)
        for tweet_id in longest_path:
            node = graph.node[tweet_id]
            print("{} (by {} at {})".format(node['tweet'],
                                            node['author'],
                                            node['created_at']))
Example #24
 def __str__(self):
     k, n = (len(nx.dag_longest_path(self._graph))), len(self)
     return "%s\n#GO-terms\t:\t%d\nmax-path\t:\t%d" % \
            (self._aspect, n, k)
Example #25
def getLongestPath(G):
    return nx.dag_longest_path(G)
Example #26
 def test_empty(self):
     G = nx.DiGraph()
     assert_equal(nx.dag_longest_path(G), [])
Example #27
def make_activities():
    activities = [Activity] * 16
    activities[0] = None

    write_screenplay = Activity(1, [], "Writing a screenplay", 30)
    activities[1] = write_screenplay

    making_costumes = Activity(2, [7], "Making costumes", 5)
    activities[2] = making_costumes

    rehearsals = Activity(3, [7], "Rehearsals", 12)
    activities[3] = rehearsals

    promo_mats = Activity(4, [8], "Making promotional material", 5)
    activities[4] = promo_mats

    show_programs = Activity(5, [8], "Making programs", 3)
    activities[5] = show_programs

    sets_and_props = Activity(6, [11], "Making sets and props", 10)
    activities[6] = sets_and_props

    casting = Activity(7, [8], "Casting", 3)
    activities[7] = casting

    venue_contracting = Activity(8, [12], "Obtaining a venue", 3)
    activities[8] = venue_contracting

    organizing_lights = Activity(9, [10], "Organizing lights and stage effects", 3)
    activities[9] = organizing_lights

    dress_rehearsal = Activity(10, [3, 6], "Dress rehearsal", 1)
    activities[10] = dress_rehearsal

    hire_stage_hands = Activity(11, [8], "Hiring stage hands", 1)
    activities[11] = hire_stage_hands

    choosing_performance_dates = Activity(12, [1], "Choosing performance dates and show times", 1)
    activities[12] = choosing_performance_dates

    selling_tickets = Activity(13, [8], "Selling the tickets", 7)
    activities[13] = selling_tickets

    arranging_seating = Activity(14, [13], "Arranging seating", 2)
    activities[14] = arranging_seating

    end_node = Activity(15, [2, 9, 4, 5, 14], "End", 0)
    activities[15] = end_node

    activity_network = nx.DiGraph()
    activity_network.add_nodes_from(activities[1 : len(activities)])

    runsum = 1
    for task in activity_network:
        j = 0
        while j < len(task.predecessor_array):
            """print("adding edge # " + str(runsum) + " from " + " (activity index #" + str(
                    activities[task.predecessor_array[
                        j]].activity_index) + ") " + " to " + " (activity #" + str(
                    task.activity_index) + ") " + " (iteration # " + str(runsum) + ")") """
            activity_network.add_edge(
                activities[task.predecessor_array[j]],
                task,
                label=activities[task.predecessor_array[j]].activity_duration,
                weight=activities[task.predecessor_array[j]].activity_duration,
            )
            j = j + 1
            runsum += 1

    """#Debuggery
    i = 1
    for edge in activity_network.edges():
        print(str(i) + " " + str(edge[0]) + " -> " + str(edge[1]) + " " + str(
            activity_network.get_edge_data(edge[0], edge[1])['weight']))
        i += 1  #end_Debuggery """
    print("-------------------------")

    last_s = None
    for s in nx.dag_longest_path(activity_network):
        print(str(s))

        if not (last_s == None):
            activity_network.get_edge_data(last_s, s)["color"] = "red"

        last_s = s
    print("-------------------------")

    happy_write_dot(activity_network, "critical_path_in_activity_network.gv")

    """ #Debuggery
Example #28
def compute_attractor_representatives(Primes, Update):
    """
    Computes a representative state for every attractor of the network defined by *Primes* and *Update* if the network's attractors can be approximated
    by its minimal trap spaces, see :ref:`Klarner2015(b) <klarner2015approx>` for details.
    The function first computes all minimal trap spaces.
    If they are complete, univocal and faithful it returns a list of states, each belonging to a different attractor.
    Otherwise it raises an exception.

    .. note::
        If *Update* is *"synchronous"* then it is very likely that the minimal trap spaces are not a perfect approximation and the
        function will hence raise an exception.
        If you want to compute attractors of synchronous STGs we suggest using other tools,
        for example *bns* which was introduced in :ref:`Dubrova2011 <Dubrova2011>`.
    
    **arguments**:
        * *Primes*: prime implicants
        * *Update* (str): the update strategy, one of *"asynchronous"*, *"synchronous"*, *"mixed"*
    
    **returns**:
        * *Representatives* (list of str): each state belongs to a different attractor

    **example**::

            >>> compute_attractor_representatives(primes, "asynchronous")
            ['100','101','111']
    """

    print("function compute_attractor_representatives(..) is not ready yet")
    raise Exception

    assert (Update in ["asynchronous", "synchronous", "mixed"])

    primes = PyBoolNet.PrimeImplicants.copy(Primes)
    constants = PyBoolNet.PrimeImplicants.percolate_and_remove_constants(
        primes)
    oscillating = {}
    igraph = PyBoolNet.InteractionGraphs.primes2igraph(primes)
    outdag = PyBoolNet.InteractionGraphs.find_outdag(igraph)
    PyBoolNet.PrimeImplicants.remove_variables(primes, outdag)

    steadystates = []
    cyclic = []
    stack = []

    stack.append((primes, constants, oscillating))

    while stack:
        primes, constants, oscillating = stack.pop()

        assert (set(oscillating).issubset(set(primes)))
        assert (not set(constants).intersection(set(primes)))

        # stopping criterion
        if len(oscillating) == len(primes):
            primes_global = PyBoolNet.PrimeImplicants.copy(Primes)

            if oscillating == {}:
                PyBoolNet.PrimeImplicants.create_constants(
                    primes_global, constants)
                x = PyBoolNet.PrimeImplicants.percolate_and_remove_constants(
                    primes_global)
                assert (len(x) == len(Primes))
                steadystates.append(
                    PyBoolNet.StateTransitionGraphs.state2str(x))

            else:
                x = PyBoolNet.Utility.Misc.merge_dicts(
                    [constants, oscillating])

                if Update == "synchronous":
                    igraph = PyBoolNet.InteractionGraphs.primes2igraph(
                        primes_global)
                    igraph.remove_nodes_from(x)
                    if igraph:
                        k = len(networkx.dag_longest_path(igraph)) + 1
                        x = PyBoolNet.StateTransitionGraphs.random_state(
                            primes_global, x)
                        for j in range(k):
                            x = PyBoolNet.StateTransitionGraphs.successor_synchronous(
                                primes_global, x)

                else:
                    PyBoolNet.PrimeImplicants.create_constants(
                        primes_global, x)
                    x = PyBoolNet.PrimeImplicants.percolate_and_remove_constants(
                        primes_global)

                cyclic.append(PyBoolNet.StateTransitionGraphs.state2str(x))

            continue

        # find autonomous set
        igraph = PyBoolNet.InteractionGraphs.primes2igraph(primes)
        autoset = PyBoolNet.InteractionGraphs.find_minimal_autonomous_nodes(
            igraph, oscillating).pop()
        autoset_above = PyBoolNet.Utility.DiGraphs.ancestors(igraph, autoset)
        primes_auto = PyBoolNet.PrimeImplicants.copy(primes)

        PyBoolNet.PrimeImplicants.remove_all_variables_except(
            primes_auto, autoset_above)

        # find trapspaces inside autonomous set
        trapspaces = [
            x for x in PyBoolNet.TrapSpaces.trap_spaces(primes_auto, "min")
            if x and set(x).issubset(autoset)
        ]

        # find all new oscillating states
        initial_state = dict(
            (x, y) for x, y in oscillating.items() if x in primes_auto)

        if not trapspaces:
            x = find_attractor_state_by_randomwalk_and_ctl(
                primes_auto, Update, initial_state)
            oscillating_states_new = [x]
        else:
            oscillating_states_new = []

        finished = False
        while not finished:

            init = "INIT %s" % PyBoolNet.QueryPatterns.subspace2proposition(
                primes_auto, initial_state)
            spec = "CTLSPEC %s" % PyBoolNet.QueryPatterns.EF_oneof_subspaces(
                primes_auto, oscillating_states_new + trapspaces)
            answer, counterex = PyBoolNet.ModelChecking.check_primes_with_counterexample(
                primes_auto, Update, init, spec)
            if answer:
                finished = True
            else:
                counterex = counterex[-1]
                x = find_attractor_state_by_randomwalk_and_ctl(
                    primes_auto, Update, counterex)
                oscillating_states_new.append(x)

        # add new oscillating states to stack
        for x in oscillating_states_new:
            stack.append((primes, constants, x))

        # add new constant states to stack
        for trapspace in trapspaces:
            constants_new = PyBoolNet.Utility.Misc.merge_dicts(
                [constants, trapspace])
            primes_new = PyBoolNet.PrimeImplicants.copy(primes)
            PyBoolNet.PrimeImplicants.create_constants(primes_new,
                                                       constants_new)

            a, b = wtf(primes_auto)
            constants_new = PyBoolNet.PrimeImplicants.percolate_and_remove_constants(
                primes_new)
            c, d = wtf(primes_auto)
            if (a and b) and (c and not d):
                print "gotcha 2"
                print b
                print d

            igraph_new = PyBoolNet.InteractionGraphs.primes2igraph(primes_new)
            outdag = PyBoolNet.InteractionGraphs.find_outdag(igraph_new)

            PyBoolNet.PrimeImplicants.remove_variables(primes_new, outdag)

            stack.append((primes_new, constants_new, oscillating))

    return steadystates, cyclic
Example #29
#rosalind_ba3j
import networkx as nx

f = open('rosalind_ba3j.txt')
k, d = [int(i) for i in f.readline().rstrip().split()]
reads = [i.rstrip() for i in f]

G = nx.DiGraph()
n = len(reads)
G.add_nodes_from(range(n))

for i in range(n-1):
    for j in range(i+1, n):
        if reads[i][1:k] == reads[j][:k-1] and reads[i][-k+1:] == reads[j][-k:-1]:
            G.add_edge(i, j)
        if reads[j][1:k] == reads[i][:k-1] and reads[j][-k+1:] == reads[i][-k:-1]:
            G.add_edge(j, i)

ix = nx.dag_longest_path(G)
seq = [reads[i][0] for i in ix] + [reads[i][-1] for i in ix][- 2 * k - d + 1:]

open('rosalind_ba3j_sub.txt', 'wt').write(''.join(seq))
    
Example #30
                                             list_folders, list_files):
                    if ((reference[:2] == '__' or reference[0] != '_') and
                        (file[0] != '_' or file[:2] == '__')) or flag:
                        if file == '__init__.py':
                            name = i + '/__init__'
                        elif file[-3:] == '.py':
                            name = file[:-3]
                        elif file[-4:] == '.pyx':
                            name = file[:-4]
                        else:
                            name = 'error'
                        g.add_edge(name, reference, weight=weight)
                    else:
                        pass

print(nx.dag_longest_path(g))

#nx.draw_networkx(g, node_size=30)
#plt.draw()
#plt.show()
arr = []
path = dict(nx.all_pairs_shortest_path(g))
for start in path:
    for end in path[start]:
        if len(path[start][end]) > 2:
            arr.append((len(path[start][end]), path[start][end]))

nx.write_gml(g, 'tree.txt')

printable = '\n'.join([str(i[1]) for i in sorted(arr)[::-1]])
file = open('paths.txt', 'w+')
Example #31
 def test_weighted(self):
     G = nx.DiGraph()
     edges = [(1, 2, -5), (2, 3, 1), (3, 4, 1), (4, 5, 0), (3, 5, 4),
              (1, 6, 2)]
     G.add_weighted_edges_from(edges)
     assert_equal(nx.dag_longest_path(G), [2, 3, 5])
Example #32
        usage()
        sys.exit(1)
    fname = sys.argv[1]
    with open(fname) as f:
        graph = nx.DiGraph()
        for line in f:
            tweet = json.loads(line)
            if 'id' in tweet:
                graph.add_node(tweet['id'],
                               tweet=tweet['text'],
                               author=tweet['user']['screen_name'],
                               created_at=tweet['created_at'])
                if tweet['in_reply_to_status_id']:
                    reply_to = tweet['in_reply_to_status_id']
                    if tweet['in_reply_to_status_id'] in graph \
                    and tweet['user']['screen_name'] != graph.node[reply_to]['author']:
                        graph.add_edge(tweet['in_reply_to_status_id'], tweet['id'])
        print(nx.info(graph))

        sorted_replied = sorted(graph.degree_iter(), key=itemgetter(1), reverse=True)
        most_replied_id, replies = sorted_replied[0]
        print("Most replied tweet ({} replies):".format(replies))
        print(graph.node[most_replied_id])

        print("Longest discussion:")
        longest_path = nx.dag_longest_path(graph)
        for tweet_id in longest_path:
            node = graph.node[tweet_id]
            print("{} (by {} at {})".format(node['tweet'],
                                            node['author'],
                                            node['created_at']))
Example #33
File: paths.py  Project: petrushev/graphx
def longestPath(request, graph):
    try:
        path_ = nx.dag_longest_path(graph)
    except NetworkXUnfeasible as err:
        return request.respondJson({'message': str(err)}, NOT_FOUND)
Example #34
def assembly(graph):
    R = nx.dag_longest_path(nx.DiGraph(graph),
                            weight='weight',
                            default_weight=1)
    return R[0] + ''.join([x[-1] for x in R[1:]])
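
A hypothetical call to the assembly function above, with reads that overlap by all but one character:

overlaps = {"ATG": ["TGC"], "TGC": ["GCA"]}  # nx.DiGraph accepts a dict-of-lists adjacency
print(assembly(overlaps))  # ATGCA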