def vote(src, dest, intent):
    path = []
    confidence = 0
    if intent == 0:  # Good guy, non-malicious: report the real shortest path and its weight
        path = netshortestpath(src, dest)
        confidence = get_weight(path)
        # path, confidence = BestBottleneckPath(src, dest)
        return (path, confidence)
    if intent == 1:  # Evil guy, malicious: pick the heaviest simple path
        for paths in nx.all_simple_paths(graph_extern, src, dest):
            temp = get_weight(paths)
            if temp > confidence:
                confidence = temp
                path = copy.deepcopy(paths)
        # but report a confidence of (shortest path weight - 1)
        return (path, get_weight(netshortestpath(src, dest)) - 1)
    if intent == 2:  # Good guy, mis-configured: settle for the first simple path found
        for paths in nx.all_simple_paths(graph_extern, src, dest):
            temp = get_weight(paths)
            if temp > confidence:
                confidence = temp
                path = copy.deepcopy(paths)
            break
        return (path, confidence)
def test_all_simple_paths_multigraph():
    G = nx.MultiGraph([(1, 2), (1, 2)])
    paths = nx.all_simple_paths(G, 1, 1)
    assert_equal(paths, [])
    nx.add_path(G, [3, 1, 10, 2])
    paths = nx.all_simple_paths(G, 1, 2)
    assert_equal(set(tuple(p) for p in paths), {(1, 2), (1, 2), (1, 10, 2)})
def find_sg_multiple(G,inode_list,onode_list): #print 'Input node list:',inode_list #print 'Output node list:', onode_list #start = inode_list[0] oldreltypes = ['s','n','si','ni','sn','sni'] for stop in onode_list: reltypes = [] for start in inode_list: #print 'start is:', start, 'stop is:', stop roads = nx.all_simple_paths(G,start,stop) roads = list(roads) path_list = sorted(roads, lambda x,y: 1 if len(x)>len(y) else -1 if len(x)<len(y) else 0) if len(path_list)>1: if start==stop: route = path_list[1] else: route = path_list[0] else: continue prev_rel = 'sn' #print 'Looking at path',route for i in range(len(route)-1): etype = G[route[i]][route[i+1]]['edge_attr'] rel = path.add(prev_rel,etype) if rel == None: #print 'Path cannot be added at',route[i],'to',route[i+1] srel = sg_add(prev_rel,etype) regs = G.predecessors(route[i+1]) if len(regs)<=1: rel = None for regulator in regs: if regulator == route[i]: continue found = False for source in inode_list: if not nx.has_path(G,source,regulator): continue for p in nx.all_simple_paths(G,source,regulator): ptype = path.path_type(G,p) rtype = G[regulator][route[i+1]]['edge_attr'] if sg_add(ptype,rtype) == srel: found = True break if found: break if found: continue else: rel = None rel = srel prev_rel = rel reltypes.append(rel) #print 'for end point',stop,'stored relationships are:',reltypes oldreltypes = list(set(reltypes) & set(oldreltypes)) return oldreltypes
def test_all_simple_paths_on_non_trivial_graph():
    ''' you may need to draw this graph to make sure it is reasonable '''
    G = nx.path_graph(5, create_using=nx.DiGraph())
    G.add_edges_from([(0, 5), (1, 5), (1, 3), (5, 4), (4, 2), (4, 3)])
    paths = nx.all_simple_paths(G, 1, [2, 3])
    assert_equal(set(tuple(p) for p in paths), {
        (1, 2), (1, 3, 4, 2), (1, 5, 4, 2),
        (1, 3), (1, 2, 3), (1, 5, 4, 3), (1, 5, 4, 2, 3)})
    paths = nx.all_simple_paths(G, 1, [2, 3], cutoff=3)
    assert_equal(set(tuple(p) for p in paths), {
        (1, 2), (1, 3, 4, 2), (1, 5, 4, 2),
        (1, 3), (1, 2, 3), (1, 5, 4, 3)})
    paths = nx.all_simple_paths(G, 1, [2, 3], cutoff=2)
    assert_equal(set(tuple(p) for p in paths), {(1, 2), (1, 3), (1, 2, 3)})
def get_routes(origin, destin, mode, days, hours, weights=WEIGHTS): key = (origin, destin, mode, days, hours) if key in ROUTES: routes = ROUTES[key] else: start = time() graph = get_graph(mode, hours) ncity = NUMCITIES[days, hours] print('INFO: Graph was built in {0:.3f}s' .format(time() - start)) start = time() routes = list(nx.all_simple_paths(graph, source=origin, target=destin, cutoff=ncity)) if not routes: while not routes and ncity < days: print hours if hours == 4: hours = 7 graph = get_graph(mode, hours) ncity = NUMCITIES[days, hours] elif hours == 7: hours = 10 graph = get_graph(mode, hours) ncity = NUMCITIES[days, hours] else: ncity += 1 routes = list(nx.all_simple_paths(graph, source=origin, target=destin, cutoff=ncity)) else: roundway = origin == destin # print 'MAX: ', days, hours, '->', max((len(r) - roundway for r in routes)) if hours > 4: routes.extend(get_routes(origin, destin, mode, days, 4, weights)[0]) # print 'MAX: ', days, hours, '->', max((len(r) - roundway for r in routes)) elif hours > 7: routes.extend(get_routes(origin, destin, mode, days, 7, weights)[0]) # print 'MAX: ', days, hours, '->', max((len(r) - roundway for r in routes)) print('INFO: {} routes were calculated in {:.3f}' .format(len(routes), time() - start)) start = time() routes = filter_routes(routes) print('INFO: {} routes were selected in {:.3f}' .format(len(routes), time() - start)) ROUTES[key] = routes return routes, hours
def find_directed_paths(request, project_id=None): """ Given a set of two or more skeleton IDs, find directed paths of connected neurons between them, for a maximum inner path length as given (i.e. origin and destination not counted). A directed path means that all edges are of the same kind, e.g. presynaptic_to. """ sources = set(int(v) for k,v in request.POST.iteritems() if k.startswith('skeleton_ids[')) if len(sources) < 2: raise Exception('Need at least 2 skeleton IDs to find directed paths!') path_length = int(request.POST.get('n_circles', 1)) cursor = connection.cursor() mins, relations = _clean_mins(request, cursor, int(project_id)) presynaptic_to = relations['presynaptic_to'] graph = nx.DiGraph() next_sources = sources all_sources = sources length = path_length def rev_args(fn): def f(arg1, arg2): fn(arg2, arg1) return f # Create a graph by growing the sources while length > 0 and next_sources: length -= 1 next_circles = _next_circle(next_sources, cursor) next_sources = set() for skid1, c in next_circles.iteritems(): for relationID, targets in c.iteritems(): threshold = mins[relationID] add_edge = graph.add_edge if relationID == presynaptic_to else rev_args(graph.add_edge) for skid2, count in targets.iteritems(): if count < threshold: continue add_edge(skid1, skid2) next_sources.add(skid2) next_sources = next_sources - all_sources all_sources = all_sources.union(next_sources) # Find all directed paths between all pairs of inputs unique = set() for start, end in combinations(sources, 2): for paths in [nx.all_simple_paths(graph, start, end, path_length + 1), nx.all_simple_paths(graph, end, start, path_length + 1)]: for path in paths: for node in path: unique.add(node) skeleton_ids = tuple(unique - sources) return HttpResponse(json.dumps([skeleton_ids, _neuronnames(skeleton_ids, project_id)]))
def unitigs(args):
    """
    %prog unitigs best.edges

    Reads Celera Assembler's "best.edges" and extract all unitigs.
    """
    p = OptionParser(unitigs.__doc__)
    p.add_option("--maxerr", default=2, type="int", help="Maximum error rate")
    opts, args = p.parse_args(args)

    if len(args) != 1:
        sys.exit(not p.print_help())

    bestedges, = args
    G = read_graph(bestedges, maxerr=opts.maxerr, directed=True)
    H = nx.Graph()
    intconv = lambda x: int(x.split("-")[0])
    for k, v in G.iteritems():
        if k == G.get(v, None):
            H.add_edge(intconv(k), intconv(v))

    nunitigs = nreads = 0
    for h in nx.connected_component_subgraphs(H, copy=False):
        st = [x for x in h if h.degree(x) == 1]
        if len(st) != 2:
            continue
        src, target = st
        path = list(nx.all_simple_paths(h, src, target))
        assert len(path) == 1
        path, = path
        print "|".join(str(x) for x in path)
        nunitigs += 1
        nreads += len(path)
    logging.debug("A total of {0} unitigs built from {1} reads.".format(nunitigs, nreads))
def _singleSegment(self, nodes):
    # disjoint line or a bent line at 45 degrees appearing as a dichotomous tree, but an error due to
    # improper binarization, so remove them and do not account for statistics
    listOfPerms = list(itertools.combinations(nodes, 2))
    if type(nodes[0]) == int:
        modulus = [[start - end] for start, end in listOfPerms]
        dists = [abs(i[0]) for i in modulus]
    else:
        dims = len(nodes[0])
        modulus = [[start[dim] - end[dim] for dim in range(0, dims)] for start, end in listOfPerms]
        dists = [sum(modulus[i][dim] * modulus[i][dim] for dim in range(0, dims)) for i in range(0, len(modulus))]
    if len(list(nx.articulation_points(self._subGraphSkeleton))) == 1 and set(dists) != 1:
        # each node is connected to one or two other nodes which are not a distance of 1, which implies
        # there is one branch point with two end points in a single dichotomous tree
        for sourceOnTree, item in listOfPerms:
            if nx.has_path(self._subGraphSkeleton, sourceOnTree, item) and sourceOnTree != item:
                simplePaths = list(nx.all_simple_paths(self._subGraphSkeleton, source=sourceOnTree, target=item))
                simplePath = simplePaths[0]
                countBranchNodesOnPath = sum([1 for point in simplePath if point in nodes])
                if countBranchNodesOnPath == 2:
                    curveLength = self._getLengthAndRemoveTracedPath(simplePath)
                    self.isolatedEdgeInfoDict[sourceOnTree, item] = curveLength
    else:
        # each node is connected to one or two other nodes implies it is a line
        endPoints = [k for (k, v) in self._nodeDegreeDict.items() if v == 1]
        sourceOnLine = endPoints[0]
        targetOnLine = endPoints[1]
        simplePath = nx.shortest_path(self._subGraphSkeleton, source=sourceOnLine, target=targetOnLine)
        curveLength = self._getLengthAndRemoveTracedPath(simplePath)
        self.isolatedEdgeInfoDict[sourceOnLine, targetOnLine] = curveLength
def linkpath_incidence(graph):
    """
    get link-path incidence matrix

    Parameters
    ----------
    graph: graph object

    Return value
    ------------
    C: matrix of incidence link-path
    """
    nnode, nlink = graph.numnodes, graph.numlinks
    npair = len(graph.ODs.keys())
    G = nx.DiGraph()
    G.add_nodes_from(graph.nodes.keys())
    G.add_edges_from([(key[0], key[1]) for key in graph.links.keys()])
    indcol = -1
    # C = np.ones((nlink, npair*nlink))
    entries, I, J = [], [], []
    for OD in graph.ODs.itervalues():
        for nodes_on_path in nx.all_simple_paths(G, OD.o, OD.d):
            graph.add_path_from_nodes(nodes_on_path)
            indcol += 1
            for u, v, route in graph.links.keys():
                indrow = graph.indlinks[(u, v, route)]
                for i in xrange(len(nodes_on_path) - 1):
                    if np.array_equal([u, v], [nodes_on_path[i], nodes_on_path[i + 1]]):
                        entries.append(1.0); I.append(indrow), J.append(indcol)
    C = spmatrix(entries, I, J, (nlink, indcol + 1))
    return C
def test_shortest_simple_paths():
    G = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted")
    paths = nx.shortest_simple_paths(G, 1, 12)
    assert_equal(next(paths), [1, 2, 3, 4, 8, 12])
    assert_equal(next(paths), [1, 5, 6, 7, 8, 12])
    assert_equal([len(path) for path in nx.shortest_simple_paths(G, 1, 12)],
                 sorted([len(path) for path in nx.all_simple_paths(G, 1, 12)]))
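# The test above relies on shortest_simple_paths yielding simple paths in
# non-decreasing length order, which is why its lengths match the sorted
# lengths from all_simple_paths. A minimal standalone sketch (the toy graph
# and the k value are illustrative assumptions) of taking only the k shortest
# simple paths instead of enumerating everything:
import itertools
import networkx as nx

G = nx.grid_2d_graph(3, 3)
source, target = (0, 0), (2, 2)

# shortest_simple_paths is a generator ordered by path length, so islice
# returns the k best paths without materializing every simple path
k_best = list(itertools.islice(nx.shortest_simple_paths(G, source, target), 3))
for path in k_best:
    print(len(path) - 1, path)  # hop count followed by the path itself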
def pathOD_incidence(graph, haspath=False): """ get path-OD incidence matrix Parameters ---------- graph: graph object haspath: if True, simple has already been set before Return value ------------ C: matrix of incidence path-OD """ nnode, nlink = graph.numnodes, graph.numlinks npair = len(graph.ODs.keys()) G = nx.DiGraph() G.add_nodes_from(graph.nodes.keys()) G.add_edges_from([(key[0],key[1]) for key in graph.links.keys()]) if not haspath: for OD in graph.ODs.itervalues(): for nodes_on_path in nx.all_simple_paths(G, OD.o, OD.d): graph.add_path_from_nodes(nodes_on_path) npath = len(graph.paths) entries, I, J = [], [], [] for OD in graph.ODs.itervalues(): u = OD.o; v=OD.d indcol = graph.indods[(u,v)] for path in graph.paths.iterkeys(): if (u == path[0]) and (v == path[-1]): indrow = graph.indpaths[path] entries.append(1.0); I.append(indrow), J.append(indcol) C = spmatrix(entries, I, J, (npath, npair)) return C
def write_dcbusspec2(G,buslist,genlist): """Writes specifications for dc bus unpowered conditions Parameters ---------- G : networkX graph buslist : list of all dc buses genlist : list of all generators """ paths = [] temp = [] edges = [] D = copy.deepcopy(G) gens2 = copy.deepcopy(gens) for i in buslist: f.write('guarantees += '"'"'&\\n\\t[](!((0=1)') for j in genlist: gens2.remove(j) D.remove_nodes_from(gens2) for path in nx.all_simple_paths(D,i,j): paths.append(path) f.write(' | (B' + str(i) + str(j) + str(len(paths)-1)+')') paths = [] gens2 = copy.deepcopy(gens) D = copy.deepcopy(G) f.write(') -> (b'+str(i)+'=0))'"'"'\n')
def write_sat_dcbusprop2(G,buslist, genlist): H = copy.deepcopy(G) for i in buslist: f.write('(assert (=> (not (or ') for j in genlist: gen_temp = copy.deepcopy(gens) gen_temp.remove(j) H.remove_nodes_from(gen_temp) for path in nx.all_simple_paths(H,i,j): f.write(' (and') for k in range(1,len(path)): if path[k] in gens: f.write(' (= g'+str(path[k])+' true)') elif path[k] in busac: f.write(' (= b'+str(path[k])+' true)') elif path[k] in null: f.write(' (= b'+str(path[k])+' true)') elif path[k] in rus: f.write(' (= r'+str(path[k])+' true)') for m in range(0,len(path)-1): if path[m] in busdc and path[m+1] in rus: pass elif path[m] in rus and path[m+1] in busdc: pass elif path[m] < path[m+1]: f.write(' (= c'+str(path[m])+str(path[m+1])+' true)') elif path[m+1] < path[m]: f.write(' (= c'+str(path[m+1])+str(path[m])+' true)') f.write(')') H = copy.deepcopy(G) f.write(')) (= b'+str(i)+' false)))\n')
def _generate_path(self, topo, src_mac, dst_mac, src_port, dst_port, src_dpid, dst_dpid): """Generate path method.""" net = nx.DiGraph(data=topo) net.add_node(src_mac) net.add_node(dst_mac) net.add_edge(int(src_dpid), src_mac, {'port': int(src_port)}) net.add_edge(src_mac, int(src_dpid)) net.add_edge(int(dst_dpid), dst_mac, {'port': int(dst_port)}) net.add_edge(dst_mac, int(dst_dpid)) target_path = None try: path = nx.shortest_path(net, src_mac, dst_mac) path2 = nx.shortest_path(net, src_mac, dst_mac) path2.pop() path2.pop(0) list_load = check_switch_load(path2, data_collection.switch_stat, constant.load_limitation) if len(list_load) > 0: # print 'lui', list_load all_paths = nx.all_simple_paths(net, src_mac, dst_mac) path_list = list(all_paths) target_path_index, target_path_cost = calculate_least_cost_path(path_list, data_collection.switch_stat, net) target_path = path_list[target_path_index] else: target_path = path print 'tarrr', target_path except Exception: target_path = None return target_path
def dcbusspec(G, busno, gen):
    """Creates specifications for when DC bus gets powered

    Parameters
    ----------
    G : networkX graph
    busno : node
        dc bus
    gen : node
        generator
    """
    paths = []
    C = []
    temp = []
    edges = []
    D = copy.deepcopy(G)
    gens2 = copy.deepcopy(gens)
    gens2.remove(gen)
    D.remove_nodes_from(gens2)
    for path in nx.all_simple_paths(D, busno, gen, cutoff=None):
        paths.append(path)
    for j in range(0, len(paths)):
        f.write('guarantees += '"'"'&\\n\\t[]((B' + str(busno) + str(gen) + str(j) + ') -> (b' + str(busno) + '=1))'"'"'\n')
def get_maximal_path_length(self):
    """
    Get the maximal path length from all simple paths that traverse the
    projection graph from one leaf node to another.

    .. note::
       This measure is sensitive to the resolution of a projection,
       the same way the length of a coastline is sensitive to the resolution.

    .. warning::
       Whether this code will stay in the library or not depends on future
       evaluation of the usefulness of this and similar descriptors.
    """
    import networkx as nx
    maxl = 0
    for i, node1 in enumerate(self.proj_graph.nodes()):
        for j, node2 in enumerate(self.proj_graph.nodes()):
            if j <= i:
                continue
            all_paths = nx.all_simple_paths(self.proj_graph, node1, node2)
            for path in all_paths:
                l = self._get_path_length(path)
                if l > maxl:
                    maxl = l
    return maxl
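# Enumerating every simple path between every pair of nodes is exponential in
# the worst case, which is what the warning above hints at. A hedged,
# standalone sketch (the toy graph and the cutoff value are assumptions, not
# part of the library code above) of bounding the same measurement with the
# cutoff argument of all_simple_paths:
from itertools import combinations
import networkx as nx

def bounded_max_path_length(graph, cutoff=6):
    # cutoff caps the path length in edges, trading exactness for bounded work;
    # lengths are counted in hops here, unlike _get_path_length above, which may
    # use a geometric length
    maxl = 0
    for u, v in combinations(graph.nodes(), 2):
        for path in nx.all_simple_paths(graph, u, v, cutoff=cutoff):
            maxl = max(maxl, len(path) - 1)
    return maxl

print(bounded_max_path_length(nx.petersen_graph()))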
def GetLocalBridge(g, topic, windowsize, date):
    '''
    Iterate over the edges, decide whether each edge is a local bridge (shortcut),
    and store the top 10 with the largest span in the database.
    '''
    localbridge = []
    edges_list = g.edges()
    for (a, b) in edges_list:
        a_n = set(g.neighbors(a))
        b_n = set(g.neighbors(b))
        l_a_n = len(a_n)
        l_b_n = len(b_n)
        l_ab_n = len(a_n & b_n)
        if (l_a_n != 1) & (l_b_n != 1) & (l_ab_n == 0):
            paths_list = nx.all_simple_paths(g, source=a, target=b)
            span_ab = 0
            len_path = 0
            for path in paths_list:
                len_path += 1
                if len(path) > span_ab:
                    span_ab = len(path)
            if len_path == 1:
                span_ab = 10000  # no other path connects a and b once the single edge is removed, so mark the span as 10000
            localbridge.append((a, b, span_ab, l_a_n, l_b_n))
    SaveLocalBridge(topic, date, windowsize, localbridge)  # store the localbridge list
def genPaths(self):
    """
    Generates our list of possible paths between two nodes in the network.
    This routine takes advantage of the all_simple_paths feature of networkx.
    I attempted to use the Floyd Warshall algorithm to reconstruct the paths,
    but was unsuccessful. Luckily, open source scientific software that solves
    the problem at hand exists.
    """
    # check whether the user has networkx
    try:
        import networkx as nx
    # tell them how to get it if they don't have it
    except:
        tkMessageBox.showerror('Missing Package', 'In order to use this feature you must install NetworkX from http://networkx.lanl.gov\nUse \'sudo easy_install networkx\' to install the package')
    # make G our graph
    G = nx.Graph()
    # add our edges to the graph
    G.add_edges_from(self.edges)
    # generate our paths using all_simple_paths
    # http://networkx.lanl.gov/reference/generated/networkx.algorithms.simple_paths.all_simple_paths.html
    paths = nx.all_simple_paths(G, source=int(self.vEntry1.get()), target=int(self.vEntry2.get()), cutoff=self.n)
    # bring it back to list format so it can be used
    self.paths = list(paths)
def find_same_level_regions(region):
    # Load graph
    graph = load_graph()
    if not graph:
        logger.error("Can't trace multiple regions: Region call graph not available.\n\
                      Run cere profile.\n\
                      Tracing region {0}".format(region))
        return

    # Find region node id
    region_node = get_region_id(region, graph)

    # Find roots
    roots = [n for n, d in graph.in_degree().items() if d == 0]

    # Compute, for every node, the max distance from itself to each root
    max_path_len = {}
    for root in roots:
        for n, d in graph.nodes(data=True):
            if n not in max_path_len:
                max_path_len[n] = 0
            # Get every path from the current root to the node
            paths = list(nx.all_simple_paths(graph, root, n))
            # Keep the max length path
            for path in paths:
                if len(path) - 1 > max_path_len[n]:
                    max_path_len[n] = len(path) - 1

    # Keep regions which have the same depth as the requested region
    for n, p in max_path_len.iteritems():
        if p == max_path_len[region_node]:
            yield graph.node[n]['_name']
def find_simple_path(self, nodes):
    if len(nodes) <= 1:
        return []
    elif len(nodes) == 2:
        if len([1 for n in nodes if self._degree(n) == 1]) == 2:
            return sorted(list(nodes))
        else:
            return []
    # if this set of nodes has a single path through it,
    # there should be no nodes of degree 3 or more. We want
    # to know the start and end points, which are the nodes
    # of degree 1
    degree_one_nodes = set()
    for node in nodes:
        degree = self._degree(node)
        if degree > 2:
            return []
        elif degree == 1:
            degree_one_nodes.add(node)
    assert len(degree_one_nodes) == 2
    node1, node2 = list(degree_one_nodes)
    path = list(networkx.all_simple_paths(self.graph, node1, node2))
    assert len(path) == 1
    if self.simple_path_is_consistent(path[0]):
        # not really necessary, but makes unit testing easier
        if path[0][0] < path[0][-1]:
            return path[0]
        else:
            return path[0][::-1]
    else:
        return []
def CFEC(s, t, mip):
    R = nx.all_simple_paths(mip, s, t, cutoff=8)
    proximity = 0.0
    for r in R:
        # check whether the degree makes a difference, or is it the same for all paths??
        PathWeight = mip.degree(r[0]) * PathProb(r, mip)
        proximity = proximity + PathWeight
    return proximity
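# PathProb is not shown in the snippet above. In cycle-free effective
# conductance (CFEC) style measures, a path's probability is usually the
# random-walk probability of following it, i.e. the product of 1/degree over
# the nodes stepped out of. The sketch below is a hypothetical stand-in under
# that assumption, not the author's actual helper:
import networkx as nx

def path_prob(path, graph):
    # hypothetical PathProb: product of 1/degree over every node we step out of
    prob = 1.0
    for node in path[:-1]:
        prob *= 1.0 / graph.degree(node)
    return prob

def cfec(s, t, graph, cutoff=8):
    # same shape as CFEC above: degree(source) * path probability, summed over
    # bounded-length simple paths
    return sum(graph.degree(r[0]) * path_prob(r, graph)
               for r in nx.all_simple_paths(graph, s, t, cutoff=cutoff))

print(cfec(0, 33, nx.karate_club_graph()))  # toy graph for illustration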
def do_all_paths(self, args):
    "Display all paths between two nodes"
    arglist = args.split(" ")
    if arglist[0] and arglist[1]:
        # Grab the args
        node1 = arglist[0].upper()
        node2 = arglist[1].upper()
    else:
        print "[-] Error: Args Needed"
    # ensure they exist
    if G.has_node(node1) and G.has_node(node2):
        if nx.has_path(G, node1, node2):
            print "[*] All Paths from %s to %s" % (node1, node2)
            # Get all simple paths
            paths = nx.all_simple_paths(G, node1, node2)
            # Print all paths in pretty format
            for p in paths:
                outputpath = "[*] "
                for n in p:
                    outputpath += n + " -> "
                print outputpath[:-4]
        else:
            print "[-] No path exist :("
    else:
        print "[-] Node %s or %s does not exist :(" % (node1, node2)
def number_of_lp_lattice(): for D in [2]: myfile = open(str(D)+'number_lp_lattice', 'w') myfile.write('N_side' + '\t' + 'N'+ '\t'+ 'lp length' + '\t'+ 'no. lp'+ '\n') start_time = time.time() for N_side in range(10,16,1): N = N_side**2 # model = models.COd(2, N) model = models.square_lattice_model(D, N_side) DAG = model[0] extremes = model[1] tr_DAG = tr.trans_red(DAG) lp = dl.lpd(tr_DAG, extremes[1], extremes[0]) length_lp = lp[2] j=0 paths_list = list(nx.all_simple_paths(tr_DAG, extremes[1], extremes[0], cutoff=length_lp+1)) for i in range(len(paths_list)): if len(paths_list[i])==length_lp+1: j+=1 myfile.write(str(N_side) + '\t' + str(N) +'\t' + str(length_lp)+ '\t' + str(j) + '\n') print 'done', N_side elapsed = time.time() - start_time print 'finished.',D,'Dimension. Time elapsed = ',elapsed return
def get_long_walk_score(node1, node2, graph):
    edge_removed = False
    if graph.has_edge(node1, node2):
        edge_removed = True
        edge_data = graph[node1][node2]
        graph.remove_edge(node1, node2)

    all_simple_paths = nx.all_simple_paths(graph, source=node1, target=node2, cutoff=PATH_LENGTH_CUTOFF)

    output = 0
    num_paths = 0
    for path in all_simple_paths:
        path_score = 1
        for loc1, loc2 in zip(*(path[i:] for i in [0, 1])):  # for every connected pair
            dat = graph[loc1][loc2]
            num_lines = dat["first_user_num_lines"] + dat["second_user_num_lines"]
            path_score *= (1 - 1 / float(num_lines + 1))
        output = max(output, path_score)
        num_paths += 1

    if edge_removed:
        graph.add_edge(node1, node2, attr_dict=edge_data)
    return output
def get_ontology_paths(basic_ontology, from_type, to_obj):
    """
    type-to-type ontology path

    :param ontology:
    :param from_type:
    :param to_obj:
    :return:
    """
    assert from_type.name in basic_ontology.types
    assert to_obj.type.name in basic_ontology.types

    graph = basic_ontology.ontology_graph.copy()
    assert isinstance(graph, nx.DiGraph)

    for function in basic_ontology.functions.values():
        if function.valence > 1:
            graph.remove_node(function.id)

    """
    if from_type is to_obj:
        paths = [cycle for cycle in nx.simple_cycles(basic_ontology.ontology_graph) if from_type.id in cycle]
    """
    if not nx.has_path(graph, from_type.id, to_obj.type.id):
        paths = []
    elif from_type == to_obj.type:
        paths = [[from_type.id]]
    else:
        paths = list(nx.all_simple_paths(graph, from_type.id, to_obj.type.id))

    path_dict = {key: OntologyPath(basic_ontology, [basic_ontology.get_by_id(id_) for id_ in path] + [to_obj], key)
                 for key, path in enumerate(paths)}
    return path_dict
def slice_graph(graph, node, frontier, include_frontier=False):
    """
    Generate a slice of the graph from the head node to the given frontier.

    :param networkx.DiGraph graph: The graph to work on.
    :param node: The starting node in the graph.
    :param frontier: A list of frontier nodes.
    :param bool include_frontier: Whether the frontier nodes are included in the slice or not.
    :return: A subgraph.
    :rtype: networkx.DiGraph
    """
    subgraph = networkx.DiGraph()
    for frontier_node in frontier:
        for simple_path in networkx.all_simple_paths(graph, node, frontier_node):
            for src, dst in zip(simple_path, simple_path[1:]):
                if include_frontier or (src not in frontier and dst not in frontier):
                    subgraph.add_edge(src, dst)
    if not list(subgraph.nodes):
        # HACK: FIXME: for infinite loop nodes, this would return an empty set, so we include the loop body itself
        # Make sure this makes sense (EDG thinks it does)
        if (node, node) in graph.edges:
            subgraph.add_edge(node, node)
    return subgraph
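# A small usage sketch of the slicing idea above (it assumes slice_graph as
# defined here is in scope; the toy graph is illustrative):
import networkx

# control-flow-like toy graph: 0 -> 1 -> 2 -> 4, plus a branch 1 -> 3 that
# never reaches the frontier node 4
graph = networkx.DiGraph([(0, 1), (1, 2), (2, 4), (1, 3)])

# keep only the edges that lie on some simple path from node 0 to the frontier {4}
sliced = slice_graph(graph, 0, [4], include_frontier=True)
print(sorted(sliced.edges()))  # [(0, 1), (1, 2), (2, 4)] -- the branch to 3 is dropped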
def least_overlapping_backup_path(G):
    for n, nattr in G.nodes(data=True):
        pp = nattr['primary_paths']
        bp = nattr['backup_paths']
        bnh = nattr['backup_next_hop']
        print "\nCalculating Node = %d" % n
        for m, mattr in G.nodes(data=True):
            if m == n:
                pp.append([])
                bp.append([])
                bnh[m] = None
            else:
                ppx = nx.shortest_path(G, source=n, target=m)
                pp.append(ppx)
                pm = nx.all_simple_paths(G, source=n, target=m)
                # random.shuffle(pm)
                bp.append(get_single_backup(ppx, pm))
                bnh[m] = (bp[m][1])
                sys.stdout.write('.')
                # print "Path Matrix of %d to %d = %s" % (n, m, pm)
                # print "Primary_path of %d to %d = %s" % (n, m, pp[m])
                # print "Backup_path of %d to %d = %s" % (n, m, bp[m])
                # print "Backup_next_hop of %d to %d = %.0f" % (n, m, bnh[m])
        nattr['primary_paths'] = pp
        nattr['backup_paths'] = bp
        nattr['backup_next_hop'] = bnh
def get_all_simple_path(self, source, dest):
    """Get all simple paths from one point to another.

    Return
    ------
    Return a networkX list of paths
    """
    source_node = None
    dest_node = None
    if source:
        for i in self.subnet_list:
            if Ip.ListContains([i], source):
                source_node = i
                break
    if dest:
        for i in self.subnet_list:
            if Ip.ListContains([i], dest):
                dest_node = i
                break
    if not source or not dest:
        for node in self.graph.nodes(data=True):
            if node[1]['object'].marker_type == 'from':
                source_node = node[0]
            if node[1]['object'].marker_type == 'to':
                dest_node = node[0]
    if not source_node or not self.multidigraph.has_node(source_node)\
            or not dest_node or not self.multidigraph.has_node(dest_node):
        raise
    return nx.all_simple_paths(self.multidigraph, source_node, dest_node)
def get_assemblie(G, read_db):
    contigs = {}
    if len(G.nodes()) > 1:
        starting_nodes = [n for n in G.nodes() if G.in_degree(n) == 0]
        ending_nodes = [n for n in G.nodes() if G.out_degree(n) == 0]
        paths = []
        for start_node in starting_nodes:
            for end_node in ending_nodes:
                two_nodes_paths = nx.all_simple_paths(G, start_node, end_node)
                for path in two_nodes_paths:
                    print path
                    contig_key = 'contig_' + ':'.join(path)
                    contigs[contig_key] = read_db[path[0]]
                    for idx in range(1, len(path)):
                        prev, current = path[idx - 1], path[idx]
                        seq = read_db[current]
                        # pdb.set_trace()
                        overlap = int(G[prev][current]["label"])
                        contigs[contig_key] += seq[overlap:]
                    # contigs.append(contig)
    else:
        contig_key = 'contig_' + G.nodes()[0]
        contigs[contig_key] = read_db[G.nodes()[0]]
    return contigs
def _prune_states(K, graph, source, sink):
    """
    Removes cycles and redundant nodes (that are not reachable
    from source) from the subgraph of graph defined by the nodes in K.
    """
    # Create a subgraph with the nodes now in K.
    # Find and remove cycles by deleting the edge between
    # the second to last node and the last node of the cycle,
    # thus keeping nodes that may be important
    # to the trust calculation.
    subgraph = graph.subgraph(K)
    cycles = nx.simple_cycles(subgraph)
    if cycles:
        for cycle in cycles:
            subgraph.remove_edges_from([(cycle[-2], cycle[-1])])

    # Get all paths from source to sink without cycles and redundant nodes
    simple_paths = list(nx.all_simple_paths(G=graph, source=source, target=sink))
    relevant_nodes = set(chain.from_iterable(simple_paths))

    # Remove nodes no longer used (not in simple_paths)
    for n in K:
        if n not in relevant_nodes:
            subgraph.remove_node(n)

    return subgraph
digraph.remove_edge('origin', 'clk') if search: index = search.group(1) digraph.remove_edge('d' + str(index), 'q' + str(index)) d.append('d' + str(index)) q.append('q' + str(index)) file = open("Timing_Paths_SD1.txt", "w") file1 = open("CriticalPath_SD8.txt", "w") file1.write("Longest Path: " + str(path) + "\n\n") file1.write("Delay: " + str(distance)) file.write("\nInput to Output Paths: \n ") for path in nx.all_simple_paths(digraph, source='origin', target='endpoint'): #print(path) out = str(path).replace(",", "->") out = out.replace("[", "") out = out.replace("]", "") out = out.replace("'", " ") file.write("\n" + out + "\n") if not nx.all_simple_paths(digraph, source='origin', target='endpoint'): file.write("NONE \n") file.write("\nReg to Output Paths: \n") for i in q: for path in nx.all_simple_paths(digraph, source=i, target='endpoint'): out = str(path).replace(",", "->")
ff = open(f,'wr+') ffr = open(rf,'wr+') edgeNum = 1000000 edgeCount = 0 Grev = G.reverse() elist = G.edges() #find those edges G_int = nx.convert_node_labels_to_integers(G,label_attribute='old_name') #transfer node names to numbers intnlist = G_int.nodes() for e in elist: Gmini=nx.DiGraph() #initiate a mini subgraph #Now begin to calculate paths from src to dst path = nx.all_simple_paths(G, source=e[0], target=e[1], cutoff = MaxHop) #filter out all paths from src to dst with in MaxHop levels = nx.get_edge_attributes(G, 'level') for p in path: ff.write("{0}\n".format(p)) for i in range(0,len(p)-1): ff.write("{0},{1}\n".format(p[i],p[i+1])) #Use the paths obtained above to form a sub graph Gmini.add_edge(p[i],p[i+1],level=levels[(p[0], p[1])],color='red') if Gmini: Nnum = Gmini.number_of_nodes() Enum = Gmini.number_of_edges() GminiR = Gmini.reverse() #transfer edge attributes of 1 hop ground truth to opinion
def count_paths(G, edges):
    num_paths = 0
    for v1, v2 in edges:
        for _ in nx.all_simple_paths(G, v1, v2):
            num_paths += 1
    return num_paths
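# all_simple_paths returns a lazy generator, so the inner loop above is only
# counting items. An equivalent, slightly more compact formulation (a sketch,
# not the project's code):
import networkx as nx

def count_paths_compact(G, edges):
    # sum(1 for _ in gen) counts generator items without building a list
    return sum(1 for v1, v2 in edges
                 for _ in nx.all_simple_paths(G, v1, v2))

# usage on a toy graph: two simple paths from 0 to 2 around a 4-cycle
print(count_paths_compact(nx.cycle_graph(4), [(0, 2)]))  # prints 2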
def get_all_paths_to(self, target_node):
    linear_paths = []
    for unlabeled_path in nx.all_simple_paths(self.atoms, self.start_node, target_node):
        self.add_previously_unseen_plans(linear_paths, unlabeled_path)
    return linear_paths
import function as fn
import csv
import sys
import networkx as nx

# Initialize g to a graph read from the csv, and lists that will hold paths
g = nx.Graph()
all_paths = []
s = []
the_graph = sys.argv[1]
g = fn.input_csv(g, the_graph)

# Ask for user inputs
A = int(input("Enter A: "))
B = int(input("Enter B: "))
C = float(input("Enter C: "))
D = int(input("Enter D: "))
Color = input("Enter Color: ")

# Iterate through all paths between A and B in g and store them in a list
for path in nx.all_simple_paths(g, A, B):
    all_paths.append(path)

# Iterate through all paths attained and keep those that satisfy the predicates
for i in all_paths:
    if fn.is_path(i, A, B) and fn.total_weight(i, C, g) and fn.color(i, Color, D, g):
        s.append(i)

fn.output_csv(s)
def compute_path_dsjctn(network, equipment, pathreqlist, disjunctions_list): # pathreqlist is a list of Path_request objects # disjunctions_list a list of Disjunction objects # given a network, a list of requests with the set of disjunction features between # request, the function computes the set of path satisfying : first the disjunction # constraint and second the routing constraint if the request include an explicit # set of elements to pass through. # the algorithm used allows to specify disjunction for demands not sharing source or # destination. # a request might be declared as disjoint from several requests # it is a iterative process: # first computes a list of all shortest path (this may add computation time) # second elaborate the set of path solution for each synchronization vector # third select only the candidates that satisfy all synchronization vectors they belong to # fourth apply route constraints : remove candidate path that do not satisfy the constraint # fifth select the first candidate among the set of candidates. # the example network used in comments has been added to the set of data tests files # define the list to be returned path_res_list = [] # all disjctn must be computed at once together to avoid blocking # 1 1 # eg a----b-----c # |1 |0.5 |1 # e----f--h--g # 1 0.5 0.5 # if I have to compute a to g and a to h # I must not compute a-b-f-h-g, otherwise there is no disjoint path remaining for a to h # instead I should list all most disjoint path and select the one that have the less # number of commonalities # \ path abfh aefh abcgh # \___cost 2 2.5 3.5 # path| cost # abfhg| 2.5 x x x # abcg | 3 x x # aefhg| 3 x x x # from this table abcg and aefh have no common links and should be preferred # even they are not the shortest paths # build the list of pathreqlist elements not concerned by disjunction global_disjunctions_list = [e for d in disjunctions_list for e in d.disjunctions_req ] pathreqlist_simple = [e for e in pathreqlist if e.request_id not in global_disjunctions_list] pathreqlist_disjt = [e for e in pathreqlist if e.request_id in global_disjunctions_list] # use a mirror class to record path and the corresponding requests class Pth: def __init__(self, req, pth, simplepth): self.req = req self.pth = pth self.simplepth = simplepth # step 1 # for each remaining request compute a set of simple path allpaths = {} rqs = {} simple_rqs = {} simple_rqs_reversed = {} for pathreq in pathreqlist_disjt : all_simp_pths = list(all_simple_paths(network,\ source=next(el for el in network.nodes() if el.uid == pathreq.source),\ target=next(el for el in network.nodes() if el.uid == pathreq.destination),\ cutoff=80)) # sort them in km length instead of hop # all_simp_pths = sorted(all_simp_pths, key=lambda path: len(path)) all_simp_pths = sorted(all_simp_pths, key=lambda \ x: sum(network.get_edge_data(x[i],x[i+1])['weight'] for i in range(len(x)-2))) # reversed direction paths required to check disjunction on both direction all_simp_pths_reversed = [] for pth in all_simp_pths: all_simp_pths_reversed.append(find_reversed_path(pth,network)) rqs[pathreq.request_id] = all_simp_pths temp =[] for p in all_simp_pths : # build a short list representing each roadm+direction with the first item # start enumeration at 1 to avoid Trx in the list s = [e.uid for i,e in enumerate(p[1:-1]) \ if (isinstance(e,Roadm) | (isinstance(p[i],Roadm) ))] temp.append(s) # id(s) is unique even if path is the same: two objects with same # path have two different ids allpaths[id(s)] = Pth(pathreq,p,s) 
simple_rqs[pathreq.request_id] = temp temp =[] for p in all_simp_pths_reversed : # build a short list representing each roadm+direction with the first item # start enumeration at 1 to avoid Trx in the list temp.append([e.uid for i,e in enumerate(p[1:-1]) \ if (isinstance(e,Roadm) | (isinstance(p[i],Roadm) ))] ) simple_rqs_reversed[pathreq.request_id] = temp # step 2 # for each set of requests that need to be disjoint # select the disjoint path combination candidates = {} for d in disjunctions_list : dlist = d.disjunctions_req.copy() # each line of dpath is one combination of path that satisfies disjunction dpath = [] for i,p in enumerate(simple_rqs[dlist[0]]): dpath.append([p]) # allpaths[id(p)].d_id = d.disjunction_id # in each loop, dpath is updated with a path for rq that satisfies # disjunction with each path in dpath # for example, assume set of requests in the vector (disjunction_list) is {rq1,rq2, rq3} # rq1 p1: abfhg # p2: aefhg # p3: abcg # rq2 p8: bf # rq3 p4: abcgh # p6: aefh # p7: abfh # initiate with rq1 # dpath = [[p1] # [p2] # [p3]] # after first loop: # dpath = [[p1 p8] # [p3 p8]] # since p2 and p8 are not disjoint # after second loop: # dpath = [ p3 p8 p6 ] # since p1 and p4 are not disjoint # p1 and p7 are not disjoint # p3 and p4 are not disjoint # p3 and p7 are not disjoint for e1 in dlist[1:] : temp = [] for j,p1 in enumerate(simple_rqs[e1]): # allpaths[id(p1)].d_id = d.disjunction_id # can use index j in simple_rqs_reversed because index # of direct and reversed paths have been kept identical p1_reversed = simple_rqs_reversed[e1][j] # print(p1_reversed) # print('\n\n') for k,c in enumerate(dpath) : # print(f' c: \t{c}') temp2 = c.copy() all_disjoint = 0 for p in c : all_disjoint += isdisjoint(p1,p)+ isdisjoint(p1_reversed,p) if all_disjoint ==0: temp2.append(p1) temp.append(temp2) # print(f' coucou {e1}: \t{temp}') dpath = temp # print(dpath) candidates[d.disjunction_id] = dpath # for i in disjunctions_list : # print(f'\n{candidates[i.disjunction_id]}') # step 3 # now for each request, select the path that satisfies all disjunctions # path must be in candidates[id] for all concerned ids # for example, assume set of sync vectors (disjunction groups) is # s1 = {rq1 rq2} s2 = {rq1 rq3} # candidate[s1] = [[p1 p8] # [p3 p8]] # candidate[s2] = [[p3 p6]] # for rq1 p3 should be preferred for pathreq in pathreqlist_disjt: concerned_d_id = [d.disjunction_id for d in disjunctions_list if pathreq.request_id in d.disjunctions_req] # for each set of solution, verify that the same path is used for the same request candidate_paths = simple_rqs[pathreq.request_id] # print('coucou') # print(pathreq.request_id) for p in candidate_paths : iscandidate = 0 for sol in concerned_d_id : test = 1 # for each solution test if p is part of the solution # if yes, then p can remain a candidate for i,m in enumerate(candidates[sol]) : if p in m: if allpaths[id(m[m.index(p)])].req.request_id == pathreq.request_id : test = 0 break iscandidate += test if iscandidate != 0: for l in concerned_d_id : for m in candidates[l] : if p in m : candidates[l].remove(m) # for i in disjunctions_list : # print(i.disjunction_id) # print(f'\n{candidates[i.disjunction_id]}') # step 4 apply route constraints : remove candidate path that do not satisfy the constraint # only in the case of disjounction: the simple path is processed in request.compute_constrained_path # TODO : keep a version without the loose constraint for d in disjunctions_list : temp = [] for j,sol in enumerate(candidates[d.disjunction_id]) : testispartok 
= True for i,p in enumerate(sol) : # print(f'test {allpaths[id(p)].req.request_id}') # print(f'length of route {len(allpaths[id(p)].req.nodes_list)}') if allpaths[id(p)].req.nodes_list : # if p does not containt the ordered list node, remove sol from the candidate # except if this was the last solution: then check if the constraint is loose or not if not ispart(allpaths[id(p)].req.nodes_list, p) : # print(f'nb of solutions {len(temp)}') if j < len(candidates[d.disjunction_id])-1 : msg = f'removing {sol}' logger.info(msg) testispartok = False #break else: if 'loose' in allpaths[id(p)].req.loose_list: logger.info(f'Could not apply route constraint'+ f'{allpaths[id(p)].req.nodes_list} on request {allpaths[id(p)].req.request_id}') else : logger.info(f'removing last solution from candidate paths\n{sol}') testispartok = False if testispartok : temp.append(sol) candidates[d.disjunction_id] = temp # step 5 select the first combination that works pathreslist_disjoint = {} for d in disjunctions_list : test_sol = True while test_sol: # print('coucou') if candidates[d.disjunction_id] : for p in candidates[d.disjunction_id][0]: if allpaths[id(p)].req in pathreqlist_disjt: # print(f'selected path :{p} for req {allpaths[id(p)].req.request_id}') pathreslist_disjoint[allpaths[id(p)].req] = allpaths[id(p)].pth pathreqlist_disjt.remove(allpaths[id(p)].req) candidates = remove_candidate(candidates, allpaths, allpaths[id(p)].req, p) test_sol = False else: msg = f'No disjoint path found with added constraint' logger.critical(msg) print(f'{msg}\nComputation stopped.') # TODO in this case: replay step 5 with the candidate without constraints exit() # for i in disjunctions_list : # print(i.disjunction_id) # print(f'\n{candidates[i.disjunction_id]}') # list the results in the same order as initial pathreqlist for req in pathreqlist : req.nodes_list.append(req.destination) # we assume that the destination is a strict constraint req.loose_list.append('strict') if req in pathreqlist_simple: path_res_list.append(compute_constrained_path(network, req)) else: path_res_list.append(pathreslist_disjoint[req]) return path_res_list
def compute_constrained_path(network, req): trx = [n for n in network.nodes() if isinstance(n, Transceiver)] roadm = [n for n in network.nodes() if isinstance(n, Roadm)] edfa = [n for n in network.nodes() if isinstance(n, Edfa)] anytypenode = [n for n in network.nodes()] source = next(el for el in trx if el.uid == req.source) # This method ensures that the constraint can be satisfied without loops # except when it is not possible : eg if constraints makes a loop # It requires that the source, dest and nodes are correct (no error in the names) destination = next(el for el in trx if el.uid == req.destination) nodes_list = [] for n in req.nodes_list : # for debug excel print(n) nodes_list.append(next(el for el in anytypenode if el.uid == n)) # nodes_list contains at least the destination if nodes_list is None : msg = f'Request {req.request_id} problem in the constitution of nodes_list: should at least include destination' logger.critical(msg) exit() if req.nodes_list[-1] != req.destination: msg = f'Request {req.request_id} malformed list of nodes: last node should be destination trx' logger.critical(msg) exit() if len(nodes_list) == 1 : try : total_path = dijkstra_path(network, source, destination, weight = 'weight') # print('checking edges length is correct') # print(shortest_path_length(network,source,destination)) # print(shortest_path_length(network,source,destination,weight ='weight')) # s = total_path[0] # for e in total_path[1:]: # print(s.uid) # print(network.get_edge_data(s,e)) # s = e except NetworkXNoPath: msg = f'\x1b[1;33;40m'+f'Request {req.request_id} could not find a path from {source.uid} to node : {destination.uid} in network topology'+ '\x1b[0m' logger.critical(msg) print(msg) total_path = [] else : all_simp_pths = list(all_simple_paths(network,source=source,\ target=destination, cutoff=120)) candidate = [] for p in all_simp_pths : if ispart(nodes_list, p) : # print(f'selection{[el.uid for el in p if el in roadm]}') candidate.append(p) # select the shortest path (in nb of hops) -> changed to shortest path in km length if len(candidate)>0 : # candidate.sort(key=lambda x: len(x)) candidate.sort(key=lambda x: sum(network.get_edge_data(x[i],x[i+1])['weight'] for i in range(len(x)-2))) total_path = candidate[0] else: if req.loose_list[req.nodes_list.index(n)] == 'loose': print(f'\x1b[1;33;40m'+f'Request {req.request_id} could not find a path crossing {nodes_list} in network topology'+ '\x1b[0m') print(f'constraint ignored') total_path = dijkstra_path(network, source, destination, weight = 'weight') else: msg = f'\x1b[1;33;40m'+f'Request {req.request_id} could not find a path crossing {nodes_list}.\nNo path computed'+ '\x1b[0m' logger.critical(msg) print(msg) total_path = [] # obsolete method: this does not guaranty to avoid loops or correct results # Here is the demonstration : # 1 1 # eg a----b-----c # |1 |0.5 |1 # e----f--h--g # 1 0.5 0.5 # if I have to compute a to g with constraint f-c # result will be a concatenation of: a-b-f and f-b-c and c-g # which means a loop. 
# if to avoid loops I iteratively suppress edges of the segmenst in the topo # segment 1 = a-b-f # 1 # eg a b-----c # |1 |1 # e----f--h--g # 1 0.5 0.5 # then # segment 2 = f-h-g-c # 1 # eg a b-----c # |1 # e----f h g # 1 # then there is no more path to g destination # # # total_path = [source] # for n in req.nodes_list: # try : # node = next(el for el in trx if el.uid == n) # except StopIteration: # try: # node = next(el for el in anytypenode if el.uid == n) # except StopIteration: # try: # # TODO this test is not giving good results: full name of the # # amp is required to avoid ambiguity on the direction # node = next(el for el in anytypenode # if n in el.uid) # except StopIteration: # msg = f'could not find node : {n} in network topology: \ # not a trx, roadm, edfa, fiber or fused element' # logger.critical(msg) # raise ValueError(msg) # # extend path list without repeating source -> skip first element in the list # try: # # to avoid looping back: use an alternate graph were current path edges and vertex are suppressed # total_path.extend(dijkstra_path(network, source, node)[1:]) # source = node # except NetworkXNoPath: # if req.loose_list[req.nodes_list.index(n)] == 'loose': # print(f'could not find a path from {source.uid} to loose node : {n} in network topology') # print(f'node {n} is skipped') # else: # msg = f'could not find a path from {source.uid} to node : {n} in network topology' # logger.critical(msg) # print(msg) # total_path = [] return total_path
## print "ERROR" #guess my algorithm actually starts here, sorry about that #Part of the list this generates isn't quite correct, I'll try working on it some more if I have time #Should be something like: node_list = ['Fly', 'E', 'D', 'O', 'F', 'C', 'P', 'N', 'Q', 'M', 'T', 'R', 'L', 'K', 'S', 'B', 'G', 'H', 'A', 'J', 'Spider'] node_count = np.zeros(len(node_list))#number of paths from 'Fly' to itself should be 0 for i in range(1, len(node_list)): if G.has_edge(node_list[i], node_list[0]):#if has edge to goal, increment number of paths for that node node_count[i] += 1 #Check to see if has edge from any or the nodes that we've already solved for number of paths to the goal for j in range(i, 0, -1): #If there is an edge, then the number of paths from that node is equal to itself plus the number of paths it has an edge with if G.has_edge(node_list[i], node_list[j]): node_count[i] += node_count[j] print node_list print node_count print 'Total number of paths from Spider to Fly: ', int(node_count[i]), ' paths' ############################################################################### #Answer I should get paths = nx.all_simple_paths(G, source='Spider', target='Fly', cutoff=21) #print len(list(paths)) if (int(node_count[i]) == len(list(paths))): print 'Correct!'
G.add_node(5, entrance=True) G.add_node(7, entrance=True) print(''' [1--2--3 | | 6--4--5] | 7] ''') # add path attributes entrances = {v for v in nx.get_node_attributes(G, 'entrance')} start_and_goals = itertools.combinations(entrances, 2) for src, tgt in start_and_goals: for p in nx.all_simple_paths(G, src, tgt): G.add_path(p) # b(i,j,k)= \begin{cases} # 1 & ( v_i , v_j ) \in p_k \\ # 0 & otherwise # \end{cases} def ispartof(edge: tuple, path: mynx.Graph.Path) -> bool: for i, j in zip(path.data[::1], path.data[1::1]): if set(edge) == {i, j}: return True else: return False
def paths(self, rda_graph):
    return nx.all_simple_paths(rda_graph, self.source, self.subject)
def toRoom9(): G.add_edges_from(roomDoors) myRoom = designateRoom(x_ant,y_ant) searchFlag = True roomWas = False nodes = G.nodes() if (9 not in nodes): print 'I don\'t know where is room number 9.' return paths = nx.all_simple_paths(G,myRoom,9) paths = list(paths) shortPath = 1000 counter = 0 pathWithout7 = [] numberPath = len(paths) # Case when agent don't know about door leading to room 9 if(numberPath == 0): print 'I don\'t know how to get room 9 yet.' return for x in range(0,numberPath): l=len(paths[x]) if shortPath > l: shortPath = l position = x if(paths[0][0]==7): print 'I\'m in room 7. The shortest path to room 9 is:' + '-'.join(str(x) for x in paths[position][1:-1]) return shortPath = 1000 for p in range(0,numberPath): searchFlag = True pat=paths[p] for r in pat: if(r == 7): searchFlag = False if(searchFlag == True): counter += 1 roomWas = True pathWithout7.append(pat) if counter == 1: print '\nNo, I don\'t need to pass room 7.' pathLength = len(pat) if(pathLength < shortPath): shortPath = pathLength position = p if 9 == pat[0]: print 'I\'m in room 9 ! ' return elif (pathLength == 2): print 'I can go straight to room 9, through door between rooms %d and %d'%(pat[0],pat[1]) else: if(counter > 1): print 'or through room: ' + '-'.join(str(x) for x in pat[1:-1]) else: print 'To go to room 9 I can go through room: ' + '-'.join(str(x) for x in pat[1:-1]) if(not pathWithout7): print 'I must pass room 7.' else: length = len(pathWithout7) lengths = [len(x) for x in pathWithout7] if(not (all(a==lengths[0] for a in lengths))): if length > 1: if shortPath == 2: print 'But the shortest path is going straight through door between rooms %d and %d'%(paths[position][0],paths[position][1]) elif shortPath == 3: print'But the shortest path is go through room ' + str(paths[position][1]) else: print 'But the shortest path is ' + '-'.join(str(x) for x in paths[position][1:-1])
def op(self, graph, a: NodeSpec, b: NodeSpec):
    return [
        ids_to_nodes(graph, i)
        for i in nx.all_simple_paths(graph.gnx, a["id"], b["id"])
    ]
def simple_path((x,y), edges, nodes, pos_dict, dep_dict): """ Returns the simple dependency paths between x and y, using the simple paths \ in the graph of dependency tree. :param edges: the edges of the graph :param nodes: the nodes of the graph :return: the simple paths between x and y in which each node is normalized by lemma/pos/dep/dist """ # Gets edges without indices from edges edges_with_idx = [k for k in edges.keys()] # Builds graph G = nx.Graph() G.add_nodes_from(nodes) G.add_edges_from(edges_with_idx) # Finds paths from x to y and paths from y to x x_to_y_paths = [path for path in nx.all_simple_paths(G,source=x, target=y)] y_to_x_paths = [path for path in nx.all_simple_paths(G,source=y, target=x)] # Normalizes simple paths normalized_simple_paths = [] for path in x_to_y_paths: _paths = simple_path_normalization(path, edges, pos_dict, dep_dict) if _paths is not None: normalized_simple_paths.append(_paths) for path in y_to_x_paths: _paths = simple_path_normalization(path, edges, pos_dict, dep_dict) if _paths is not None: normalized_simple_paths.append(_paths) return normalized_simple_paths def simple_path_normalization(path, edges, pos_dict, dep_dict):
#plt.show() link_list = [] for k in range(len(edges_list)): link_list0 = [] for i in range(7): link_list0.append(edges_list[k] + (i, )) link_list.append(link_list0) SD_path = [] SR_pathLink = [] for i in range(len(SR_pair)): path0 = [] for path in nx.all_simple_paths(G, source=SR_pair[i][0], target=SR_pair[i][1]): path_edges = [] for j in range(len(path) - 1): path_edges.append((path[j], path[j + 1])) path0.append(path_edges) SD_path.append(path0) CoreSetup() for i in range(len(bypassEdges)): Link = bypassEdges[i] for j in range(7): if (len(Link[j]) > 0): if (Link[j][0][0] == edges_list[i][1]): Out_bypassEdges[i][j] = Link[j]
def runRound(edges, start, end, cost, tax, biases, N, debug=False): # For each edge there is flow * c(flow). These are the edgeFuncs. The global # optimizer minimizes the sum of these functions initial = True preferences = {} edgeResults = {} # Add ids to every edge so that biases can be indexed for i, (n1, n2, edgeData) in enumerate(edges): edgeData['id'] = i G = networkx.DiGraph() G.add_edges_from(edges) #print G.edge #print G.edge[0] for i in range(N): for n1 in G.edge: for n2 in G.edge[n1]: preferences[(i, n1, n2)] = biases[G.edge[n1][n2]['id']](i) #zero the graph edges to start for n1 in G.edge: for n2 in G.edge[n1]: G.edge[n1][n2]['f'] = 0.0 # we need to preserve the balance of edges at the end of the round. becuas I should change my mind just because the flow changed... # If I change my mind, remove myself from the previous # for r in range(R): aCur = {} # create aPrev = None ct = 0 history = [] backtrack = False totalc = 0.0 edgeResults = {} maxIters = 10 while ((initial or aCur != aPrev) and ct < maxIters): ct += 1 if aCur in history: backtrack = True history.append(dict(aCur)) aPrev = dict(aCur) order = range(N) numpy.random.shuffle(order) for i in order: if i in aPrev: for n1, n2 in aCur[i]: G.edge[n1][n2]['f'] -= 1.0 / N for n1 in G.edge: for n2 in G.edge[n1]: G.edge[n1][n2]['c'] = cost[G.edge[n1][n2]['t']](G.edge[n1][n2]['f']) + \ tax[G.edge[n1][n2]['t']](G.edge[n1][n2]['f']) + \ preferences[(i, n1, n2)] paths = networkx.all_simple_paths(G, source=start, target=end) pathCosts = [] for path in paths: c = 0 edges = zip(path[:-1], path[1:]) for n0, n1 in edges: c += G.edge[n0][n1]['c'] pathCosts.append((c, edges)) pEdge = sorted(pathCosts, key=lambda x: x[0])[0][1] aCur[i] = pEdge for n1, n2 in aCur[i]: G.edge[n1][n2]['f'] += 1.0 / N totalc = 0.0 edgeResults = {} for n1, n2 in set(G.edges()): edgeData = G.edge[n1][n2] key = (n1, n2) edgeResults[key] = edgeData['f'], edgeData['t'] totalc += edgeData['f'] * cost[edgeData['t']](edgeData['f']) if debug: for (n1, n2), (f, t) in edgeResults.items(): print u"Edge ({0}, {1}), type {2}, flow {3}".format( n1, n2, t, f) print '----' print u"Total cost: {0}".format(numpy.mean(totalc)) print '****' initial = False if debug: print 'Total iterations:', ct if debug: if backtrack: print "Game is not potential game" else: print "No backtracking detected" return totalc, edgeResults, backtrack, (ct < maxIters)
    source='origin', target='dest', edge_attr=True,
)
# detail documentation of networkx https://networkx.github.io/documentation/networkx-1.7/reference/generated/networkx.drawing.nx_pylab.draw_networkx.html
FG.nodes()
FG.edges()
nx.draw_networkx(FG, with_labels=True, node_size=600, node_color='y')  # Quick view of the Graph. As expected we see 3 very busy airports
nx.algorithms.degree_centrality(FG)  # Notice the 3 airports from which all of our 100 rows of data originates
nx.density(FG)  # Average edge density of the Graphs
nx.average_shortest_path_length(FG)  # Average shortest path length for ALL paths in the Graph
nx.average_degree_connectivity(FG)  # For a node of degree k - What is the average of its neighbours' degree?

# Let us find all the paths available
for path in nx.all_simple_paths(FG, source='JAX', target='DFW'):
    print(path)

# Let us find the dijkstra path from JAX to DFW.
# You can read more in-depth on how dijkstra works from this resource - https://courses.csail.mit.edu/6.006/fall11/lectures/lecture16.pdf
dijpath = nx.dijkstra_path(FG, source='JAX', target='DFW')
dijpath

# Let us try to find the dijkstra path weighted by airtime (approximate case)
shortpath = nx.dijkstra_path(FG, source='JAX', target='DFW', weight='air_time')
shortpath
def dcbusprop(G, source, target): """Creates discrete properties for power status of dc buses Parameters ---------- G : networkX graph source : node dc bus target : node generator """ temp = [] edges = [] D = copy.deepcopy(G) gens2 = copy.deepcopy(gens) gens2.remove(target) D.remove_nodes_from(gens2) paths = [] C = [] for path in nx.all_simple_paths(D, source, target, cutoff=None): paths.append(path) for p in range(0, len(paths)): for i in range(0, len(paths[p]) - 1): if paths[p][i] in busdc and paths[p][i + 1] in rus: pass elif paths[p][i] in rus and paths[p][i + 1] in busdc: pass else: C.append((paths[p][i], paths[p][i + 1])) f.write('disc_props[' "'" 'B' + str(source) + str(target) + str(p) + "'" '] = ' "'") if paths[p][1] in gens: f.write('(g' + str(paths[p][1]) + '=1)') elif paths[p][1] in busac: f.write('(b' + str(paths[p][1]) + '=1)') elif paths[p][1] in rus: f.write('(ru' + str(paths[p][1]) + '=1)') elif paths[p][1] in busdc: f.write('(b' + str(paths[p][1]) + '=1)') elif paths[p][1] in null: f.write('(b' + str(paths[p][1]) + '=1)') else: pass for j in range(2, len(paths[p])): if paths[p][j] in gens: f.write(' & (g' + str(paths[p][j]) + '=1)') elif paths[p][j] in busac: f.write(' & (b' + str(paths[p][j]) + '=1)') elif paths[p][j] in rus: f.write(' & (ru' + str(paths[p][j]) + '=1)') elif paths[p][1] in busdc: f.write(' & (b' + str(paths[p][1]) + '=1)') elif paths[p][1] in null: f.write(' & (b' + str(paths[p][1]) + '=1)') else: pass for k in range(0, len(C)): if C[k][0] < C[k][1]: f.write(' & (c' + str(C[k][0]) + str(C[k][1]) + '=1)') else: f.write(' & (c' + str(C[k][1]) + str(C[k][0]) + '=1)') f.write("'" '\n') C = []
def odd_even_fault_tolerance_metric(network_size, routing_type): turns_health_2d_network = { "N2W": False, "N2E": False, "S2W": False, "S2E": False, "W2N": False, "W2S": False, "E2N": False, "E2S": False } Config.ag.topology = '2DMesh' Config.ag.x_size = network_size Config.ag.y_size = network_size Config.ag.z_size = 1 Config.RotingType = routing_type all_odd_evens_file = open( 'Generated_Files/Turn_Model_Eval/' + str(network_size) + "x" + str(network_size) + '_OE_metric_' + Config.RotingType + '.txt', 'w') all_odd_evens_file.write("TOPOLOGY::" + str(Config.ag.topology) + "\n") all_odd_evens_file.write("X SIZE:" + str(Config.ag.x_size) + "\n") all_odd_evens_file.write("Y SIZE:" + str(Config.ag.y_size) + "\n") all_odd_evens_file.write("Z SIZE:" + str(Config.ag.z_size) + "\n") ag = copy.deepcopy(AG_Functions.generate_ag()) shmu = SystemHealthMonitoringUnit.SystemHealthMonitoringUnit() turns_health = copy.deepcopy(turns_health_2d_network) shmu.setup_noc_shm(ag, turns_health, False) noc_rg = copy.deepcopy( Routing.generate_noc_route_graph(ag, shmu, [], False, False)) classes_of_doa_ratio = [] turn_model_class_dict = {} tm_counter = 0 for turn_model in all_odd_even_list: sys.stdout.write("\rnumber of processed turn models: %i " % tm_counter) sys.stdout.flush() tm_counter += 1 link_dict = {} turn_model_index = all_odd_even_list.index(turn_model) turn_model_odd = turn_model[0] turn_model_even = turn_model[1] update_rg_odd_even(ag, turn_model_odd, turn_model_even, shmu, noc_rg) number_of_pairs = len(ag.nodes()) * (len(ag.nodes()) - 1) all_paths_in_graph = [] for source_node in ag.nodes(): for destination_node in ag.nodes(): if source_node != destination_node: if is_destination_reachable_from_source( noc_rg, source_node, destination_node): if Config.RotingType == 'MinimalPath': shortest_paths = list( all_shortest_paths( noc_rg, str(source_node) + str('L') + str('I'), str(destination_node) + str('L') + str('O'))) paths = [] for path in shortest_paths: minimal_hop_count = manhattan_distance( source_node, destination_node) if (len(path) / 2) - 1 <= minimal_hop_count: paths.append(path) all_paths_in_graph.append(path) else: paths = list( all_simple_paths( noc_rg, str(source_node) + str('L') + str('I'), str(destination_node) + str('L') + str('O'))) all_paths_in_graph += paths link_dict = find_similarity_in_paths(link_dict, paths) metric = 0 for item in link_dict.keys(): metric += link_dict[item] if Config.RotingType == 'MinimalPath': doa = degree_of_adaptiveness(ag, noc_rg, False) / float(number_of_pairs) metric = 1 / (float(metric) / len(ag.edges())) metric = float("{:3.3f}".format(metric)) else: doa_ex = extended_degree_of_adaptiveness( ag, noc_rg, False) / float(number_of_pairs) metric = 1 / (float(metric) / len(ag.edges())) metric = float("{:3.3f}".format(metric)) if metric not in classes_of_doa_ratio: classes_of_doa_ratio.append(metric) if metric in turn_model_class_dict.keys(): turn_model_class_dict[metric].append(turn_model_index) else: turn_model_class_dict[metric] = [turn_model_index] # return SHMU and RG back to default clean_rg_from_odd_even(ag, turn_model_odd, turn_model_even, shmu, noc_rg) all_odd_evens_file.write("classes of metric" + str(classes_of_doa_ratio) + "\n") all_odd_evens_file.write("----------" * 3 + "\n") all_odd_evens_file.write("turn models of class" + "\n") for item in sorted(turn_model_class_dict.keys()): all_odd_evens_file.write( str(item) + " " + str(turn_model_class_dict[item]) + "\n") all_odd_evens_file.write("----------" * 3 + "\n") 
all_odd_evens_file.write("distribution of turn models" + "\n") for item in sorted(turn_model_class_dict.keys()): temp_list = [] for tm in turn_model_class_dict[item]: turn_model = all_odd_even_list[tm] number_of_turns = len(turn_model[0]) + len(turn_model[1]) temp_list.append(number_of_turns) all_odd_evens_file.write( str(item) + " " + str(temp_list.count(8)) + " " + str(temp_list.count(9)) + " " + str(temp_list.count(10)) + " " + str(temp_list.count(11)) + " " + str(temp_list.count(12)) + "\n") all_odd_evens_file.close() return turn_model_class_dict
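# Illustrative sketch (find_similarity_in_paths belongs to the original project and
# is not reproduced here): the metric above penalises turn models whose alternative
# routes keep reusing the same links.  One plausible reading of that idea is to
# count, per link, how many candidate paths traverse it, which a Counter over the
# hops of every simple path gives directly.
import networkx as nx
from collections import Counter

g = nx.DiGraph([(0, 1), (1, 3), (0, 2), (2, 3), (1, 2)])
link_use = Counter()
for path in nx.all_simple_paths(g, 0, 3):
    link_use.update(zip(path, path[1:]))       # each hop is one use of a link
reuse = sum(n - 1 for n in link_use.values() if n > 1)
print(dict(link_use))
print('links reused', reuse, 'times')          # 2: (0, 1) and (2, 3) are shared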
def divide(self, subnet, subnet_tree=None, t=None, h=None): """Attempt to divide subnet Parameters ---------- subnet : (int, int) subnet to attempt division subnet_tree : networkx.DiGraph, optional subnet hierarchy tree with additional labels if not given, it will be calculated automatically t : int, default None division parameter, used in similarity checks h : int, default None division parameter, used as a depth threshold if None - depth is unlimited Returns ------- success : bool True if factorization was sucessful False if it cannot be done See Also -------- similarity """ h = h if h is not None else math.inf subnet_tree = subnet_tree if subnet_tree is not None else self.subnet_tree( ) s, e = subnet def division_equivalence(node1, node2): return self.similarity(self, node1, e, node2, e, t=t) deep_nodes = set() close_nodes = set() for node in self.between_nodes(s, e, edge_cases=False): if nx.shortest_path_length(self, s, node) > h: deep_nodes.add(node) elif self.out_degree(node) > 1: close_nodes.add(node) classes, partition = equivalence_partition(close_nodes, division_equivalence) # remove child nodes from classes # otherwise we may merge child with parent (extremely illegal) for eq_class in classes: for curr_node in list(eq_class): if any(curr_node != node and nx.has_path(self, node, curr_node) for node in eq_class): eq_class.remove(curr_node) partition.pop(curr_node) valid_close = [] valid_deep = [] paths = list(nx.all_simple_paths(self, s, e)) for eq_class in classes: # ignore classes with less then 2 nodes to merge if len(eq_class) < 2: continue flag = False deep_part = set() for path in paths: flag = False for node in path: if node in eq_class: flag = True break if node in deep_nodes: flag = True deep_part.add(node) break # no path through node in eq_class or through deep node if not flag: break if flag: valid_close.append(eq_class) valid_deep.append(deep_part) if not valid_close: return None # print(close_nodes, deep_nodes, classes, partition, valid_close, valid_deep, '\n', sep='\n') # if many valid sets, select ones with smallest total amount of elements valids = zip(valid_close, valid_deep) valid_close, valid_deep = min( valids, key=lambda c_d: len(c_d[0]) + len(c_d[1])) first_node = valid_close.pop() net = Pnet(self) left_net = net.subcopy(s, first_node) right_net = net.subcopy(first_node, e) for node in valid_close: left_net = left_net.compose(net, other_start=s, other_end=node) right_net = right_net.compose(net, other_start=node, other_end=e) if right_net is None: return None new_subnet = Pnet.sequence_join(left_net, right_net) net.replace(new_subnet, s, e) return net
def pull_contigs_from_component(assembly, component, min_edge_trim_weight, assembly_max_paths, log=DEVNULL): """ builds contigs from the a connected component of the assembly DeBruijn graph Args: assembly (DeBruijnGraph): the assembly graph component (list): list of nodes which make up the connected component min_edge_trim_weight (int): the minimum weight to not remove a non cutting edge/path assembly_max_paths (int): the maximum number of paths allowed before the graph is further simplified log (function): the log function Returns: :class:`Dict` of :class:`int` by :class:`str`: the paths/contigs and their scores """ path_scores = {} # path_str => score_int w = min_edge_trim_weight unresolved_components = [component] while unresolved_components: # since now we know it's a tree, the assemblies will all be ltd to # simple paths component = unresolved_components.pop(0) paths_est = len(assembly.get_sinks(component)) * len( assembly.get_sources(component)) if paths_est > assembly_max_paths: edge_weights = sorted([ e[2]['freq'] for e in assembly.all_edges(assembly.get_sources(component) | assembly.get_sinks(component), data=True) ]) w = max([w + 1, edge_weights[0]]) if w > edge_weights[-1]: continue log( 'reducing estimated paths. Current estimate is {}+ from'. format(paths_est), len(component), 'nodes', 'filter increase', w) assembly.trim_forks_by_freq(w) assembly.trim_noncutting_paths_by_freq(w) assembly.trim_tails_by_freq(w) unresolved_components.extend( digraph_connected_components(assembly, component)) else: for source, sink in itertools.product( assembly.get_sources(component), assembly.get_sinks(component)): paths = list(nx.all_simple_paths(assembly, source, sink)) for path in paths: s = path[0] + ''.join([p[-1] for p in path[1:]]) score = 0 for i in range(0, len(path) - 1): score += assembly.get_edge_freq(path[i], path[i + 1]) path_scores[s] = max(path_scores.get(s, 0), score) return path_scores
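# Illustrative sketch (DeBruijnGraph and its trimming helpers come from the
# surrounding package): the contig string is spelled by keeping the first k-mer
# whole and appending the last character of every following k-mer, and its score is
# the sum of edge frequencies along the path.  A plain nx.DiGraph with a 'freq'
# attribute stands in for the assembly graph here.
import networkx as nx

kmer_graph = nx.DiGraph()
kmer_graph.add_edge('ATG', 'TGC', freq=5)
kmer_graph.add_edge('TGC', 'GCA', freq=3)

for path in nx.all_simple_paths(kmer_graph, 'ATG', 'GCA'):
    contig = path[0] + ''.join(kmer[-1] for kmer in path[1:])
    score = sum(kmer_graph[u][v]['freq'] for u, v in zip(path, path[1:]))
    print(contig, score)    # ATGCA 8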
def compute_periodicity_all_simple_paths_algorithm(self): """ Returns: """ self_loop_nodes = list( nx.nodes_with_selfloops(self._connected_subgraph)) all_nodes_independent_cell_image_vectors = [] my_simple_graph = nx.Graph(self._connected_subgraph) for test_node in self._connected_subgraph.nodes(): # TODO: do we need to go through all test nodes ? this_node_cell_img_vectors = [] if test_node in self_loop_nodes: for key, edge_data in self._connected_subgraph[test_node][ test_node].items(): if edge_data["delta"] == (0, 0, 0): raise ValueError( "There should not be self loops with delta image = (0, 0, 0)." ) this_node_cell_img_vectors.append(edge_data["delta"]) for d1, d2 in itertools.combinations(this_node_cell_img_vectors, 2): if d1 == d2 or d1 == tuple(-ii for ii in d2): raise ValueError( "There should not be self loops with the same (or opposite) delta image." ) this_node_cell_img_vectors = get_linearly_independent_vectors( this_node_cell_img_vectors) # Here, we adopt a cutoff equal to the size of the graph, contrary to the default of networkX (size - 1), # because otherwise, the all_simple_paths algorithm fail when the source node is equal to the target node. paths = [] # TODO: its probably possible to do just a dfs or bfs traversal instead of taking all simple paths! test_node_neighbors = my_simple_graph.neighbors(test_node) breaknodeloop = False for test_node_neighbor in test_node_neighbors: # Special case for two nodes if len(self._connected_subgraph[test_node] [test_node_neighbor]) > 1: this_path_deltas = [] node_node_neighbor_edges_data = list( self._connected_subgraph[test_node] [test_node_neighbor].values()) for edge1_data, edge2_data in itertools.combinations( node_node_neighbor_edges_data, 2): delta1 = get_delta(test_node, test_node_neighbor, edge1_data) delta2 = get_delta(test_node_neighbor, test_node, edge2_data) this_path_deltas.append(delta1 + delta2) this_node_cell_img_vectors.extend(this_path_deltas) this_node_cell_img_vectors = get_linearly_independent_vectors( this_node_cell_img_vectors) if len(this_node_cell_img_vectors) == 3: break for path in nx.all_simple_paths( my_simple_graph, test_node, test_node_neighbor, cutoff=len(self._connected_subgraph), ): path_indices = [nodepath.isite for nodepath in path] if path_indices == [ test_node.isite, test_node_neighbor.isite ]: continue path_indices.append(test_node.isite) path_indices = tuple(path_indices) if path_indices not in paths: paths.append(path_indices) else: continue path.append(test_node) # TODO: there are some paths that appears twice for cycles, and there are some paths that should # probably not be considered this_path_deltas = [np.zeros(3, np.int)] for (node1, node2) in [(node1, path[inode1 + 1]) for inode1, node1 in enumerate(path[:-1])]: this_path_deltas_new = [] for key, edge_data in self._connected_subgraph[node1][ node2].items(): delta = get_delta(node1, node2, edge_data) for current_delta in this_path_deltas: this_path_deltas_new.append(current_delta + delta) this_path_deltas = this_path_deltas_new this_node_cell_img_vectors.extend(this_path_deltas) this_node_cell_img_vectors = get_linearly_independent_vectors( this_node_cell_img_vectors) if len(this_node_cell_img_vectors) == 3: breaknodeloop = True break if breaknodeloop: break this_node_cell_img_vectors = get_linearly_independent_vectors( this_node_cell_img_vectors) independent_cell_img_vectors = this_node_cell_img_vectors all_nodes_independent_cell_image_vectors.append( independent_cell_img_vectors) # If we have found that the sub structure network is 
3D-connected, we can stop ... if len(independent_cell_img_vectors) == 3: break self._periodicity_vectors = [] if len(all_nodes_independent_cell_image_vectors) != 0: for (independent_cell_img_vectors ) in all_nodes_independent_cell_image_vectors: if len(independent_cell_img_vectors) > len( self._periodicity_vectors): self._periodicity_vectors = independent_cell_img_vectors if len(self._periodicity_vectors) == 3: break
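# Illustrative sketch (get_linearly_independent_vectors is defined elsewhere in the
# original module): the periodicity search only ever needs a maximal linearly
# independent subset of the accumulated cell-image vectors, and one simple way to
# obtain such a subset is to keep a vector whenever it raises the matrix rank.
import numpy as np

def linearly_independent_subset(vectors):
    kept = []
    for v in vectors:
        candidate = kept + [np.asarray(v)]
        if np.linalg.matrix_rank(np.array(candidate)) == len(candidate):
            kept = candidate                      # v adds a new direction
    return kept

print(linearly_independent_subset([(1, 0, 0), (2, 0, 0), (0, 1, 0), (1, 1, 0), (0, 0, 1)]))
# keeps (1, 0, 0), (0, 1, 0) and (0, 0, 1); the dependent vectors are dropped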
# Config
src_addr = argv[1]
dest_addr = argv[2]
cutoff = int(argv[3])

with open('../grapher/tx_graph.dat', "rb") as infile:
    G = pickle.load(infile)
print("Graph loaded.")

with open(str(src_addr) + "_to_" + str(dest_addr) + ".txt", 'w') as f:
    paths = list(
        nx.all_simple_paths(G, source=src_addr, target=dest_addr, cutoff=cutoff))
    print("Added %d new paths from address %s to address %s with min length %d."
          % (len(paths), src_addr, dest_addr, min([len(x) for x in paths])))
    # Sort paths by length
    paths.sort(key=len)
    tx_hashes = defaultdict(list)
    for i, path in enumerate(paths):
        for index in range(len(path) - 1):
            temp_hashes = []
            for index, value in G[path[index]][path[index + 1]].items():
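# The script above is cut off mid-loop; this toy (the graph file and its attributes
# are assumptions about the original transaction data) shows what the dangling
# expression G[u][v].items() yields on a MultiDiGraph -- one (edge_key, data) pair
# per parallel edge -- and how `cutoff` keeps nx.all_simple_paths tractable on a
# dense graph.
import networkx as nx

tx = nx.MultiDiGraph()
tx.add_edge('A', 'B', hash='0xaa')
tx.add_edge('A', 'B', hash='0xab')    # parallel edge: a second transfer A -> B
tx.add_edge('B', 'C', hash='0xbc')

path = next(nx.all_simple_paths(tx, 'A', 'C', cutoff=3))    # ['A', 'B', 'C']
for u, v in zip(path, path[1:]):
    print(u, '->', v, [data['hash'] for key, data in tx[u][v].items()])
# A -> B ['0xaa', '0xab']
# B -> C ['0xbc']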
Graph.clear()
Graph.nodes()
Graph.edges()

# In[ ]:

Graph.nodes()

# In[ ]:

Graph.add_nodes_from(['a', 'b', 'c', 'd'])
Graph.add_edges_from([('a', 'b'), ('a', 'c'), ('a', 'd'), ('b', 'c'), ('b', 'd'),
                      ('d', 'c')])
print('all paths')
for path in nx.all_simple_paths(Graph, source='a', target='c'):
    print(path)

# In[ ]:

nx.draw_networkx(Graph)
plt.show()

# In[ ]:

Graph.clear()

# In[ ]:

# Graph.add_path was removed in newer networkx releases; nx.add_path is the
# module-level replacement.
nx.add_path(Graph, [3, 5, 4, 1, 0, 2, 7, 8, 9, 6])
nx.add_path(Graph, [3, 0, 6, 4, 2, 7, 1, 9, 8, 5])
def get_variants(self, probabilities=False, generate_users=False, generate_long_term_dependencies=False): """ Return all possible variants from the process model. If probabilities is set to True the implicit probabilities similar to a random walk are returned as well :param probabilities: boolean, return the probabilities :param generate_users: boolean, generate user attribute :param generate_long_term_dependencies: boolean, generate long-term dependencies in each trace :return: """ self.variants = EventLog() num_users = None if generate_users: num_users = np.random.randint(10, 30) users = np.arange(num_users) self.variants.attr['attr_dims'] = { 'name': len(self.graph.node), 'user': int(num_users) + 2 } g = self.graph for key in g.node.keys(): random_users = np.sort( np.random.choice(users, np.random.randint(1, 5), replace=False)).tolist() random_users = [str(u) for u in random_users] if key in [self.start_event, self.end_event]: random_users = None g.node[key]['_possible_users'] = random_users for path in sorted(nx.all_simple_paths(self.graph, source=self.start_event, target=self.end_event)): path = path[1:-1] # remove BOS and EOS trace = Trace(label='normal') if generate_users: trace.attr["user_voc_size"] = num_users for event in path: trace.add_event(Event(name=event, start_time=None, **dict(self.graph.node[event].items()))) self.variants.add_trace(trace) if generate_long_term_dependencies: # add long term dependencies to every variant # this means that each variant will have exactly one long term dependency where the user must # be the same for two events for variant in self.variants: random_events = np.sort( np.random.choice(range(len(variant.events)), 2, replace=False)) # remove BOS and EOS random_attr = np.random.choice(list(variant[random_events[1]].attr.keys()), replace=False) head = random_events[0] tail = random_events[1:] for idx in tail: variant[idx].attr[random_attr] = int(head) # point to earlier event if not self.variant_probabilities and probabilities: self.variant_probabilities = [] for variant in self.variants: p = np.product( [1.0 / max(1.0, len([edge[1] for edge in self.graph.edges() if edge[0] == event.name])) for event in variant]) self.variant_probabilities.append(p) if probabilities: return self.variants, self.variant_probabilities else: return self.variants
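# Usage sketch: the "implicit probability" computed above treats the process model
# as a uniform random walk, so a variant's probability is the product of
# 1 / out_degree over its events.  DiGraph.out_degree expresses the same quantity
# the list comprehension over self.graph.edges() computes.
import networkx as nx

model = nx.DiGraph([('BOS', 'a'), ('a', 'b'), ('a', 'c'), ('b', 'EOS'), ('c', 'EOS')])

for path in nx.all_simple_paths(model, 'BOS', 'EOS'):
    events = path[1:-1]                      # strip BOS/EOS, as get_variants does
    p = 1.0
    for event in events:
        p *= 1.0 / max(1, model.out_degree(event))
    print(events, p)                         # ['a', 'b'] 0.5 and ['a', 'c'] 0.5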
def test(): all_odd_evens_file = open( 'Generated_Files/Turn_Model_Lists/all_odd_evens_doa.txt', 'w') turns_health_2d_network = { "N2W": False, "N2E": False, "S2W": False, "S2E": False, "W2N": False, "W2S": False, "E2N": False, "E2S": False } Config.ag.topology = '2DMesh' Config.ag.x_size = 3 Config.ag.y_size = 3 Config.ag.z_size = 1 Config.RotingType = 'NonMinimalPath' ag = copy.deepcopy(AG_Functions.generate_ag()) number_of_pairs = len(ag.nodes()) * (len(ag.nodes()) - 1) max_ratio = 0 classes_of_doa_ratio = [] turn_model_class_dict = {} for turn_model in all_odd_even_list: #for item in selected_turn_models: #print item #turn_model = all_odd_even_list[item] #print turn_model turn_model_index = all_odd_even_list.index(turn_model) turn_model_odd = turn_model[0] turn_model_even = turn_model[1] turns_health = copy.deepcopy(turns_health_2d_network) shmu = SystemHealthMonitoringUnit.SystemHealthMonitoringUnit() shmu.setup_noc_shm(ag, turns_health, False) noc_rg = copy.deepcopy( Routing.generate_noc_route_graph(ag, shmu, [], False, False)) for node in ag.nodes(): node_x, node_y, node_z = AG_Functions.return_node_location(node) if node_x % 2 == 1: for turn in turn_model_odd: shmu.restore_broken_turn(node, turn, False) from_port = str(node) + str(turn[0]) + "I" to_port = str(node) + str(turn[2]) + "O" Routing.update_noc_route_graph(noc_rg, from_port, to_port, 'ADD') else: for turn in turn_model_even: shmu.restore_broken_turn(node, turn, False) from_port = str(node) + str(turn[0]) + "I" to_port = str(node) + str(turn[2]) + "O" Routing.update_noc_route_graph(noc_rg, from_port, to_port, 'ADD') #draw_rg(noc_rg) number_of_pairs = len(ag.nodes()) * (len(ag.nodes()) - 1) doa_ex = extended_degree_of_adaptiveness( ag, noc_rg, False) / float(number_of_pairs) doa = degree_of_adaptiveness(ag, noc_rg, False) / float(number_of_pairs) sum_of_paths = 0 sum_of_sim_ratio = 0 for source_node in ag.nodes(): for destination_node in ag.nodes(): if source_node != destination_node: if is_destination_reachable_from_source( noc_rg, source_node, destination_node): #print source_node, "--->", destination_node if Config.RotingType == 'MinimalPath': shortest_paths = list( all_shortest_paths( noc_rg, str(source_node) + str('L') + str('I'), str(destination_node) + str('L') + str('O'))) paths = [] for path in shortest_paths: minimal_hop_count = manhattan_distance( source_node, destination_node) if (len(path) / 2) - 1 <= minimal_hop_count: paths.append(path) else: paths = list( all_simple_paths( noc_rg, str(source_node) + str('L') + str('I'), str(destination_node) + str('L') + str('O'))) #for path in paths: # print path local_sim_ratio = 0 counter = 0 if len(paths) > 1: for i in range(0, len(paths)): for j in range(i, len(paths)): if paths[i] != paths[j]: sm = difflib.SequenceMatcher( None, paths[i], paths[j]) counter += 1 local_sim_ratio += sm.ratio() #print float(local_sim_ratio)/counter sum_of_sim_ratio += float( local_sim_ratio) / counter else: sum_of_sim_ratio += 1 if Config.RotingType == 'MinimalPath': print("Turn Model ", '%5s' % turn_model_index, "\tdoa:", "{:3.3f}".format(doa), "\tsimilarity ratio:", "{:3.3f}".format(sum_of_sim_ratio), "\t\tfault tolerance metric:", "{:3.5f}".format(float(doa) / sum_of_sim_ratio)) doa_ratio = float("{:3.5f}".format( float(doa) / sum_of_sim_ratio, 5)) else: print("Turn Model ", '%5s' % turn_model_index, "\tdoa:", "{:3.3f}".format(doa_ex), "\tsimilarity ratio:", "{:3.3f}".format(sum_of_sim_ratio), "\t\tfault tolerance metric:", "{:3.5f}".format(float(doa_ex) / sum_of_sim_ratio)) doa_ratio = 
float("{:3.5f}".format( float(doa_ex) / sum_of_sim_ratio, 5)) if doa_ratio not in classes_of_doa_ratio: classes_of_doa_ratio.append(doa_ratio) if doa_ratio in list(turn_model_class_dict.keys()): turn_model_class_dict[doa_ratio].append(turn_model_index) else: turn_model_class_dict[doa_ratio] = [turn_model_index] if max_ratio < doa_ratio: max_ratio = doa_ratio #print "--------------------------------------------" del noc_rg print("max doa_ratio", max_ratio) print("classes of doa_ratio", classes_of_doa_ratio) for item in sorted(turn_model_class_dict.keys()): print(item, turn_model_class_dict[item]) return None
def load_step(file_obj, file_type=None): ''' Use the STEPtools Inc. Author Tools binary to mesh a STEP file, and return a list of Trimesh objects. Using this over openCASCADE as it is signifigantly more stable (though not OSS.) STEPtools Inc. provides the binary under this license: http://www.steptools.com/demos/license_author.html To install the required binary ('export_product_asm') into PATH: wget http://www.steptools.com/demos/stpidx_author_linux_x86_64_16.0.zip unzip stpidx_author_linux_x86_64_16.0.zip sudo cp stpidx_author_linux_x86_64/bin/export_product_asm /usr/bin/ Arguments ---------- file_obj: file like object containing step file file_type: unused Returns ---------- meshes: list of Trimesh objects (with correct metadata set from STEP file) ''' with NamedTemporaryFile() as out_file: with NamedTemporaryFile(suffix='.STEP') as in_file: if hasattr(file_obj, 'read'): in_file.write(file_obj.read()) in_file.seek(0) file_name = in_file.name else: file_name = file_obj check_call([ _STEP_FACETER, file_name, '-tol', str(res.mesh), '-o', out_file.name ]) t = cElementTree.parse(out_file) meshes = {} # get the meshes without metadata from the XML document for shell in t.findall('shell'): # query the xml structure for vertices and faces vertices = np.array( [v.get('p').split() for v in shell.findall('.//v')], dtype=np.float) faces = np.array([f.get('v').split() for f in shell.findall('.//f')], dtype=np.int) # normals aren't always returned but faces have correct winding # so they are autogenerated correctly from dot products mesh = {'vertices': vertices, 'faces': faces, 'metadata': {}} # store the mesh by id reference meshes[shell.get('id')] = mesh try: # populate the graph of shapes and transforms g = nx.MultiDiGraph() # keys: {mesh id : shape id} mesh_shape = {} # assume that the document has consistant units to_inches = None for shape in t.findall('shape'): shape_id = shape.get('id') shape_unit = shape.get('unit') mesh_id = shape.get('shell') if not shape_unit is None: to_inches = float(shape_unit.split()[1]) * _METERS_TO_INCHES if not mesh_id is None: for i in mesh_id.split(): mesh_shape[i] = shape_id #g.node[shape_id]['mesh'] = mesh_id g.add_node(shape_id, {'mesh': mesh_id}) for child in shape.getchildren(): child_id = child.get('ref') transform = np.array(child.get('xform').split(), dtype=np.float).reshape((4, 4)).T g.add_edge(shape_id, child_id, transform=transform) # which product ID has the root shape prod_root = t.getroot().get('root') shape_root = None for prod in t.findall('product'): prod_id = prod.get('id') prod_name = prod.get('name') prod_shape = prod.get('shape') if prod_id == prod_root: shape_root = prod_shape g.node[prod_shape]['product_name'] = prod_name # now that the assembly tree has been populated, traverse it to # find the final transforms and quantities for the meshes we extracted for mesh_id in meshes.keys(): shape_id = mesh_shape[mesh_id] transforms_all = deque() path_str = deque() if shape_id == shape_root: paths = [[shape_id, shape_id]] else: paths = nx.all_simple_paths(g, shape_root, shape_id) paths = np.array(list(paths)) garbage, unique = np.unique(['.'.join(i) for i in paths], return_index=True) paths = paths[unique] for path in paths: path_name = [g.node[i]['product_name'] for i in path[:-1]] edges = np.column_stack( (path[:-1], path[:-1])).reshape(-1)[1:-1].reshape((-1, 2)) transforms = [np.eye(4)] for e in edges: # get every transform from the edge local = [ i['transform'] for i in g.edge[e[0]][e[1]].values() ] # all the transforms are sequential, so we 
want combinations transforms = [ np.dot(*i) for i in itertools.product(transforms, local) ] transforms_all.extend(transforms) path_str.extend(['/'.join(path_name)] * len(transforms)) meshes[mesh_id]['vertices'] *= to_inches meshes[mesh_id]['metadata']['units'] = 'inches' meshes[mesh_id]['metadata']['name'] = path_name[-1] meshes[mesh_id]['metadata']['paths'] = np.array(path_str) meshes[mesh_id]['metadata']['quantity'] = len(transforms_all) meshes[mesh_id]['metadata']['transforms'] = np.array( transforms_all) except: log.error('STEP load processing error, aborting metadata!', exc_info=True) return meshes.values()
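# Illustrative sketch: the assembly traversal above multiplies every combination of
# the 4x4 transforms found along a path, because a subassembly instanced twice at
# one level and twice at the next places the leaf part four times.  The matrices
# below are made up; only the accumulation pattern mirrors the loader.
import itertools
import numpy as np

def translate(x, y, z):
    m = np.eye(4)
    m[:3, 3] = (x, y, z)
    return m

level_one = [translate(1, 0, 0), translate(-1, 0, 0)]   # two placements of the subassembly
level_two = [translate(0, 1, 0), translate(0, -1, 0)]   # two placements of the part inside it

transforms = [np.eye(4)]
for local in (level_one, level_two):
    transforms = [np.dot(a, b) for a, b in itertools.product(transforms, local)]
print(len(transforms))    # 4 placements of the leaf part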
def outputKshortestPath(self): self.numSubpath = 0 f = open("./Graph/linkonpath.txt", "w") fn = open("./Graph/nodeonpath.txt", "w") f.writelines("%d %d %d\n" % (self.NODENUM, self.NUMLINK, self.pathNum)) for src in self.NODE_DICT: for sink in self.NODE_DICT: if src != sink: pathcount = 0 m_pathcount = [0 for i in range(self.cutoff + 1)] for path in nx.all_shortest_paths(self.topo, source=src, target=sink): f.writelines("%d %d " % (src, sink)) pathcount += 1 m_pathcount[len(path) - 1] += 1 self.numSubpath += 1 pre = 0 fn.writelines("%d " % path[pre]) while pre < len(path) - 1: cur = pre + 1 if path[pre] in self.LINK_DICT and path[ cur] in self.LINK_DICT[path[pre]]: f.writelines( "%d " % self.LINK_DICT[path[pre]][path[cur]].id) fn.writelines("%d " % path[cur]) else: f.writelines( "%d " % self.LINK_DICT[path[cur]][path[pre]].id) pre = pre + 1 f.writelines("\n") fn.writelines("\n") if pathcount >= self.pathNum: break if pathcount < self.pathNum: for path in nx.all_simple_paths(self.topo, source=src, target=sink, cutoff=self.cutoff): f.writelines("%d %d " % (src, sink)) pathcount += 1 m_pathcount[len(path) - 1] += 1 self.numSubpath += 1 pre = 0 fn.writelines("%d " % path[pre]) while pre < len(path) - 1: cur = pre + 1 if path[pre] in self.LINK_DICT and path[ cur] in self.LINK_DICT[path[pre]]: f.writelines( "%d " % self.LINK_DICT[path[pre]][path[cur]].id ) fn.writelines("%d " % path[cur]) else: f.writelines( "%d " % self.LINK_DICT[path[cur]][path[pre]].id ) pre = pre + 1 f.writelines("\n") fn.writelines("\n") if pathcount >= self.pathNum: break f.close() fn.close()
""" Created on Sat Dec 18 14:43:04 2021 @author: wapisani """ import os import numpy as np import networkx as nx directory = r'F:\Documents\Programming\AoC\2021' # directory = r'/Users/wapisani/Documents/Programming/AoC/2021' os.chdir(directory) # with open('input_day12.txt','r') as handle: # data = [line.strip() for line in handle.readlines()] with open('sample_day12.txt', 'r') as handle: data = [line.strip() for line in handle.readlines()] edges = [] for line in data: edge = line.split('-') edges.append((edge[0], edge[1])) G = nx.Graph() for edge in edges: G.add_edge(*edge) paths = nx.all_simple_paths(G, 'start', 'end')
def dci_orient(X1, X2, skeleton: set, nodes_cond_set: set, rh1: RegressionHelper = None, rh2: RegressionHelper = None, alpha: float = 0.1, max_set_size: int = 3, verbose: int = 0): """ Orients edges in the skeleton of the difference DAG. Parameters ---------- X1: array, shape = [n_samples, n_features] First dataset. X2: array, shape = [n_samples, n_features] Second dataset. skeleton: set Set of edges in the skeleton of the difference-DAG. nodes_cond_set: set Nodes to be considered as conditioning sets. rh1: RegressionHelper, default = None Sufficient statistics estimated based on samples in the first dataset, stored in RegressionHelper class. rh2: RegressionHelper, default = None Sufficient statistics estimated based on samples in the second dataset, stored in RegressionHelper class. alpha: float, default = 0.1 Significance level parameter for determining orientation of an edge. Lower alpha results in more directed edges in the difference-DAG. max_set_size: int, default = 3 Maximum conditioning set size used to test regression invariance. Smaller maximum conditioning set size results in faster computation time. For large datasets recommended max_set_size is 3. verbose: int, default = 0 The verbosity level of logging messages. See Also -------- dci, dci_undirected_graph, dci_skeleton Returns ------- oriented_edges: set Set of edges in the skeleton of the difference-DAG for which directionality could be determined. unoriented_edges: set Set of edges in the skeleton of the difference-DAG for which directionality could not be determined. """ if verbose > 0: print("DCI edge orientation...") assert 0 <= alpha <= 1, "alpha must be in [0,1] range." if rh1 is None or rh2 is None: # obtain sufficient statistics suffstat1 = gauss_ci_suffstat(X1) suffstat2 = gauss_ci_suffstat(X2) rh1 = RegressionHelper(suffstat1) rh2 = RegressionHelper(suffstat2) nodes = {i for i, j in skeleton} | {j for i, j in skeleton} oriented_edges = set() n1 = rh1.suffstat['n'] n2 = rh2.suffstat['n'] for i, j in skeleton: for cond_i, cond_j in zip( powerset(nodes_cond_set - {i}, r_max=max_set_size), powerset(nodes_cond_set - {j}, r_max=max_set_size)): # compute residual variances for i beta1_i, var1_i, _ = rh1.regression(i, list(cond_i)) beta2_i, var2_i, _ = rh2.regression(i, list(cond_i)) # compute p-value for invariance of residual variances for i pvalue_i = ncfdtr(n1 - len(cond_i), n2 - len(cond_i), 0, var1_i / var2_i) pvalue_i = 2 * min(pvalue_i, 1 - pvalue_i) # compute residual variances for j beta1_j, var1_j, _ = rh1.regression(j, list(cond_j)) beta2_j, var2_j, _ = rh2.regression(j, list(cond_j)) # compute p-value for invariance of residual variances for j pvalue_j = ncfdtr(n1 - len(cond_j), n2 - len(cond_j), 0, var1_j / var2_j) pvalue_j = 2 * min(pvalue_j, 1 - pvalue_j) if ((pvalue_i > alpha) | (pvalue_j > alpha)): # orient the edge according to highest p-value if pvalue_i > pvalue_j: edge = (j, i) if j in cond_i else (i, j) pvalue_used = pvalue_i else: edge = (i, j) if i in cond_j else (j, i) pvalue_used = pvalue_j oriented_edges.add(edge) if verbose > 0: print( "Oriented (%d, %d) as %s since p-value=%.5f > alpha=%.5f" % (i, j, edge, pvalue_used, alpha)) break # orient edges via graph traversal unoriented_edges_before_traversal = skeleton - oriented_edges - { (j, i) for i, j in oriented_edges } unoriented_edges = unoriented_edges_before_traversal.copy() g = nx.DiGraph() for i, j in oriented_edges: g.add_edge(i, j) g.add_nodes_from(nodes) for i, j in unoriented_edges_before_traversal: chain_path = 
list(nx.all_simple_paths(g, source=i, target=j)) if len(chain_path) > 0: oriented_edges.add((i, j)) unoriented_edges.remove((i, j)) if verbose > 0: print("Oriented (%d, %d) as %s with graph traversal" % (i, j, (i, j))) else: chain_path = list(nx.all_simple_paths(g, source=j, target=i)) if len(chain_path) > 0: oriented_edges.add((j, i)) unoriented_edges.remove((i, j)) if verbose > 0: print("Oriented (%d, %d) as %s with graph traversal" % (i, j, (j, i))) # form an adjacency matrix containing directed and undirected edges num_nodes = X1.shape[1] adjacency_matrix = edges2adjacency( num_nodes, unoriented_edges, undirected=True) + edges2adjacency( num_nodes, oriented_edges, undirected=False) return adjacency_matrix
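# Illustrative sketch (edges2adjacency is provided by the surrounding package; this
# only mimics the way it is called above): directed edges set one entry of the
# adjacency matrix, undirected edges set both, and the two matrices are summed to
# encode the partially oriented difference-DAG.
import numpy as np

def edges_to_adjacency(num_nodes, edge_set, undirected=False):
    adj = np.zeros((num_nodes, num_nodes), dtype=int)
    for i, j in edge_set:
        adj[i, j] = 1
        if undirected:
            adj[j, i] = 1
    return adj

oriented = {(0, 1)}
unoriented = {(1, 2)}
print(edges_to_adjacency(3, unoriented, undirected=True) + edges_to_adjacency(3, oriented))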
def main(): print("============================================================") print("SPICE PARSER AND AUTOMATIC STACK CALCULATOR") print("============================================================") print("By: Rodrigo N. Wuerdig") print("Contact: [email protected]") print_gmelogo() #=========================================================================== #Fetch Args #=========================================================================== if (sys.argv[1] == None): print("WARNING: ARG1 SHOULD CONTAIN WP/WN RATIO") wpwn_ratio = 2.0 else: wpwn_ratio = float(sys.argv[1]) if (sys.argv[2] == None): print("ERROR: ARG2 SHOULD CONTAIN SPICE FILE") return -1 else: file = sys.argv[2] #=========================================================================== #Open Spice File #=========================================================================== f = open(file, "r") ntransistors = [] #Start List for N Transistors ptransistors = [] #Start List for P Transistors inputs = [] #Start List for Inputs outputs = [] #Start List for Output Nodes for line in f: newline = line.rstrip('\n') #Check if the line starts with *.pininfo if "*.pininfo" in newline.lower(): #Fetches Outputs from the pininfo line outpins = re.findall('[a-zA-Z0-9]*:[Oo]', newline) #Fetches Inputs from the pininfo line inpins = re.findall('[a-zA-Z0-9]*:[Ii]', newline) #Fetches Vdd pin from the pininfo line vddpin = str(re.search('[a-zA-Z0-9]*:[Pp]', newline)[0]) #Fetches Gnd pin from the pininfo line gndpin = str(re.search('[a-zA-Z0-9]*:[Gg]', newline)[0]) #Check if its missing output pins if is_empty(outpins): print("pattern not found outputs") else: for out in outpins: print("Output Pins:", out) outputs.append(out.replace(':O', '')) #Check if its missing output pins if is_empty(inpins): print("pattern not found outputs") else: for in_pin in inpins: print("input Pins:", in_pin) inputs.append(in_pin.replace(':O', '')) #Check if its missing vdd pins if is_empty(vddpin): print("pattern not found outputs") return -3 else: print("Circuit Supply Pin:", vddpin) vddpin = vddpin.replace(':P', '') #Check if its missing gnd pins if is_empty(gndpin): print("pattern not found outputs") return -3 else: print("Circuit Ground Pin:", gndpin) gndpin = gndpin.replace(':G', '') #=========================================================================== #Transistor Lines elif ("pch" in newline.lower()) or ("nch" in newline.lower()): print("\n=========================") name = newline.split()[0] print("Name:", name) source = newline.split()[1] print("Source:", source) gate = newline.split()[2] print("Gate:", gate) drain = newline.split()[3] print("Drain:", drain) bulk = newline.split()[4] print("Bulk:", bulk) ttype = newline.split()[5] print("Type:", ttype) wsize = re.findall('[Ww]=[0-9Ee]*.[0-9Ee]*[\-+0-9]*', newline) if is_empty(wsize): print("pattern not found W size") else: wsize = wsize[0].replace('w=', '') wsize = wsize.replace('W=', '') wsize = float(wsize) print("W Size:", wsize) lsize = re.findall('[Ll]=[0-9Ee]*.[0-9Ee]*[\-+0-9]*', newline) if is_empty(lsize): print("pattern not found L Size") else: lsize = lsize[0].replace('l=', '') lsize = lsize.replace('L=', '') lsize = float(lsize) print("L Size:", lsize) fingers = re.findall('nf=[0-9]*', newline.lower()) if is_empty(fingers): print("pattern not found: Number of Fingers") fingers = 1 else: fingers = fingers[0].replace('nf=', '') fingers = fingers.replace('NF=', '') fingers = int(fingers) print("Fingers:", fingers) if (ttype.lower() == "pch"): mos = Transistor(name, source, gate, 
drain, bulk, ttype, wsize, fingers, lsize) ptransistors.append(mos) elif (ttype.lower() == "nch"): mos = Transistor(name, source, gate, drain, bulk, ttype, wsize, fingers, lsize) ntransistors.append(mos) f.close() #=========================================================================== #Prints Number of Fetched Transistors #=========================================================================== print("\n\n============================================================") print("The Circuit Contains:") print("PMOS TRANSISTORS", len(ptransistors)) print("NMOS TRANSISTORS", len(ntransistors)) print("\n\n============================================================") #=========================================================================== #Creates Networkx Node Graph and Include Nodes #=========================================================================== G = nx.Graph() #Creates an graph called G color_map = [] #list that will define node colors node_size = [] #list that will define node sizes #----------------------------------------- #Searches Nodes and Color them #----------------------------------------- G.add_node(vddpin) #create vdd node color_map.append('green') node_size.append(2000) G.add_node(gndpin) #create gnd node color_map.append('green') node_size.append(2000) for outpin in outputs: G.add_node(outpin) color_map.append('magenta') node_size.append(1000) for n in ptransistors: G.add_node(n.get_name()) color_map.append('red') node_size.append(500) for n in ntransistors: G.add_node(n.get_name()) color_map.append('blue') node_size.append(500) for n in ptransistors: G.add_edge(n.get_name(), n.get_source()) color_map.append('yellow') node_size.append(100) G.add_edge(n.get_name(), n.get_drain()) color_map.append('yellow') node_size.append(100) for n in ntransistors: G.add_edge(n.get_name(), n.get_source()) color_map.append('yellow') node_size.append(100) G.add_edge(n.get_name(), n.get_drain()) color_map.append('yellow') node_size.append(100) #=========================================================================== #Fetches Common Nodes #=========================================================================== common_nodes = [] for n in ntransistors: for p in ptransistors: if (n.get_drain() == p.get_drain()): common_nodes.append(n.get_drain()) elif (n.get_drain() == p.get_source()): common_nodes.append(n.get_drain()) elif (n.get_source() == p.get_drain()): common_nodes.append(n.get_source()) elif (n.get_source() == p.get_source()): common_nodes.append(n.get_source()) common_nodes = list(dict.fromkeys(common_nodes)) #=========================================================================== #Searches Euler Paths from COMMON_NODE to VDD #=========================================================================== for common_node in common_nodes: print("PATH FROM", common_node, "TO", vddpin) print("============================================================") for path in nx.all_simple_paths(G, source=common_node, target=vddpin): nodes_path_p = [] stack = 0 if not (gndpin) in path: print("Full Path:", path) for node in ptransistors: if node.get_name() in path: stack = stack + 1 nodes_path_p.append(node) for node in nodes_path_p: if node.get_stack() < stack: node.set_stack(stack) print("Stack Size =", stack) print( "============================================================") #=========================================================================== #Searches Euler Paths from COMMON_NODE to VSS #=========================================================================== 
for common_node in common_nodes: print("PATH FROM", common_node, "TO", gndpin) print("============================================================") for path in nx.all_simple_paths(G, source=common_node, target=gndpin): nodes_path_n = [] stack = 0 if not (vddpin) in path: print("Full Path:", path) for node in ntransistors: if node.get_name() in path: stack = stack + 1 nodes_path_n.append(node) for node in nodes_path_n: if node.get_stack() < stack: node.set_stack(stack) print("Stack Size =", stack) print( "============================================================") #=========================================================================== #Drawn Plot #=========================================================================== print("============================================================") nx.draw(G, node_size=node_size, node_color=color_map, with_labels=True) #=========================================================================== #Print Calculed Stack Size for Each Transistor #=========================================================================== for node in ptransistors: sizew = node.get_wsize() * node.get_stack() * float(wpwn_ratio) print("Node:", node.get_name(), "StackFactor:", node.get_stack(), "Calculated Size:", sizew, " Original Size:", node.get_wsize()) for node in ntransistors: sizew = node.get_wsize() * node.get_stack() print("Node:", node.get_name(), "StackFactor:", node.get_stack(), "Calculated Size:", sizew, " Original Size:", node.get_wsize()) plt.show() #=========================================================================== #Write File #=========================================================================== file = sys.argv[2] in_file = open(file, "r") file2 = "out_" + file out_file = open(file2, "w") for line in in_file: found = 0 for node in ptransistors: if node.get_name() in line: sizew = node.get_wsize() * node.get_stack() * float(wpwn_ratio) out_file.write(node.get_name() + " " + node.get_source() + " " + node.get_gate() + " " + node.get_drain() + " " + node.get_bulk() + " " + node.get_ttype() + " W=" + str(sizew) + " NF=" + str(node.get_fingers()) + " L=" + str(node.get_lsize()) + "\n") found = 1 for node in ntransistors: if node.get_name() in line: sizew = node.get_wsize() * node.get_stack() out_file.write(node.get_name() + " " + node.get_source() + " " + node.get_gate() + " " + node.get_drain() + " " + node.get_bulk() + " " + node.get_ttype() + " W=" + str(sizew) + " NF=" + str(node.get_fingers()) + " L=" + str(node.get_lsize()) + "\n") found = 1 if found != 1: out_file.write(line) in_file.close() out_file.close() return 0
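# Toy sketch (the Transistor class and netlist parsing live in the script above):
# the stack factor assigned to each device is simply the number of transistors that
# sit in series on a simple path from a common node to a supply rail, with paths
# that pass through the opposite rail discarded.  Two stacked NMOS devices between
# an output net and ground illustrate the count.
import networkx as nx

g = nx.Graph()
g.add_edge('M1', 'out')
g.add_edge('M1', 'n1')      # M1 sits between out and n1
g.add_edge('M2', 'n1')
g.add_edge('M2', 'gnd')     # M2 sits between n1 and gnd
nmos_names = {'M1', 'M2'}

for path in nx.all_simple_paths(g, 'out', 'gnd'):
    if 'vdd' in path:        # skip any route through the opposite rail
        continue
    stack = sum(1 for node in path if node in nmos_names)
    print(path, 'stack =', stack)    # ['out', 'M1', 'n1', 'M2', 'gnd'] stack = 2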