Example #1
def maximum_shortest_path(G):
	max_path = 0
	for i in nx.nodes_iter(G):
		for j in nx.nodes_iter(G):
			if nx.has_path(G, i,j):
				path = nx.shortest_path_length(G, i, j)
				if path > max_path:
					max_path = path
	return max_path
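
nodes_iter() was removed in NetworkX 2.0, so the snippet above only runs against the 1.x API. A minimal sketch of the same computation (longest shortest-path length over all reachable pairs) against the 2.x API; the _v2 name and the toy graph are illustrative only:

import networkx as nx

def maximum_shortest_path_v2(G):
    # Longest shortest-path length over all reachable ordered pairs.
    max_path = 0
    for _source, lengths in nx.all_pairs_shortest_path_length(G):
        longest = max(lengths.values())
        if longest > max_path:
            max_path = longest
    return max_path

if __name__ == "__main__":
    G = nx.path_graph(5)                  # toy graph for illustration
    print(maximum_shortest_path_v2(G))    # 4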
Example #2
def find_nodes_with_degree(graph, filter_function):
    junctures = []
    for node in nx.nodes_iter(graph):
        degree = nx.degree(graph, node)
        if filter_function(degree):
            junctures.append(node)
    return junctures
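
Under NetworkX 2.x the per-node nx.degree(graph, node) calls can be replaced by a single pass over the degree view. A minimal sketch with a lambda filter; the _v2 name and the star graph are illustrative only:

import networkx as nx

def find_nodes_with_degree_v2(graph, filter_function):
    # graph.degree() yields (node, degree) pairs in NetworkX 2.x.
    return [node for node, degree in graph.degree() if filter_function(degree)]

if __name__ == "__main__":
    G = nx.star_graph(4)                                    # hub node 0 has degree 4
    print(find_nodes_with_degree_v2(G, lambda d: d >= 4))   # [0]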
Example #3
    def compare_list(self, graph_list, types, h, D):
        """
        Compute the all-pairs kernel values for a list of graph representations of verification tasks
        """
        all_graphs_number_of_nodes = 0
        node_labels = [0] * (h+1)
        node_depth = [0] * len(graph_list)
        edge_types = [0] * len(graph_list)
        edge_truth = [0] * len(graph_list)

        for it in range(h+1):
            node_labels[it] = [0] * len(graph_list)

        for i, g in enumerate(graph_list):
            node_labels[0][i] = {key: self._compress(value)
                                 for key, value in nx.get_node_attributes(g, 'label').items()}
            node_depth[i] = nx.get_node_attributes(g, 'depth')
            edge_types[i] = nx.get_edge_attributes(g, 'type')
            edge_truth[i] = nx.get_edge_attributes(g, 'truth')
            all_graphs_number_of_nodes += len([node for node in nx.nodes_iter(g) if node_depth[i][node] <= D])
            # if i == 0:
            #     self._graph_to_dot(g, node_labels[0][i], "graph{}.dot".format(i))

        # all_graphs_number_of_nodes is upper bound for number of possible edge labels
        phi = np.zeros((all_graphs_number_of_nodes, len(graph_list)), dtype=np.uint64)

        # h = 0
        for i, g in enumerate(graph_list):
            for node in g.nodes_iter():
                if node_depth[i][node] <= D:
                    label = node_labels[0][i][node]
                    phi[self._compress(label), i] += 1

        K = np.dot(phi.transpose(), phi)

        # h > 0
        for it in range(1, h+1):
            # Todo check if the shape fits in all cases
            phi = np.zeros((2*all_graphs_number_of_nodes, len(graph_list)), dtype=np.uint64)

            print('Updating node labels of graphs in iteration {}'.format(it), flush=True)

            # for each graph update edge labels
            for i, g in tqdm(list(enumerate(graph_list))):
                node_labels[it][i] = {}
                for node in g.nodes_iter():
                    if node_depth[i][node] <= D:
                        label_collection = self._collect_labels(node, i, g, it-1, node_labels, node_depth, types, D, edge_types, edge_truth)
                        long_label = "_".join(str(x) for x in [np.concatenate([np.array([node_labels[it-1][i][node]]),
                                                               np.sort(label_collection)])])
                        node_labels[it][i][node] = self._compress(long_label)
                        phi[self._compress(long_label), i] += 1
                        # node_labels[it][i][node] = long_label
                        # phi[self._compress(long_label), i] += 1
                # if i == 0:
                #     self._graph_to_dot(g, node_labels[it][i], "graph{}_it{}.dot".format(i, it))

            K += np.dot(phi.transpose(), phi)

        return K
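
compare_list above is a Weisfeiler-Lehman-style subtree kernel: in every iteration each node's label is replaced by a compressed form of its old label plus the sorted labels of its neighbours, the phi matrix counts label occurrences per graph, and K accumulates phi.T.dot(phi) over the h iterations. Below is a self-contained sketch of just that label-refinement step, without the depth bound, edge types, or truth flags of the original; all names here are illustrative:

import networkx as nx

def wl_refine_labels(G, labels, compress):
    # One refinement step: new label = compress(old label + sorted neighbour labels).
    new_labels = {}
    for node in G.nodes():
        neighbour_labels = sorted(str(labels[n]) for n in G.neighbors(node))
        long_label = "_".join([str(labels[node])] + neighbour_labels)
        new_labels[node] = compress(long_label)
    return new_labels

if __name__ == "__main__":
    G = nx.path_graph(4)
    labels = {n: 0 for n in G.nodes()}                    # uniform initial labels
    table = {}
    compress = lambda s: table.setdefault(s, len(table))  # toy label compressor
    for _ in range(2):
        labels = wl_refine_labels(G, labels, compress)
    print(labels)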
Example #4
def getAllOpenTriangles(G, sets):   #  open[(u,v)]:  third edge of open triangles (u,v) in
    opens = {}          #   reverse:  node --> teamID
    index = {}
    # close = {}
    for v in nx.nodes_iter(G):
        s1 = set(G[v])
        for w in s1:
            s2 = set(G[w])
            pair = (v,w) if v < w else (w,v)
            
            if pair not in opens:
                opens[pair] = set()
                # close[pair] = set()
            else:   # add following two lines
                continue
            opens[pair] |= set([(i,v) if i < v else (v,i) for i in (s1 - s2 - set([w]))])
            opens[pair] |= set([(i,w) if i < w else (w,i) for i in (s2 - s1 - set([v]))])
            # close[pair] = s1 & s2
            for teamID, nodes in sets.iteritems():
                if v in nodes and w in nodes:
                    if pair not in index:
                        index[pair] = set()
                    index[pair].add(teamID)  # which teams edge(v,w) belongs to
    for pair in list(opens):
        if not opens[pair]: del opens[pair]
    return opens, index
 def getTopXScores(self, role, topX):
     scores = []
     for node in nx.nodes_iter(self.G):
         scores.append((node, self.getRoleScore(node, role)))
     # Sort by score:
     scores = sorted(scores, reverse=True, key=lambda tup: tup[1])
     return scores[0:topX]
Example #6
def analyze_event_sequence_graph(graph_file):
    G = cPickle.load(open(graph_file, 'rb'))
    
    sequences_by_degree = {}
    for n in nx.nodes_iter(G):
        if G.node[n]['type'] == 'section':
            sequences_by_degree[n] = G.degree(n)

    sorted_seq = sorted(sequences_by_degree.iteritems(), key=itemgetter(0))

    print sorted_seq

    # plot parameters
    imw = 1024.0 # the full image width
    imh = 1024.0
    lm = 40.0
    rm = 50.0
    tm = 50.0
    bm = 50.0
    res = 72.0
    
    imwi = imw/res
    imhi = imh/res
    fig = mplt.figure(figsize=(imwi, imhi), dpi=res)
    ph = imh - tm - bm # the height for both matrices
    pw = imw - lm - rm
    ax = fig.add_axes((lm/imw, bm/imh, pw/imw, ph/imh))

    ax.plot(range(len(sorted_seq)),[x[1] for x in sorted_seq])

    print [x for x in G.edges(sorted_seq[0][0], data=True)]
    def getRoleCounts(self):
        roleranks = dict()
        for role in self.Roles:
            scores = []
            for node in nx.nodes_iter(self.G):
                scores.append(self.getRoleScore(node, role))

            #Calculate ranks of scores (I don't understand this part either)
            u, v = np.unique(scores, return_inverse=True)
            ranks = (np.cumsum(np.concatenate(([0], np.bincount(v)))))[v]

            roleranks[role] = ranks

        rolenums = dict()
        for role in self.Roles:
            rolenums[role] = 0
        rolenums[None] = 0
        # Get highest ranked role for each node
        for node in range(nx.number_of_nodes(self.G)):
            highest_role = None
            max_rank = 0
            for role in self.Roles:
                if roleranks[role][node] > max_rank:
                    max_rank = roleranks[role][node]
                    highest_role = role
            print(highest_role)
            rolenums[highest_role] += 1
        return rolenums
Example #8
def create_graph_features(g):
    assert g is not None
    print "starting calculating graphcut features."
    deg_cent_dic = nx.degree_centrality(g)
    betw_cent_dic = nx.betweenness_centrality(g)
    clustering_coeff_dic = nx.clustering(g, weight=None)
    #    eigenvector_cent_list = nx.eigenvector_centrality(g, weight = None)

    egoNetList = []
    for n in nx.nodes_iter(g):
        resultObj = Result()
        resultObj.nodeId = n
        resultObj.egoNetGraph = nx.ego_graph(g,
                                             n,
                                             radius=1,
                                             center=True,
                                             undirected=False,
                                             distance=None)
        resultObj.egoNetDegree = nx.number_of_nodes(resultObj.egoNetGraph)
        resultObj.nofEdges = nx.number_of_edges(resultObj.egoNetGraph)

        # Assigning graph cut features
        resultObj.deg_cent = deg_cent_dic[n]
        resultObj.betw_cent = betw_cent_dic[n]
        resultObj.clustering_coeff = clustering_coeff_dic[n]
        # resultObj.eigenvector_cent = eigenvector_cent_list[n]
        egoNetList.append(resultObj)

    return egoNetList
Example #9
 def connected_observations(self, subgraph):
     objids = [node for node in nx.nodes_iter(subgraph)]
     points = [p for p in self.objects if p.objid in objids]
     cat = Catalog(points)
     cat.add_constant('obscode', 807)
     cat.add_constant('err', self.astrometric_err)
     return cat
Example #10
def IR(G):
    '''
    IRIE algorithm
    '''
    r = [1] * len(G)  # initialization
    iteration = 20
    alpha = 0.7
    degreeG = nx.degree(G)  # node degrees, keyed by node
    while (iteration):
        for u in nx.nodes_iter(G):
            sumpr = 0
            for uneighbor in nx.all_neighbors(G, u):
                p = 1.0 / degreeG[uneighbor]  # propagation probability
                sumpr = sumpr + p * r[uneighbor]
            r[u] = 1 + alpha * sumpr
        iteration = iteration - 1
    matG = []
    nodeG = nx.nodes(G)
    matG.append(nodeG)
    matG.append(r)
    matG = np.array(matG)
    result = matG.T[np.lexsort(-matG)].T  # sort nodes in descending order of their scores
    vrank = result[0]
    vrank = vrank.astype(np.int32)
    return vrank
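
The routine above still assumes consecutive integer node labels, since r is a plain list indexed by node. A hedged rewrite keyed by node against the NetworkX 2.x API, returning nodes sorted by decreasing score; the _v2 name is illustrative:

import networkx as nx

def IR_v2(G, iterations=20, alpha=0.7):
    # IRIE-style ranking: r(u) = 1 + alpha * sum over neighbours v of r(v) / deg(v).
    degree = dict(G.degree())
    r = {u: 1.0 for u in G.nodes()}
    for _ in range(iterations):
        for u in G.nodes():
            sumpr = sum(r[v] / degree[v] for v in G.neighbors(u) if degree[v] > 0)
            r[u] = 1 + alpha * sumpr
    # nodes sorted by decreasing influence score
    return sorted(r, key=r.get, reverse=True)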
Example #11
def p3_free(input_graph):
    DEBUG = False
    if DEBUG == True:
        print "Called p3_free"
    G = input_graph
    for u in nx.nodes_iter(G):
        u_neighbors = G.neighbors(u)
        if DEBUG == True:
            print "u " + str(u)
            print u_neighbors
        for w in u_neighbors:
            w_neighbors = G.neighbors(w)
            w_neighbors.remove(u)  # remove backtrack
            if DEBUG == True:
                print "w " + str(w)
                print w_neighbors
            for x in w_neighbors:
                x_neighbors = G.neighbors(x)
                x_neighbors.remove(w)  # remove backtrack
                if DEBUG == True:
                    print "x " + str(x)
                    print x_neighbors
                if u not in x_neighbors:
                    return False
    return True
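
The same test reads more directly as "every two neighbours of a common node are themselves adjacent", which also avoids mutating the neighbour lists; a minimal sketch against the NetworkX 2.x API, with an illustrative _v2 name:

from itertools import combinations

def p3_free_v2(G):
    # A graph is P3-free iff no induced path on three vertices exists, i.e.
    # every pair of neighbours of any node is joined by an edge.
    for u in G.nodes():
        for v, w in combinations(G.neighbors(u), 2):
            if not G.has_edge(v, w):
                return False
    return True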
Example #13
def analyze_event_sequence_graph(graph_file):
    G = cPickle.load(open(graph_file, 'rb'))

    sequences_by_degree = {}
    for n in nx.nodes_iter(G):
        if G.node[n]['type'] == 'section':
            sequences_by_degree[n] = G.degree(n)

    sorted_seq = sorted(sequences_by_degree.iteritems(), key=itemgetter(0))

    print sorted_seq

    # plot parameters
    imw = 1024.0  # the full image width
    imh = 1024.0
    lm = 40.0
    rm = 50.0
    tm = 50.0
    bm = 50.0
    res = 72.0

    imwi = imw / res
    imhi = imh / res
    fig = mplt.figure(figsize=(imwi, imhi), dpi=res)
    ph = imh - tm - bm  # the height for both matrices
    pw = imw - lm - rm
    ax = fig.add_axes((lm / imw, bm / imh, pw / imw, ph / imh))

    ax.plot(range(len(sorted_seq)), [x[1] for x in sorted_seq])

    print [x for x in G.edges(sorted_seq[0][0], data=True)]
    print sorted_seq[0][0]
Example #14
def localEfficiencyCalc(GL):
    nodeNum = len(GL)
    localEfficiency = 0
    if nodeNum > 1:
        local1 = []
        for boxName in nx.nodes_iter(GL):
            radiusNodeList = GL.neighbors(boxName)
            boxNet = nx.Graph(GL.subgraph(radiusNodeList))
            boxNodes = len(boxNet)
            boxMat = nx.to_numpy_matrix(boxNet)
            boxSparse = csgraph_from_dense(boxMat)
            boxMatPath = shortest_path(boxSparse, method='auto', directed=False, return_predecessors=False, unweighted=True, overwrite=False)    
            boxPathList = []
            for i in range(boxNodes-1):
                for j in range(i+1, boxNodes):
                    tempDist = boxMatPath[i][j]
                    if np.isfinite(tempDist):
                        boxPathList.append(np.divide(1, tempDist, dtype = float))
            if len(boxPathList) > 0:
                local1.append(np.mean(boxPathList))
            else:
                local1.append(0)    
        localEfficiency = np.mean(local1)    
    
    return localEfficiency                    
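
NetworkX 2.x ships this measure directly as nx.local_efficiency (alongside nx.global_efficiency), so for unweighted graphs the hand-rolled SciPy shortest-path code above can usually be replaced by the library call; a short usage sketch on a toy graph:

import networkx as nx

G = nx.erdos_renyi_graph(50, 0.1, seed=42)  # toy graph for illustration
print(nx.local_efficiency(G))               # average efficiency of each node's neighbourhood subgraph
print(nx.global_efficiency(G))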
Example #15
 def updateLayout(self):
     self.pos = nx.spring_layout(self.graph)
     self.x = []
     self.y = []
     for n in nx.nodes_iter(self.graph):
         self.x.append(self.pos[n][0])
         self.y.append(self.pos[n][1])
Example #16
def graphToCSV(G,graphtype, section, test):
    directory = "Datarows/"+graphtype+"/"
    if not os.path.exists(directory):
        os.makedirs(directory)
    writer_true = csv.writer(open(directory+section+"_true.csv", "a"))
    writer_false = csv.writer(open(directory+section+"_false.csv", "a"))
    A = nx.to_numpy_matrix(G)
    A = np.reshape(A, -1)
    arrGraph = np.squeeze(np.asarray(A))

    nb_nodes = 0
    for node in nx.nodes_iter(G):
        if len(G.neighbors(node))>0:
            nb_nodes += 1

    meta_info = [test,nb_nodes,G.number_of_edges(),nx.number_connected_components(G)]
    # Keep the same number of rows for each truth value #
    if test:
        if os.path.getsize(directory+section+"_true.csv") <= os.path.getsize(directory+section+"_false.csv"):
            writer_true.writerow(np.append(arrGraph, meta_info))
            return True
        else:
            return False
    else:
        if os.path.getsize(directory+section+"_false.csv") <= os.path.getsize(directory+section+"_true.csv"):
            writer_false.writerow(np.append(arrGraph, meta_info))
            return True
        else:
            return False
Example #17
def remove_catalysis(Graph):
    """
    DESCRIPTION:\n
        Return a Graph where Catalysis were removed.\n
        Remove Catalysis nodes from a graph generated with a BioPAX file.\n
        Catalysis has to be a 'biopax.entity_type' attribute of node.\n
    
    USAGE:\n
        Graph - a graph generated with NetworkX
    """
    print_nodes(Graph)
    targets = []
    for i in nx.nodes_iter(Graph):
        if Graph.node[i]['biopax.entity_type'] == 'Catalysis':
            targets.append(i)
            neigh = Graph[i].keys() # neighbors of catalysis node
            enz = [] # enzyme neighbors of catalysis node
            react = [] # reaction neighbors of catalysis node
            pairs = []
            for n in neigh:
                if Graph[i][n][0]['label'] == 'CONTROLLER':
                    enz.append(n)
                elif Graph[i][n][0]['label'] == 'CONTROLLED':
                    react.append(n)
            for p in it.product(enz,react): #compute couple E1 R1 - E2 R1, etc.
                pairs.append(p)
            Graph.add_edges_from(pairs,root_index = '',label = 'CONTROLLER')
    if len(targets) == 0:
        print "No Catalysis node found in network"
    else:
        Graph.remove_nodes_from(targets)
        print "%d Catalysis nodes removed" % (len(targets))
        print_nodes(Graph)
    return Graph
Example #18
def point_sizes(g, node_sizes):
    sizes = []
    for n in nx.nodes_iter(g):
        if (node_sizes.has_key(n)):
            sizes.append(node_sizes[n])
        else:
            sizes.append(0)
    return sizes
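
dict.get with a default collapses the helper above to a single comprehension and drops the Python-2-only has_key call; a minimal sketch with an illustrative _v2 name:

def point_sizes_v2(g, node_sizes):
    # Default to size 0 for nodes without an explicit entry.
    return [node_sizes.get(n, 0) for n in g.nodes()]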
Example #19
def pkg_filter(g):
    # removes built-in packages
    non_built_in = g.nodes()
    for n in nx.nodes_iter(g):
        prime_pkg = n.split('.')
        if prime_pkg[0] in built_in_pkgs:
            non_built_in.remove(n)
    return g.subgraph(non_built_in)
Example #20
 def draw(self):
     if(drawgif):
         nodeColors = [x.color for x in nx.nodes_iter(self.worldgraph)]
         plt.figure(figsize=(8,6))
         plt.title("Network at Age "+str(self.age))
         nx.draw(self.worldgraph, pos=self.nodeLayout, node_color=nodeColors, node_size=30, hold=1)
         plt.savefig("graphseries/graph"+str(self.age).zfill(4)+".png", dpi=250)
         plt.close()
Example #21
def coordinate(g):
    pos = nx.spring_layout(g)
    
    x = []
    y = []
    for n in nx.nodes_iter(g):
        x.append(pos[n][0])
        y.append(pos[n][1])
    return pos, x, y
Example #22
    def refine(self, threshold):

        big_nodes = []
        for n in nx.nodes_iter(self.graph):
            if nx.degree(self.graph, n) >= threshold:
                big_nodes.append(n)

        sg = self.graph.subgraph(big_nodes)
        self.setGraph(sg);
def printCommunities(graph, membership):
    for edge in membership:
        nodes = edge.split("-")
        u = nodes[0]
        v = nodes[1]
        graph.remove_edge(u, v)

    for node in nx.nodes_iter(graph):
        pass
Example #24
def nearest_vertex(G,measurement):
	minimum_distance = 1000000
	for node in nx.nodes_iter(G):
		pos = G.node[node]['pos']
		distance = math.sqrt((measurement[0]-pos[0])**2+(measurement[1]-pos[1])**2)
		if distance< minimum_distance:
			minimum_distance=distance
			result = node
	return result
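
The same lookup is a one-liner with min() and a key function, which also removes the 1000000 sentinel; a minimal sketch assuming, as in the example, that positions are stored under the 'pos' node attribute (NetworkX 2.x attribute access):

import math

def nearest_vertex_v2(G, measurement):
    # Node whose 'pos' attribute is closest to the measurement point.
    return min(
        G.nodes(),
        key=lambda n: math.hypot(measurement[0] - G.nodes[n]['pos'][0],
                                 measurement[1] - G.nodes[n]['pos'][1]),
    )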
Example #25
def data_polish(Graph, polish_ratio = 0.3, loop = 30):
        for i in xrange(loop):
            intersection = {}
            for j in nx.nodes_iter(Graph):
                    intersection[j] = 0
            temp = Graph.copy()
            for u in nx.nodes_iter(Graph):
                L = []
                for w in (temp.neighbors(u)+[u]):
                    for v in [x for x in (temp.neighbors(w)+[w]) if x < u]:
                        if(intersection[v] == 0):
                            L.append(v)
                        intersection[v] += 1
                for v in L:
                    sim = float(intersection[v]) / ((temp.degree(v)+1) + (temp.degree(u)+1) - intersection[v])
                    polish(Graph, u, v, sim, polish_ratio)
                    intersection[v] = 0
        print "end", i+1 , "times"
Example #26
 def structural_hole(self, n):
     if self.node_communities is None:
         self.node_communities = dict()
         communities = list(fx.read_communities())
         for node in nx.nodes_iter(self.G):
             self.node_communities[node] = 0
         for community in communities:
             for node in community:
                 self.node_communities[node] += 1
     return self.node_communities[n]
Example #27
def create_egonet_features(g):
    egoNetList = []
    for n in nx.nodes_iter(g):
        resultObj = Result()
        resultObj.egoNetGraph = nx.ego_graph(g, n, radius=1, center=True, undirected=False, distance=None)
        resultObj.egoNetDegree = nx.number_of_nodes(resultObj.egoNetGraph)
        resultObj.nofEdges = nx.number_of_edges(resultObj.egoNetGraph)
        egoNetList.append(resultObj)
 #       print resultObj
    return egoNetList
def L_P_jaccard_coefficient(graph, threshold_add, threshold_del, time):
    # parameters initialization
    network = graph
    # print nx.number_of_edges(network)
    num_add = int(
        threshold_add *
        nx.number_of_edges(network))  # the number of egdes to be added
    # num_del = int(threshold_del * nx.number_of_edges(network))  # the number of edges to be deleted
    nodes_pair_with_edge = []  # the pairs of nodes with edges
    nodes_pair_without_edge = []  # the pairs of nodes without edges
    probability_add = []  # the probabilities of the pairs of nodes to be added
    # probability_del = []  # the probabilities of the pairs of nodes to be deleted
    u = 0  # node i
    v = 0  # node j
    score = 0  # the score of each pair of nodes in link prediction model
    # total_score_with_edge = 0.0  # the sum of scores of pairs of nodes with edge
    total_score_without_edge = 0.0  # the sum of scores of pairs of nodes without edge

    #  calculate the score of each pair of nodes
    for i, elei in enumerate(nx.nodes_iter(network)):
        for j, elej in enumerate(nx.nodes_iter(network)):

            if i >= j:
                continue
            if not network.has_edge(elei, elej):
                try:
                    pre = nx.jaccard_coefficient(network, [(elei, elej)])
                    for u, v, s in pre:
                        score = s
                except:
                    continue
                total_score_without_edge += score
                nodes_pair_without_edge.append((elei, elej, score))

    for a, b, c in nodes_pair_without_edge:
        probability_add.append(
            c / total_score_without_edge
        )  # calculate the probabilities of edges to be added
    # select edges to be added according to probabilities
    edges_add = selection_probability.select(nodes_pair_without_edge,
                                             probability_add, num_add)
    for a, b, c in edges_add:
        network.add_edge(a, b)  # add selected edges
Example #29
 def updateSizes(self, amplification = 40):
     for n in nx.nodes_iter(self.graph):
         if self.sizeDict.has_key(n):
             pass
         else:
             self.sizeDict[n] = 0;
     if ( len(self.sizeDict) != len(self.graph.nodes()) ):
         print 'panic'
     self.sizes = []
     for key in self.sizeDict:
         self.sizes.append(self.sizeDict[key] * amplification)
Example #30
def refine(g, threshold):

    big_nodes = []
    
    for n in nx.nodes_iter(g):
        if nx.degree(g, n) >= threshold:
            big_nodes.append(n)

    sg = g.subgraph(big_nodes)

    return sg
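
With the NetworkX 2.x degree view the same threshold filter collapses to one comprehension; a minimal sketch with an illustrative _v2 name:

def refine_v2(g, threshold):
    # Keep only nodes whose degree meets the threshold.
    big_nodes = [n for n, d in g.degree() if d >= threshold]
    return g.subgraph(big_nodes)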
Example #31
def calculate_nearest_edgenode(graph, node, distance_from_node):
    current_set = set([node])
    visited_set = set()
    visited_set.update(current_set)
    for m in nx.nodes_iter(graph):
        if m in nx.single_source_shortest_path_length(
                graph, node, cutoff=distance_from_node) and\
           m not in visited_set:
            visited_set.add(m)
    current_set = visited_set
    return current_set
Example #32
def binaryAdd(G1, G2):
    '''
    "Adds" G1 and G2 on their 'class' attribute
    '''
    Gout = G1.copy()
    for v in nx.nodes_iter(Gout):
        Gout.node[v]['class'] = G1.node[v]['class'] + G2.node[v]['class']
        if Gout.node[v]['class'] == 2:
            Gout.node[v]['class'] = 1
    
    return Gout
Example #33
def binarySub(G1, G2):
    '''
    "Substracts" G2 to G1 on their 'class' attribute
    '''
    Gout = G1.copy()
    for v in nx.nodes_iter(Gout):
        Gout.node[v]['class'] = G1.node[v]['class'] - G2.node[v]['class']
        if Gout.node[v]['class'] == -1:
            Gout.node[v]['class'] = 0

    return Gout
Example #34
def binaryOr(G1, G2):
    '''
    Performs an OR on the attribute 'class' of G1 and G2.
    Equivalent to an union
    '''
    Gout = G1.copy()
    for v in nx.nodes_iter(Gout):
        Gout.node[v]['class'] = 0
        if G1.node[v]['class'] == 1 or G2.node[v]['class'] == 1:
            Gout.node[v]['class'] = 1

    return Gout
Example #35
def binaryAnd(G1, G2):
    '''
    Perform an AND on the attribute 'class' of G1 and G2.
    Equivalent to an intersection
    '''
    Gout = G1.copy()
    for v in nx.nodes_iter(Gout):
        Gout.node[v]['class'] = 0
        if G1.node[v]['class'] == 1 and G2.node[v]['class'] == 1:
            Gout.node[v]['class'] = 1

    return Gout
Example #36
def computeInvertedAttr(G,queryAttrs):
    'Build an inverted index from query attributes to the nodes carrying them'
    attrNodeDict=defaultdict(list) # <attribute, list of nodes>
    for id in nx.nodes_iter(G):
        # print 'id:',str(id),G.node[id]
        #################### a node may exist without any attributes (2017.3.4) ###############
        if G.node[id].has_key('attr'):
            if G.node[id]['attr'] is not None: ## check that it is not None
                for attr in  G.node[id]['attr']:
                        if attr in queryAttrs:
                            attrNodeDict[attr].append(id)
    return attrNodeDict
Example #37
def computeVAttrScore(H,VwList,queryAttributes,queryVertexes):
    'Compute the attribute score of each node'
     ## (re: 2017.3.2 nodeAttScore changed to a dict)
    nodeAtteSocreDict={}
    for n in nx.nodes_iter(H):
        if n not in queryVertexes:  #### query vertices themselves are not scored
            nattr=H.node[n]['attr']
            if nattr is not None:
                tmp=[val for val in nattr if val in queryAttributes]  # intersection of the node's attributes with the query attributes
                score=sum([2.0*VwList[val]-1 for val in tmp])
                nodeAtteSocreDict[n]=score
            else:
                nodeAtteSocreDict[n]=0
    return nodeAtteSocreDict
Example #38
def drawLabelledGraph(G, title = None):
    '''
    Given a decimal graph G it draws the connected components (labels, or 'class' != 0) using different colors
    '''
    pos = nx.get_node_attributes(G, 'pos')
    dist = nx.get_node_attributes(G, 'dist')
    lblDict = nx.get_node_attributes(G, 'class')
    lblRange = lblDict[max(lblDict, key = lambda x: lblDict.get(x))]
    #Draws the nodes with no label
    thatNodes = [k for k in nx.nodes_iter(G) if G.node[k]['class'] == 0]
    nx.draw_networkx_nodes(G, pos, thatNodes, node_color = 'k', linewidths = 0)
    #Draws the labelled nodes
    for i in range(lblRange):
        thatNodes = [k for k in nx.nodes_iter(G) if G.node[k]['class'] == (i + 1)]
        r = random()
        v = random()
        b = random()
        color = (r, v, b) #Problem with coloring if 3 nodes in the connected component
        nx.draw_networkx_nodes(G, pos, thatNodes, node_color = color, linewidths = 0)
    nx.draw_networkx_edges(G, pos, edge_color = 'k')
    if title:
        plt.title(title)
    plt.show()
Example #39
def drawDistanceGraph(G, title = None):
    '''
    Given a decimal graph (with attribute 'dist') it draws the distance graph using nodes with dist = 0 as basis
    '''
    pos = nx.get_node_attributes(G, 'pos')
    dist = nx.get_node_attributes(G, 'dist')
    distRange = [v for k, v in dist.items() if v != float("inf")]
    distRange = list(set(distRange)) #kill duplicates
    #Draw the ones at inf distance :
    thatNodes = [k for k in nx.nodes_iter(G) if G.node[k]['dist'] == float("inf")]
    nx.draw_networkx_nodes(G, pos, thatNodes, node_color = 'r', linewidths = 0)
    #Draw normal nodes with shades of grey according to the distance
    for i in range(1, len(distRange)):
        thatNodes = [k for k in nx.nodes_iter(G) if G.node[k]['dist'] == i]
        nx.draw_networkx_nodes(G, pos, thatNodes, node_color = str(1 - (i * 1 / len(distRange))), linewidths = 0)
    #Draw the starting nodes
    #A list to be able to handle distances from a region
    thatStartNodes = [k for k in nx.nodes_iter(G) if G.node[k]['dist'] == 0]
    nx.draw_networkx_nodes(G, pos, thatStartNodes, node_color = 'y', linewidths = 0)

    nx.draw_networkx_edges(G, pos, edge_color = 'k')
    if title:
        plt.title(title)
    plt.show()
Example #40
def drawZoneofIGraph(G, title = None):
    '''
    Draw the zones-of-influence graph G, which is a decimal graph. Connected components are labelled with distinct positive values in 'class', and the zone of influence of a component is the set of nodes whose 'class' equals -label
    '''
    pos = nx.get_node_attributes(G, 'pos')
    dist = nx.get_node_attributes(G, 'dist')
    lblDict = nx.get_node_attributes(G, 'class')
    lblRange = lblDict[max(lblDict, key = lambda x: lblDict.get(x))]
    #Draws the nodes on the border of zones of influence
    thatNodes = [k for k in nx.nodes_iter(G) if G.node[k]['class'] == 0]
    nx.draw_networkx_nodes(G, pos, thatNodes, node_color = 'k', linewidths = 0)
    #Draws the labelled nodes (part of connected comp) and their zones of I
    alrdyDrawnEdges = []
    for i in range(lblRange):
        thatNodes = [k for k in nx.nodes_iter(G) if G.node[k]['class'] == (i + 1)]
        theirZI = [k for k in nx.nodes_iter(G) if G.node[k]['class'] == -(i + 1)]
        allThatNodes = thatNodes + theirZI
        thatEdges = []
        for v in allThatNodes:
            for vv in allThatNodes:
                if G.has_edge(v, vv):
                    thatEdges.append((v, vv))
                    alrdyDrawnEdges.append((v, vv))
        r = random()
        v = random()
        b = random()
        color = (r, v, b) #Problem with coloring if 3 nodes in the connected component
        nx.draw_networkx_nodes(G, pos, thatNodes, node_color = color, linewidths = 0.2)
        nx.draw_networkx_nodes(G, pos, theirZI, node_color = color, linewidths = 0, alpha = 0.7)
        nx.draw_networkx_edges(G, pos, thatEdges, edge_color = 'k', alpha = 0.2) #Problem with edge_color = color ?
    #Draw the remaining edges
    remainingEdges = [e for e in G.edges() if e not in alrdyDrawnEdges]
    nx.draw_networkx_edges(G, pos, remainingEdges, edge_color = 'k')
    if title:
        plt.title(title)
    plt.show()
def normalized_Laplacian(G):
	nodes = nx.nodes(G)
	total_nodes = nx.number_of_nodes(G)
	I = np.identity(total_nodes)
	D = np.identity(total_nodes)
	A = nx.adj_matrix(G)

	idx = 0
	for a_node in nx.nodes_iter(G):
		D[idx][idx] = G.degree(a_node)
		idx = idx + 1

	inv_sqrt_D = lg.inv(np.sqrt(D))
	M = (inv_sqrt_D * A * inv_sqrt_D)
	L = I - M
	return L
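
NetworkX exposes this matrix directly as nx.normalized_laplacian_matrix, which handles the node ordering and the degree scaling internally and returns a SciPy sparse matrix; a short usage sketch on a built-in toy graph:

import networkx as nx

G = nx.karate_club_graph()
L = nx.normalized_laplacian_matrix(G)  # I - D^(-1/2) A D^(-1/2), sparse
print(L.shape)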
def shingle2(G, s1, c1, s2, c2):
    p = 18446744073709551557 # 2^64 - 59
    coefficients1 = [(random.randint(2, p - 1), random.randint(2, p - 1)) for i in xrange(c1)]
    coefficients2 = [(random.randint(2, p - 1), random.randint(2, p - 1)) for i in xrange(c2)]
    
    shingle_vertices = {}
    for v in nx.nodes_iter(G):
        for sh in shingle(G.successors(v), s1, coefficients1, p):
            shingle_vertices.setdefault(sh, []).append(v)
                
    metashingle_shingles = {}
    for sh, vertices in shingle_vertices.iteritems():
        for metash in shingle(vertices, s2, coefficients2, p):
            metashingle_shingles.setdefault(metash, []).append(sh)

    return shingle_vertices, metashingle_shingles
def collapse_graph_in_place(graph):
    """ Collapse any degree two nodes in the graph *in-place*

    Specifically, collapse degree two nodes where the ASN
    of the node matches the ASN of the node on the other side.
    """
    asns = nx.get_node_attributes(graph, 'asn')
    types = nx.get_node_attributes(graph, 'nodetype')
    collapseable = True

    ctr = 0
    while collapseable:
        ctr += 1
        logging.info("Pass %s" % ctr)
        to_collapse = []
        zero_length = 0
        exempt = set()
        for node in nx.nodes_iter(graph):
            if types[node] != 'pop':
                continue

            neighbors = graph.neighbors(node)
            if len(neighbors) == 2:
                # This link is collapsible if the asn matches that of
                # one of its neighbors **and** none of the parties
                # have already been collapsed on this pass
                if ((asns[node] == asns[neighbors[0]]
                   or asns[node] == asns[neighbors[1]])
                   and len(exempt & (set([node]) | set(neighbors))) == 0):
                    to_collapse.append(node)
                    exempt |= set(neighbors)
                    exempt |= set([node])

        if len(to_collapse) != 0:
            logging.info("Collapsing %s nodes" % len(to_collapse))

            for node in to_collapse:
                neighbors = graph.neighbors(node)
                s1_weight = float(graph[node][neighbors[0]]['latency'])
                s2_weight = float(graph[node][neighbors[1]]['latency'])
                graph.add_edge(*neighbors, latency=s1_weight + s2_weight)
                zero_length += 1 if s1_weight + s2_weight == 0 else 0
                graph.remove_node(node)
            logging.info("Created {0} new links. {1} with zero latency"
                         .format(len(to_collapse), zero_length))
        else:
            collapseable = False
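
A hypothetical toy run of collapse_graph_in_place, using the 'asn', 'nodetype', and 'latency' attributes it expects; the values are illustrative, and the pre-2.0 NetworkX API the function is written against is assumed:

import logging
import networkx as nx

logging.basicConfig(level=logging.INFO)

g = nx.Graph()
g.add_node('a', asn=1, nodetype='pop')
g.add_node('b', asn=1, nodetype='pop')   # degree-two node sharing an ASN with 'a'
g.add_node('c', asn=2, nodetype='pop')
g.add_edge('a', 'b', latency=5)
g.add_edge('b', 'c', latency=7)

collapse_graph_in_place(g)
print(g.edges(data=True))                # one remaining edge between 'a' and 'c' with latency 12.0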
Example #44
def kcoreMaintain(H,maxCoreness,deletedVs,queryVs):
    if H:
        'After deleting nodes, keep the minimum degree at least maxCoreness'
        ### find the current minimum degree
        mind=H.degree(H.nodes()[0])
        minIndex=H.nodes()[0]
        for n in nx.nodes_iter(H):
            if H.degree(n)<mind:
                mind=H.degree(n)
                minIndex=n
        ### if the current minimum degree is below the required degree, remove that node
        if mind<maxCoreness:
            H.remove_node(minIndex)
            deletedVs.append(minIndex)
            kcoreMaintain(H,maxCoreness,deletedVs,queryVs)
    ## once every node violating the constraint has been removed, we can return
    return
    def CreateFromStructureLocationsGraph(cls, graph, scalars=None):
        '''
        :Param networkx.Graph: Graph of structure locations
        :Param array: Array of scalars for each dimension [X Y Z Radius]
        :Return: Morphology object 
        '''
        morph = Morphology()

        if scalars is None:
            global DefaultScalars
            scalars = DefaultScalars  # Use the values for the first rabbit connectome if not specified

        morph._scalars = scalars

        for node in nx.nodes_iter(graph):
            morphnode = SphereNode.CreateFromLocationObject(node)
            morph.graph.add_node(morphnode)

        morph._kdtree = None
        morph.__correct_scale()
def E_R(network, p, rate, time):
    num_add = int(rate * nx.number_of_nodes(network))
    nodes_added_this_step = []
    i = 1
    nodes_del = []
    while True:
        nodes_del = random.sample(network.nodes(), num_add)
        if not ('newcomer' in nodes_del):
            break
    network.remove_nodes_from(nodes_del)
    while i <= num_add:
        nodes_added_this_step.append('%d' % time + '_' + '%d' % i)
        i += 1
    network.add_nodes_from(nodes_added_this_step)
    for i in nodes_added_this_step:
        for j in nx.nodes_iter(network):
            if j == 'newcomer':
                continue
            x = random.uniform(0, 1)
            if x <= p:
                network.add_edge(i, j)
Example #47
def default_test(cp_num, isp_num):
    cpNetworks = []
    for i in range(cp_num):
        cpNetworks.append(CpNetwork('Abilene', CP_TOPO_DIR))

    trafficMatrix = {}
    ispNetworks = []
    union_ISP = networkx.DiGraph()
    dst_topos = []
    for i in range(isp_num):
        net = IspNetwork('isp_network', ISP_TOPO_DIR)
        mapping = dict(zip(net.topo._graph.nodes(), [x + i * 11 for x in networkx.nodes_iter(net.topo._graph)]))
        net.topo._graph = networkx.relabel_nodes(net.topo._graph, mapping)
        ispNetworks.append(net)
        dst_topos.append(net.topo._graph)
        #union_ISP = networkx.union(union_ISP, net.topo._graph)

    for i in range(cp_num):
        trafficMatrix[i] = cpNetworks[i].egress_volume_shortest([0, 1, 2], dst_topos)
        
            
    with open(DEFAULT_LOG_DIR, 'a') as f:
        for i in range(isp_num):
            ispNetworks[i].linkcaps = set_link_caps(ispNetworks[i].topo)
            pptc, throughput = ispNetworks[i].calc_path_shortest(trafficMatrix, i)
            #isp_pptc.append(pptc)

            cp_bw_total = {}
            for tc, paths in pptc.iteritems():
                cp_id = tc.network_id
                for path in paths:
                    if cp_id in cp_bw_total:
                        cp_bw_total[cp_id] += path.bw
                    else:
                        cp_bw_total[cp_id] = path.bw
                        
            for cp_id in cp_bw_total.keys():
                f.write('cp {} isp {} get bw {}'.format(cp_id, i, cp_bw_total[cp_id]))
                f.write('\n')
Example #48
def run():
    #Here's the graph to look at-
    import networkx as nx
    G = nx.Graph()
    nodes = ["Gur","Qing","Samantha","Jorge","Lakshmi","Jack","John","Jill"]
    edges = [("Gur","Qing",{"source":"work"}),
             ("Gur","Jorge", {"source":"family"}),
             ("Samantha","Qing", {"source":"family"}),
             ("Jack","Qing", {"source":"work"}),
             ("Jorge","Lakshmi", {"source":"work"}),
             ("Jorge","Samantha",{"source":"family"}),
             ("Samantha","John", {"source":"family"}),
             ("Lakshmi","Jack", {"source":"family"}),
             ("Jack","Jill", {"source":"charity"}),
             ("Jill","John",{"source":"family"})]
    G.add_nodes_from(nodes)
    G.add_edges_from(edges)
    for n in nx.nodes_iter(G):
        if n == "John":
            #         print('yey!')
            l = get_connections(G, n, 'family')
            print(l)
            break
Example #49
def sortNode6(G):  #Clustering Rank
    '''
    Cluster Rank
    '''
    matG = []
    nodeG = nx.nodes(G)
    matG.append(nodeG)
    cl = nx.clustering(G).values()
    for i in range(len(cl)):
        cl[i] = pow(10, -cl[i])
    cl = list(cl)
    si = []
    for nodei in nx.nodes_iter(G):
        num = 0
        for vj in nx.all_neighbors(G, nodei):
            num = num + nx.degree(G, vj) + 1
        si.append(num)
    prod = list(map(lambda a, b: a * b, cl, si))
    matG.append(prod)
    matG = np.array(matG)
    result = matG.T[np.lexsort(-matG)].T
    li = result[0]
    li = li.astype(np.int32)
    return li
Example #50
def getAllOpenTriangles(G, sets):
    opens = {}
    index = {}

    for v in nx.nodes_iter(G):
        s1 = set(G[v])
        for w in s1:
            s2 = set(G[w])
            pair = (v, w) if v < w else (w, v)

            if pair not in opens:
                opens[pair] = []
            opens[pair].extend([(i, v) if i < v else (v, i)
                                for i in (s1 - s2 - set([w]))])
            opens[pair].extend([(i, w) if i < w else (w, i)
                                for i in (s2 - s1 - set([v]))])

            for teamID, nodes in sets.iteritems():
                if v in nodes and w in nodes:
                    if pair not in index:
                        index[pair] = set()
                    index[pair].add(teamID)

    return opens, index
Example #51
def label_undirected_graph(graph):
    for node in nx.nodes_iter(graph):
        graph.node[node]['degree'] = graph.degree(node)
    graph.graph['labeled'] = True
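
Under NetworkX 2.x the same annotation is a single nx.set_node_attributes call over the degree view; a minimal sketch with an illustrative _v2 name:

import networkx as nx

def label_undirected_graph_v2(graph):
    # Store each node's degree under the 'degree' node attribute.
    nx.set_node_attributes(graph, dict(graph.degree()), 'degree')
    graph.graph['labeled'] = True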
Example #52
def label_directed_graph(digraph):
    for node in nx.nodes_iter(digraph):
        digraph.node[node]['in-degree'] = digraph.in_degree(node)
        digraph.node[node]['out-degree'] = digraph.out_degree(node)
Example #53
for fname, symbols in requires.items():
    dependencies[fname] = set(
        pick(provides[s]) for s in symbols if s in provides)
    #    print fname + ': ' + ' '.join(sorted(dependencies[fname]))
    unmet = set()
    demangled = set()
    for s in symbols:
        if s not in provides and not symbols_re_skip.search(s): unmet.add(s)
    for u in sorted(unmet):
        dm = subprocess.check_output(["c++filt", u])
        demangled.add(dm.rstrip('\r\n'))
#    if demangled :  print fname + ': undefined : ' + ' '.join(sorted(demangled))

import networkx as nx

G = nx.DiGraph()
for key, values in dependencies.items():
    G.add_node(key)
    for val in values:
        G.add_edge(key, val)

for node in nx.nodes_iter(G):
    s = nx.dfs_successors(G, node)
    deps = set()
    if s:
        for key, vals in s.items():
            if key != node: deps.add(key)
            for v in vals:
                deps.add(v)
    print node + ': ' + ','.join(sorted(deps))
Example #54
 def do_list_nodes(self, line):
     "List all nodes for the graph"
     print "[*] All nodes: "
     for n in nx.nodes_iter(G):
         print n
 def test_nodes_iter(self):
     assert_equal(list(self.G.nodes_iter()),list(nx.nodes_iter(self.G)))
     assert_equal(list(self.DG.nodes_iter()),list(nx.nodes_iter(self.DG)))
Example #56
def optimal_routing(cp_num):
    cpNets = []
    node_num = 0
    union_graph = networkx.DiGraph()
    for i in range(cp_num):
        net = CpNetwork('Abilene', CP_TOPO_DIR)
        mapping = dict(
            zip(net.topo._graph.nodes(),
                [x + i * 11 for x in networkx.nodes_iter(net.topo._graph)]))
        net.topo._graph = networkx.relabel_nodes(net.topo._graph, mapping)
        node_num += networkx.number_of_nodes(net.topo._graph)
        cpNets.append(net)
        union_graph = networkx.union(union_graph, net.topo._graph)

    ispNet = IspNetwork('isp_network', ISP_TOPO_DIR)
    mapping = dict(
        zip(ispNet.topo._graph.nodes(),
            [x + node_num for x in networkx.nodes_iter(ispNet.topo._graph)]))
    ispNet.topo._graph = networkx.relabel_nodes(ispNet.topo._graph, mapping)

    trafficMatrix = {}
    for i in range(cp_num):
        trafficMatrix[i] = cpNets[i].egress_default(
            networkx.nodes_iter(cpNets[i].topo._graph), ispNet.topo)

    ispNet.topo._graph = networkx.union(ispNet.topo._graph, union_graph)
    ispNet.linkcaps = set_link_caps(ispNet.topo)
    for i in range(cp_num):
        node_1 = i * 11
        node_2 = i * 11 + 1
        node_3 = cp_num * 11
        node_4 = cp_num * 11 + 1
        ispNet.topo._graph.add_edge(node_1, node_3)
        ispNet.topo._graph.add_edge(node_2, node_4)
        ispNet.linkcaps[(node_1, node_3)] = 10000000
        ispNet.linkcaps[(node_2, node_4)] = 10000000

    node = cp_num * 11
    ispNet.linkcaps[(node, node + 1)] = 10
    ispNet.linkcaps[(node + 1, node)] = 10
    ispNet.linkcaps[(node, node + 2)] = 10
    ispNet.linkcaps[(node + 2, node)] = 10

    print ispNet.topo._graph.edges()
    pptc, throughput = ispNet.calc_path_maxminfair(trafficMatrix)

    ingress_bw_dict = {}
    for i in range(cp_num):
        ingress_bw_dict[i] = {}
    for tc, paths in pptc.iteritems():
        for path in paths:
            nodes = path.getNodes()
            ingress = nodes[0]
            if ingress in ingress_bw_dict[tc.network_id]:
                ingress_bw_dict[tc.network_id][ingress] += path.bw
            else:
                ingress_bw_dict[tc.network_id][ingress] = path.bw

    for id, bw_dict in ingress_bw_dict.iteritems():
        print 'network id:{}'.format(id)
        for ingress, bw in bw_dict.iteritems():
            print '{}:{}'.format(ingress, bw)

    with open(OPTIMAL_LOG_DIR, 'a') as f:
        f.write(str(throughput))
        f.write('\n')
        '''f.write('independent routing\n')
def B_A(graph, internal_link_factor, links_added_per_step, add_percentage,
        del_percentage, time_step):
    # parameters initialization
    network = graph
    f = internal_link_factor
    m = links_added_per_step
    num_add = int(add_percentage * nx.number_of_nodes(network))
    num_del = int(del_percentage * nx.number_of_nodes(network))
    time = time_step
    probability = []
    probability_temp = []
    total_probability = 0
    nodes_pre_step = []
    nodes_added_this_step = []
    nodes_pair_without_edge = []
    nodes_for_del = []

    # calculate how many nodes should be added in this time step
    '''if n >=math.log10(t) and n <= math.log10(t+1):
        num_add = int (n * nx.number_of_nodes(network) / 100)
        num_del = num_add
        #print 'num_add',num_add
    else:
        num_add = int(math.log10(t) * nx.number_of_nodes(network) / 100)
        num_del = int((n * (t-1) / (pow(10,n) - 1)) * nx.number_of_nodes(network) / 100)
        #print 'num_add', num_add
        #print 'num_del', num_del'''

    # calculate the probability of each node to be linked according to whose degree
    for node in nx.nodes_iter(network):
        #print 'degree',network.degree(node)
        nodes_pre_step.append(node)
        probability_temp.append(network.degree(node))
        total_probability += network.degree(node)
    for prob in probability_temp:
        probability.append(float(prob) / total_probability)
    #print 'prob_temp',probability
    #print 'prob',probability

    # add num_add nodes to the network
    i = 1
    while i <= num_add:
        nodes_added_this_step.append('%d' % time + '_' + '%d' % i)
        i += 1
    network.add_nodes_from(nodes_added_this_step)

    # create m links for each node added in this time step according to the probability
    #print 'nodes_pre_step',nodes_pre_step
    for node_added in nodes_added_this_step:
        selected_nodes = selection_probability.select(nodes_pre_step,
                                                      probability, m)
        #print 'selected_nodes', selected_nodes
        for node in selected_nodes:
            network.add_edge(node_added, node)

    # add f % internal links according to the product of each pair of nodes' degrees
    probability = []
    probability_temp = []
    total_probability = 0
    for i, elei in enumerate(nx.nodes_iter(network)):
        for j, elej in enumerate(nx.nodes_iter(network)):
            if i >= j:
                continue
            if not network.has_edge(elei, elej):
                nodes_pair_without_edge.append((elei, elej))
                probability_temp.append(
                    network.degree(elei) * network.degree(elej))
                total_probability += network.degree(elei) * network.degree(
                    elej)
    for prob in probability_temp:
        probability.append(float(prob) / total_probability)
    selected_pairs = selection_probability.select(
        nodes_pair_without_edge, probability,
        int(f * nx.number_of_nodes(network)))
    for nodei, nodej in selected_pairs:
        network.add_edge(nodei, nodej)

    # delete num_del nodes according to whose degree
    probability = []
    probability_temp = []
    total_probability = 0.0
    for node in nx.nodes_iter(network):
        nodes_for_del.append(node)
        if network.degree(node) == 0:  # if the degree is 0, treat it as 0.1 to avoid division errors in the probability calculation
            node_degree = 0.1
        else:
            node_degree = network.degree(node)
        probability_temp.append(1.0 / node_degree)
        total_probability += 1.0 / node_degree
    for prob in probability_temp:
        probability.append(prob / total_probability)
    selected_del = selection_probability.select(nodes_for_del, probability,
                                                num_del)
    for node in selected_del:
        network.remove_node(node)