Exemplo n.º 1
0
    def run(self):
        """Compute the K-Peak decomposition of the graph.

        Repeatedly runs the "K-Cores" plugin on a shrinking clone subgraph,
        assigns the current maximum core value to the nodes attaining it,
        removes them, and loops until the clone is empty.  Afterwards the
        "Core Dependency" statistic is stored in the plugin data set.
        """
        clone = self.graph.addCloneSubGraph("clone")
        cores = tlp.DoubleProperty(clone)
        self.pluginProgress.setComment("Computing K-Peaks ...")
        while clone.numberOfNodes() > 0:
            self.pluginProgress.progress(
                self.graph.numberOfNodes() - clone.numberOfNodes(),
                self.graph.numberOfNodes())
            clone.applyDoubleAlgorithm("K-Cores", cores, self.dataSet)
            d = cores.getNodeMax()
            # Materialize the matching nodes before deleting: removing nodes
            # while iterating a Tulip node iterator invalidates the iterator.
            peak_nodes = [v for v in clone.getNodes() if cores[v] == d]
            for v in peak_nodes:
                self.result[v] = d
                clone.delNode(v)
        self.graph.delAllSubGraphs(clone)

        ## compute core dependency
        self.pluginProgress.setComment("Computing Core Dependency...")
        base_cores = tlp.DoubleProperty(self.graph)
        self.graph.applyDoubleAlgorithm("K-Cores", base_cores, self.dataSet)
        core_dep = 0.
        for n in self.graph.getNodes():
            if base_cores[n] > 0:
                core_dep += (base_cores[n] - self.result[n]) / base_cores[n]
        nb_nodes = self.graph.numberOfNodes()
        # Guard against an empty graph (would raise ZeroDivisionError).
        core_dep = core_dep / nb_nodes if nb_nodes > 0 else 0.
        self.dataSet["Core Dependency"] = core_dep
        return True
def main(graph):
    """Iteratively attack the network: 100 times, remove the node with the
    highest betweenness centrality from a clone subgraph and log the size of
    the largest connected component and the average path length to a CSV file.
    """
    g = graph.addCloneSubGraph("clone")
    ds = tlp.getDefaultPluginParameters("Betweenness Centrality", g)
    ds['target'] = "nodes"  # restrict the computation to nodes only
    # newline='' is the documented way to open files for the csv module
    # (avoids blank rows on Windows).
    with open('betweness.csv', 'w', newline='') as f:
        writer = csv.writer(f)
        writer.writerow(
            ['nb_sommets', 'mesure', 'taille_composante', 'lg moyenne'])

        for i in range(100):
            # recompute betweenness on the current (shrunken) subgraph
            betweenness = tlp.DoubleProperty(g)
            g.applyDoubleAlgorithm("Betweenness Centrality", betweenness, ds)
            # Node with the largest value.  Python 3: iterators have no
            # .next() method, use the next() builtin.
            n = next(betweenness.getNodesEqualTo(betweenness.getNodeMax(g), g))
            g.delNode(n)
            if g.numberOfNodes() == 0:
                break  # nothing left to measure (avoids ZeroDivisionError)
            comp = tlp.ConnectedTest.computeConnectedComponents(g)
            # size of the largest connected component
            maxl = max((len(c) for c in comp), default=0)

            writer.writerow([
                i, 'betweeness centrality', maxl / g.numberOfNodes(),
                tlp.averagePathLength(g)
            ])
Exemplo n.º 3
0
def distances(G, length, src):
    """Single-source shortest-path distances from *src* over graph *G*.

    Classic Dijkstra with linear-scan extraction (O(V^2)): edge weights come
    from the *length* property; unreachable nodes keep the 10e+20 sentinel.
    Returns a tlp.DoubleProperty mapping each node to its distance.
    """
    finished = tlp.BooleanProperty(G)
    finished.setAllNodeValue(False)
    dist = tlp.DoubleProperty(G)
    dist.setAllNodeValue(10e+20)
    dist[src] = 0.

    while True:
        # Scan the not-yet-finished nodes for the smallest tentative distance.
        current = tlp.node()
        for candidate in finished.getNodesEqualTo(False):
            if (not current.isValid()) or dist[candidate] < dist[current]:
                current = candidate

        # No unfinished node left: every reachable node has been finalized.
        if not current.isValid():
            break

        finished[current] = True

        # Relax every edge incident to the newly finalized node.
        for edge in G.getInOutEdges(current):
            neighbor = G.opposite(edge, current)
            through = dist[current] + length[edge]
            if through < dist[neighbor]:
                dist[neighbor] = through
    return dist
	def run(self):
		"""Compute flow betweenness for nodes and edges of the graph.

		Brandes-style accumulation: for every source node, shortest paths are
		computed (self.shortestPaths), then dependencies are propagated back
		from the farthest nodes, weighted by the flow amount between the
		source and each node (self.flow_amount).  Results are written into
		self.result; the aggregate "average path length" and "total flow"
		values are stored into the plugin data set.
		"""
		# plugin parameters: edge weight, optional edge cost, node layout
		self.weight = self.dataSet["weight"]
		self.cost = self.dataSet["cost"]
		self.position = self.dataSet["layout"]
	
		# pick the edge iterator matching the requested orientation
		iterators_edges = {"InOut": self.graph.getInOutEdges, "In": self.graph.getInEdges, "Out": self.graph.getOutEdges}
		chossen_iterator = iterators_edges[self.dataSet["type"]]
	
		self.result.setAllNodeValue(0.)
		self.result.setAllEdgeValue(0.)
	
		avg_path_length = 0.
		total_flow      = 0.
		nb_nodes_done   = 0
		# per-source dependency accumulator, reset for every source
		delta = tlp.DoubleProperty(self.graph)
		for src in self.graph.getNodes():
			self.pluginProgress.progress(nb_nodes_done,self.graph.numberOfNodes())
			nb_nodes_done += 1

			delta.setAllNodeValue(0.)
			# stack holds nodes in non-decreasing distance order, so popping
			# processes them farthest-first (required for the accumulation)
			ancestors, stack, nb_paths = self.shortestPaths(src, chossen_iterator)
			
			while len(stack) > 0:
				w = stack.pop()
				flow_sw = self.flow_amount(src, w)/2.
				# flow divided by two since flows are undirected
				total_flow += flow_sw
				if w != src :
					self.result[w] += delta[w]
				# distribute w's dependency to its shortest-path predecessors,
				# proportionally to the number of shortest paths through each
				for v in ancestors[w] :
					inc_delta = (nb_paths[v]/nb_paths[w])*(flow_sw + delta[w])
					delta[v] += inc_delta
					# resolve the traversed edge according to the orientation
					e = self.graph.existEdge(v, w, False)
					if self.dataSet["type"] == "Out":
						e = self.graph.existEdge(v, w, True)
					if self.dataSet["type"] == "In":
						e = self.graph.existEdge(w, v, True)
					self.result[e] += inc_delta
					
					# path length contribution: edge cost if provided, else 1
					val = 1.
					if self.cost is not None:
						val = self.cost[e]
					avg_path_length += val * inc_delta
	
		self.dataSet["average path length"] = avg_path_length
		self.dataSet["total flow"] = total_flow
		return True
Exemplo n.º 5
0
def fault_tolerance(flow, flow_value, net, sum_flow):
    """Average fraction of total flow that survives a single edge failure.

    For each edge of *net*, removes it from a clone, recomputes connected
    components, and sums the flow-graph values whose endpoints remain in the
    same component.  Returns the mean surviving-flow ratio over all edges
    (0. for an edgeless network).
    """
    if net.numberOfEdges() == 0:
        return 0.  # avoid division by zero on an edgeless network
    res = 0.
    id_comp = tlp.DoubleProperty(net)
    for e in net.getEdges():
        temp_net = net.addCloneSubGraph()
        temp_net.delEdge(e)
        temp_net.applyDoubleAlgorithm("Connected Component", id_comp)
        flow_intra_comp = 0.
        # renamed loop variable: the original shadowed the outer `e`
        for fe in flow.getEdges():
            s = flow.source(fe)
            t = flow.target(fe)
            if temp_net.isElement(s) and temp_net.isElement(t):
                # flow survives only if both endpoints stay connected
                if id_comp[s] == id_comp[t]:
                    flow_intra_comp = flow_intra_comp + flow_value[fe]
        res = res + flow_intra_comp / sum_flow
        net.delAllSubGraphs(temp_net)
    return res / (net.numberOfEdges() + 0.)
Exemplo n.º 6
0
    def run(self):
        """Enumerate maximal cliques via Bron-Kerbosch with pivoting,
        processing nodes in a degeneracy-style ordering given by K-Cores.
        """
        # get parameters
        self.create_sg = self.dataSet["Create subgraphs?"]
        self.create_prop = self.dataSet["Create property?"]

        ## init results
        self.nb_cliques = 0
        prop_memb = None
        cliques_sg = None
        if self.create_sg:
            # Bug fix: keep the subgraph handle — it was created but never
            # assigned, leaving cliques_sg always None.
            cliques_sg = self.graph.addCloneSubGraph("Cliques")
        if self.create_prop:
            prop_memb = self.graph.getIntegerVectorProperty("clique_memb")
            for n in self.graph.getNodes():
                prop_memb.resizeNodeValue(n, 0)

        ## compute the degeneracy ordering of the nodes
        peel = tlp.DoubleProperty(self.graph)
        self.graph.applyDoubleAlgorithm("K-Cores", peel)
        # sorted() is stable, so ties keep graph iteration order — same
        # result as the original dict/OrderedDict round-trip, in one step.
        order = sorted(self.graph.getNodes(), key=lambda u: peel[u])

        ## start clique detection: P = later neighbors of order[i],
        ## X = earlier (already processed) neighbors
        for i in range(len(order)):
            Nu = self.getNeighborhoodSet(order[i])
            # slices are empty at the boundaries, which subsumes the original
            # special cases for i == 0 and i == len(order) - 1
            P = Nu & set(order[i + 1:])
            X = Nu & set(order[:i])
            self.maxCliquePivot(P, set([order[i]]), X)
        ## end
        return True
	def shortestPaths(self, src, it_edges):
		"""Dijkstra from *src* that also counts shortest paths.

		Edge weights come from self.cost when set, else 1.  Returns a triple:
		ancestors - dict node -> list of shortest-path predecessors,
		stack     - nodes in non-decreasing distance order (for back-propagation),
		nb_paths  - DoubleProperty: number of shortest paths from src to each node.
		"""
		distances = {}
		queue = pd.priority_dict({})
		queue[src.id] = 0
		## priority_dict requires __lt__ operator
		## which is undefined for tlp.node
		ancestors = {src : []}
		stack = []
		nb_paths = tlp.DoubleProperty(self.graph)
		nb_paths.setAllNodeValue(1)
		nb_paths[src] = 1

		while len(queue) > 0:
			# pop the closest not-yet-finalized node (stored by id, see above)
			v_id = queue.smallest()
			d = queue[v_id]
			v = tlp.node(v_id)
			queue.pop_smallest()
			
			stack.append(v)
			distances[v] = d
			for e in it_edges(v):
				# edge weight: cost property if provided, else 1
				val = 1.
				if self.cost is not None:
					val = self.cost[e]
				w = self.graph.opposite(e, v)
				alt = d + val
				if w in distances:
					# w already finalized; a shorter path now means negative
					# weights or an internal inconsistency
					if alt < distances[w]:
						self.pluginProgress.setError("Dijkstra: found better path to already-final vertex")
				else:
					if (w.id not in queue) or (alt < queue[w.id]):
						# first/strictly better path: reset predecessor set
						queue[w.id] = alt
						ancestors[w] = [v]
						nb_paths[w] = nb_paths[v]
					elif (alt == queue[w.id]) :
						# equally short path: accumulate predecessors and counts
						ancestors[w].append(v)
						nb_paths[w] = nb_paths[w] + nb_paths[v]
		return ancestors, stack, nb_paths
Exemplo n.º 8
0
def main(graph):
    """Demo driver: overlay a gravity-model "flow" edge between every node
    pair, run the Flow Betweenness plugin, then remove the overlay edges and
    print the aggregate statistics.
    """
    # Nodes weight and position
    weight = graph.getDoubleProperty("weight")
    position = graph.getLayoutProperty("viewLayout")
    # Edges length
    length = graph.getDoubleProperty("length")

    # Create temp graph properties
    is_road = tlp.BooleanProperty(graph)
    is_road.setAllEdgeValue(True)
    flow_val = tlp.DoubleProperty(graph)

    # Compute the flow between nodes pairs (gravity model: w1*w2 / d^2)
    for n1 in graph.getNodes():
        for n2 in graph.getNodes():
            if n1.id < n2.id:
                d_12 = position[n1].dist(position[n2])
                if d_12 > 0:
                    e = graph.addEdge(n1, n2)
                    is_road[e] = False
                    flow_val[e] = weight[n1] * weight[n2] / (d_12 * d_12)

    # Compute Flow Betweenness
    ds = tlp.getDefaultPluginParameters("Flow Betweenness")
    ds["is road"] = is_road
    ds["flow value"] = flow_val
    ds["length"] = length
    fbc = graph.getDoubleProperty("fbc")
    graph.applyDoubleAlgorithm("Flow Betweenness", fbc, ds)

    # Clean up the graph.  Bug fix: collect the overlay edges first —
    # deleting edges while iterating graph.getEdges() invalidates the
    # iterator.
    overlay_edges = [e for e in graph.getEdges() if not is_road[e]]
    for e in overlay_edges:
        graph.delEdge(e)

    # Output Average path length and total flow values
    # NOTE(review): the plugin implementation seen elsewhere stores these
    # under lowercase keys ("average path length" / "total flow") — confirm
    # the exact key casing against the installed plugin version.
    print("Average path length: ", ds["Average path length"])
    print("Total flow: ", ds["Total flow"])
Exemplo n.º 9
0
def main(graph):
    """Compare four attack strategies: on four clone subgraphs, repeatedly
    remove the most (or least) central node for each centrality measure and
    log largest-component ratio and average path length to a CSV file.
    """
    g = graph.addCloneSubGraph("clone")

    gDegree = graph.addCloneSubGraph('Degree')
    gBetweenness = graph.addCloneSubGraph("Betweenness Centrality")
    gPageRank = graph.addCloneSubGraph("Page Rank")
    gEccentricity = graph.addCloneSubGraph("Eccentricity")
    betweennessDs = tlp.getDefaultPluginParameters("Betweenness Centrality", g)
    betweennessDs['target'] = "nodes"

    # one record per measure: plugin name, whether to remove the max- or
    # min-valued node, the clone subgraph to attack, and the last statistics
    allCentralities = [
        {
            'Name': 'Degree',
            'max': True,
            'graph': gDegree,
            'currentAvgPath': 0,
            'currLengthComponent': 0
        },
        {
            'Name': 'Betweenness Centrality',
            'ds': betweennessDs,
            'max': True,
            'graph': gBetweenness,
            'currentAvgPath': 0,
            'currLengthComponent': 0
        },
        {
            'Name': 'Page Rank',
            'max': False,
            'graph': gPageRank,
            'currentAvgPath': 0,
            'currLengthComponent': 0
        },
        {
            'Name': 'Eccentricity',
            'max': False,
            'graph': gEccentricity,
            'currentAvgPath': 0,
            'currLengthComponent': 0
        },
    ]

    with open('allCentralities.csv', 'w') as f:
        writer = csv.writer(f)

        writer.writerow([
            'nb_sommets', 'mesure#1', 'taille_composante#1', 'lg moyenne#1',
            'mesure#2', 'taille_composante#2', 'lg moyenne#2', 'mesure#3',
            'taille_composante#3', 'lg moyenne#3', 'mesure#4',
            'taille_composante#4', 'lg moyenne#4', 'mesure#5',
            'taille_composante#5', 'lg moyenne#5'
        ])

        for i in range(100):
            for centrality in allCentralities:
                currGraph = graph.getDescendantGraph(centrality['Name'])

                cent = tlp.DoubleProperty(currGraph)
                if "ds" in centrality:
                    # Bug fix: the output property argument was missing from
                    # this call, so the algorithm result was never captured.
                    currGraph.applyDoubleAlgorithm(centrality['Name'], cent,
                                                   centrality['ds'])
                else:
                    cent = currGraph.getDoubleProperty(centrality['Name'])
                    currGraph.applyDoubleAlgorithm(centrality['Name'], cent)

                # Python 3: iterators have no .next() method, use next().
                if centrality['max']:
                    n = next(cent.getNodesEqualTo(cent.getNodeMax(currGraph),
                                                  currGraph))
                else:
                    n = next(cent.getNodesEqualTo(cent.getNodeMin(currGraph),
                                                  currGraph))

                currGraph.delNode(n)

                # size of the largest connected component after removal
                comp = tlp.ConnectedTest.computeConnectedComponents(currGraph)
                maxl = 0
                for c in comp:
                    if len(c) > maxl:
                        maxl = len(c)
                centrality['currentAvgPath'] = tlp.averagePathLength(currGraph)
                centrality[
                    'currLengthComponent'] = maxl / currGraph.numberOfNodes()

            writer.writerow([
                i, 'Degree', allCentralities[0]['currLengthComponent'],
                allCentralities[0]['currentAvgPath'], 'Betweenness',
                allCentralities[1]['currLengthComponent'],
                allCentralities[1]['currentAvgPath'], 'Page Rank',
                allCentralities[2]['currLengthComponent'],
                allCentralities[2]['currentAvgPath'], 'Eccentricity',
                allCentralities[3]['currLengthComponent'],
                allCentralities[3]['currentAvgPath']
            ])
Exemplo n.º 10
0
# Import a grid approximation (with default parameters)
graph = tlp.importGraph("Grid Approximation")

# Visual properties of the imported graph.  viewBorderWidth, viewColor and
# viewLabel are not used in the lines visible here — presumably referenced
# later in the script (TODO confirm).
viewLayout = graph.getLayoutProperty("viewLayout")
viewSize = graph.getSizeProperty("viewSize")
viewBorderWidth = graph.getDoubleProperty("viewBorderWidth")
viewColor = graph.getColorProperty("viewColor")
viewLabel = graph.getStringProperty("viewLabel")

# Apply an FM^3 layout on it
fm3pParams = tlp.getDefaultPluginParameters("FM^3 (OGDF)", graph)
fm3pParams["Unit edge length"] = 100
graph.applyLayoutAlgorithm("FM^3 (OGDF)", viewLayout, fm3pParams)

# Compute an anonymous degree property
degree = tlp.DoubleProperty(graph)
degreeParams = tlp.getDefaultPluginParameters("Degree")
graph.applyDoubleAlgorithm("Degree", degree, degreeParams)

# Map the node sizes to their degree (linear mapping into [1, 30])
sizeMappingParams = tlp.getDefaultPluginParameters("Metric Mapping", graph)
sizeMappingParams["property"] = degree
sizeMappingParams["min size"] = 1
sizeMappingParams["max size"] = 30
graph.applySizeAlgorithm("Metric Mapping", viewSize, sizeMappingParams)

# Create a heat map color scale (green -> black -> red)
heatMap = tlp.ColorScale(
    [tlp.Color(0, 255, 0),
     tlp.Color(0, 0, 0),
     tlp.Color(255, 0, 0)])