Code Example #1
File: test_function.py Project: iaciac/networkx
def test_is_empty():
    graphs = [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()]
    for G in graphs:
        assert_true(nx.is_empty(G))
        G.add_nodes_from(range(5))
        assert_true(nx.is_empty(G))
        G.add_edges_from([(1, 2), (3, 4)])
        assert_false(nx.is_empty(G))
Code Example #2
def test_is_empty():
    graphs = [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()]
    for G in graphs:
        assert nx.is_empty(G)
        G.add_nodes_from(range(5))
        assert nx.is_empty(G)
        G.add_edges_from([(1, 2), (3, 4)])
        assert not nx.is_empty(G)
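Note: as both tests above demonstrate, nx.is_empty() only looks at edges, so a graph with nodes but no edges still counts as empty. A minimal standalone check of the same behaviour (not part of the original test file):

import networkx as nx

G = nx.Graph()
G.add_nodes_from(range(5))
assert nx.is_empty(G)      # nodes alone do not make the graph non-empty
G.add_edge(1, 2)
assert not nx.is_empty(G)  # the first edge does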
Code Example #3
File: mstweb.py Project: lteu/cgts
def analysis(edges, node_dic, class_dic={}):
    G = nx.Graph()

    for edge in edges:
        x = edge[0]
        y = edge[1]
        G.add_edge(x, y)

    graph_degrees = G.degree()

    degree_sequence = sorted([d for n, d in G.degree()])  # ascending degree sequence

    degreeCount = collections.Counter(degree_sequence)
    degreeDist = list(degreeCount.items())

    # Remove nodes outside the largest connected component so that the
    # radius and graph centers can be calculated.
    if not nx.is_empty(G):
        largest_cc = max(nx.connected_components(G), key=len)
        to_remove = set(G.nodes()) - set(largest_cc)
        G.remove_nodes_from(to_remove)

    radius, eccentricity, center = "", "", ""
    if not nx.is_empty(G):
        if nx.is_connected(G):
            radius = nx.radius(G)
            eccentricity = nx.eccentricity(G)
            center = nx.center(
                G
            )  # The center is the set of nodes with eccentricity equal to radius.

    mod_score = -2

    # check if it makes sense to calculate the modularity score
    aTestNodeName = "" if not node_dic else node_dic[next(iter(node_dic))]
    if class_dic and aTestNodeName != "" and aTestNodeName in class_dic:

        inv_dic = {}
        nodelist = G.nodes()
        for n in nodelist:
            converted_num = node_dic[n]
            class_tmp = class_dic[converted_num]
            if class_tmp not in inv_dic:
                inv_dic[class_tmp] = set([n])
            else:
                inv_dic[class_tmp].add(n)
        groups = [x for k, x in inv_dic.items()]
        mod_score = nx_comm.modularity(G, groups)
    return radius, eccentricity, center, degreeDist, mod_score, G
Code Example #4
File: main.py Project: elaesme/Movie-Search-Engine
def graph(listMovies):
    G = nx.Graph() 
    couple=[]
    count=0
    for i in range(len(listMovies)):
        for j in range(len(listMovies[i])):
            for y in range(1,len(listMovies[i])):
                couple=[listMovies[i][j],listMovies[i][y]]
                count=0
                for k in listMovies:
                    if (couple[0] in k) and (couple[1] in k):
                        
                        count+=1
                        if count>=2 and not couple[0] == couple[1]:
                            
                            G.add_edge(couple[0],couple[1])
                            break
     
 
    
    if not nx.is_empty(G):
        plt.figure(figsize=(8, 8))  # image is 8 x 8 inches; create the figure before drawing
        pos = nx.spring_layout(G)  # compute the layout once and reuse it for every draw call
        nx.draw(G, pos=pos, with_labels=True, node_size=100, font_size=10)
        nx.draw_networkx_nodes(G, pos=pos, node_size=1500)
        nx.draw_networkx_edges(G, pos, alpha=0.3)
        plt.show()
Code Example #5
    def isGraphEmtpy(self):
        try:
            if nx.is_empty(self.G):
                raise nx.NetworkXError('No graph found!')
        except Exception as e:
            messagebox.showerror(title=None, message=e)
            raise e
Code Example #6
def drawGraph_Func1(result, typeDistance):
    G = nx.Graph()
    E_color = ''

    for i in result:
        G.add_node(i)

    for i in result:
        for j in result:
            edge = CAL.get_edge(i, j, [typeDistance])

            if edge is not None:
                G.add_edge(i, j)
    if not nx.is_empty(G):
        colors = ["red"] + (["cyan"] * (len(result) - 1))
        pos = nx.spring_layout(G)  #<<<<<<<<<< Initialize this only once
        nx.draw(G, pos, with_labels=True, font_size=25)
        nx.draw_networkx_nodes(G, pos, node_color=colors, node_size=1000)
        if typeDistance == "Distance":
            nx.draw_networkx_edges(G, pos, edge_color='blue', width=3)
        elif typeDistance == "Time":
            nx.draw_networkx_edges(G, pos, edge_color='purple', width=3)
        else:
            nx.draw_networkx_edges(G, pos, edge_color='red', width=3)

        plt.show()
Code Example #7
def load_graph(filename: str,
               graph_name: str,
               format_type='edge_list',
               is_directed=False):
    """
    Load directed or undirected graph from file with specific format

    :param filename: File to read graph
    :param graph_name: Graph name
    :param format_type: File format of saved graph e.g.: `edge_list`
    :param is_directed: Load as directed graph?
    :return: g: nx.Graph
    """
    if not os.path.isfile(filename):
        logger.error(f"File: [{filename}] does not exist")
        return

    if format_type == 'edge_list':
        graph_type = nx.DiGraph if is_directed else nx.Graph
        g = nx.read_edgelist(filename, create_using=graph_type)
        add_graph_name(g, graph_name)
        if nx.is_empty(g):
            logger.info("Loaded graph is empty")
        return g
    else:
        logger.error(f"Unknown graph input format: [{format_type}]")
        return
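A hypothetical call of the loader above (the file name is illustrative and not taken from the original project):

g = load_graph('example.edgelist', graph_name='example', is_directed=False)
if g is not None and not nx.is_empty(g):
    print(g.number_of_nodes(), g.number_of_edges())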
Code Example #8
def network_graph_nx(root):
    """Purpose of this function is to create a Network graph for visualization of each individual object given in the KNOSSOS XML input file"""

    # Need to iterate over each of the objects in the XML file to add individual skeletons to the Networkx graph:
    final_nx_graph = nx.Graph()
    position_labels = xy_node_position_dict(root)

    for thing in parse.root.iter('thing'):
        # Creating the dataframe with node information as input:
        node_df = ed.ed_per_node(thing)

        # Designating the nodes to be used in the Network graph, using if statement to add nodes as part of separate object:
        network_graph_nx = nx.Graph()
        if nx.is_empty(network_graph_nx):  # always true here, since the graph was just created
            network_graph_first = nx.from_pandas_edgelist(node_df, 'Source ID', 'Target ID', edge_attr='Euclidean Distance')
            network_graph_nx.add_edges_from(network_graph_first.edges())
            network_graph_nx.add_nodes_from(network_graph_first.nodes())
        else:
            network_graph_add = nx.from_pandas_edgelist(node_df, 'Source ID', 'Target ID', edge_attr='Euclidean Distance')
            network_graph_nx.update(network_graph_add)

        # Appending the final networkx graph and saving it as a .graphml file type:
        final_nx_graph.update(network_graph_nx)

    # Adding node attributes such as position and skeleton group:
    # positions = nx.spring_layout(final_nx_graph, dim=3, k=None, pos=position_labels, fixed=None, iterations=50, weight='weight', scale=1.0)
    nx.set_node_attributes(final_nx_graph, position_labels, name='Position')

    return final_nx_graph
Code Example #9
File: luca.moroldo.py Project: lucamoroz/IA-dei
def ampiezza(grafo, start, end):
    '''
    Apply BFS to find the path from a node to another
    :param grafo: networkx.Graph 
    :param start: name of start node
    :param end: name of destination node
    :return: Path from start to end if exists, empty list otherwise
    '''
    if nx.is_empty(grafo) or not grafo.has_node(start) or not grafo.has_node(
            end) or start == end:
        return []
    queue = [start]
    while queue:
        curr = queue.pop(0)
        for node in grafo.neighbors(curr):
            # skip iteration if node already visited
            if grafo.nodes[node]['visitato']:
                continue
            if node == end:
                return grafo.nodes(data=True)[curr]['percorso'] + [node]
            else:
                grafo.nodes[node][
                    'percorso'] = grafo.nodes[curr]['percorso'] + [node]
                grafo.nodes[node]['visitato'] = True
                queue.append(node)
    # no path found
    return []
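Note: ampiezza() reads the 'visitato' and 'percorso' node attributes, so they must be initialized before the call. A minimal setup sketch, inferred from the code above rather than taken from the original project:

nx.set_node_attributes(grafo, False, 'visitato')
grafo.nodes[start]['visitato'] = True
grafo.nodes[start]['percorso'] = [start]
path = ampiezza(grafo, start, end)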
Code Example #10
    def run_inference(self, debug=True, return_results=True):
        """
        Run Inference on network with given evidences.
        """
        g_temp = copy.deepcopy(self.g)
        self._log = self.log.setup_logger(debug=debug)
        self._log.debug("Started")

        if all(x is None for x in self.evidences.values()):
            self._log.debug(
                "No evidences were set. Proceeding without evidence")

        self.parameters = dict.fromkeys(self.nodes)
        self.calculated_means = copy.deepcopy(self.evidences)
        self.calculated_vars = dict.fromkeys(self.nodes)
        self.done_flags = dict.fromkeys(self.nodes)

        it = 0
        while not nx.is_empty(g_temp):
            it += 1
            pure_children = self.__get_pure_root_nodes(g_temp)
            for child in pure_children:
                if self.evidences[child] is None:
                    self.calculated_means[child], self.calculated_vars[
                        child] = self.__get_node_values(child)
                    self.__print_message(self._log, child)
                else:
                    self._log.debug(
                        f"Skipped Calculating:'{child}' as evidence is available."
                    )
                g_temp.remove_nodes_from(list(g_temp.pred[child]))

        return self.__build_results()
Code Example #11
def pajek_to_files(name, url, pajek_lines, dir_name):
    if pajek_lines:
        try:
            G = nx.parse_pajek(pajek_lines)
            if not nx.is_empty(G):
                old_attributes = list(G.nodes)
                G = nx.convert_node_labels_to_integers(G)
                id_mapping = []
                node_list = list(G.nodes)
                for i in range(len(node_list)):
                    id_mapping.append([old_attributes[i], str(node_list[i])])
                mapping_file = open('..' + dir_name +
                                    '/node_id_mappings/mapping_' +
                                    url.split('/')[-1] + '.csv',
                                    'w',
                                    newline='')
                mapping_file_writer = csv.writer(mapping_file)
                mapping_file_writer.writerow(['id', 'name'])
                for tup in id_mapping:
                    mapping_file_writer.writerow(list(tup))
                nx.write_edgelist(G,
                                  '..' + dir_name + '/edge_lists/' +
                                  url.split('/')[-1] + '.csv',
                                  delimiter=',')
                insert_into_db(
                    name, url,
                    dir_name + '/edge_lists/' + url.split('/')[-1] + '.csv',
                    dir_name + '/node_id_mappings/mapping_' +
                    url.split('/')[-1] + '.csv', G.is_directed(),
                    G.is_multigraph(), int(G.number_of_nodes()),
                    int(nx.number_of_selfloops(G)))
        except Exception as e:
            traceback.print_exc()
            print(e)
            print("Couldn't parse " + url)
Code Example #12
def network_five_function(year):
    """具体函数"""
    file_path = '../data/生成数据/03关系矩阵/五年期/' + str(year) + '-' + str(
        year + 4) + '年竞争关系矩阵.csv'
    if os.path.exists(file_path):
        graph = create_diagrams(file_path)
        if not nx.is_empty(graph):
            network_indicators = calculate_networks_indicators(
                graph, year, '五年期')
            excel_path = '../data/生成数据/04关系矩阵_网络指标/五年期/' + file_path[-20:-4]
            folder = os.path.exists(excel_path)
            if not folder:
                os.makedirs(excel_path)
            network_indicators.to_excel(excel_writer=excel_path + '/相关指标.xlsx',
                                        index=False)

            address = pd.read_excel(
                io='../data/生成数据/01企业名称修改表/13整车数据ZZCMCZ编码表.xlsx',
                usecols=[0, 1],
                converters={
                    'ZZCMC编号': str,
                    'ZZCMC企业名称': lambda x: x.strip()
                })
            address = address.rename(columns={
                'ZZCMC编号': 'nodes',
                'ZZCMC企业名称': 'institution_name'
            })
            network_indicators_address = pd.merge(left=network_indicators,
                                                  right=address,
                                                  how='left',
                                                  on=['nodes'])
            network_indicators_address['year'] = year + 4
            network_indicators_address.to_excel(excel_writer=excel_path +
                                                '/相关指标(企业名称).xlsx',
                                                index=False)
Code Example #13
def exchange_shifts(workers: List[Worker]):
    # init graph
    g = nx.DiGraph()
    g.add_nodes_from(workers)  # worker node
    g.add_nodes_from([0, len(workers) - 1])  # shift node

    for w in workers:
        g.add_edge(w.current_shift, w)  # add edge from shift to worker
        g.add_edge(w, w.wanted_shift)  # add edge from worker to wanted shift

    while not nx.is_empty(g):
        # find cycle and update shifts and remove nodes
        for cycle in nx.simple_cycles(g):
            for node in cycle:
                if not isinstance(node,
                                  int):  # go on the workers and match shifts
                    if node.wanted_shift != node.current_shift:  # if the shift was switched
                        print(node.name, "moves from shift",
                              node.current_shift, "to shift",
                              node.wanted_shift)
                    else:
                        print(node.name, "stays with shift",
                              node.current_shift)
            g.remove_nodes_from(cycle)

        # update new wanted shifts
        for w in list(g.nodes):
            if not isinstance(w, int):
                while not g.has_node(
                        w.wanted_shift):  # search the first shift that exists
                    w.delete_first()
                g.add_edge(
                    w, w.wanted_shift)  # add edge from worker to wanted shift
Code Example #14
def solve(G):
    """
    Args:
        G: networkx.Graph

    Returns:
        T: networkx.Graph
    """

    end_time = datetime.now() + timedelta(seconds=TIMEOUT)

    graphs_to_consider = []
    graphs_to_consider.append(minimum_spanning_tree(G))
    index = 0
    while datetime.now() < end_time and index < len(G.nodes):
        resulting_graph = create_graph_from_dominating_set(
            G, dominating_set(G, start_with=index))
        #print( average_pairwise_distance(resulting_graph))
        #resulting_graph = optimize_pairwise_distances(G, resulting_graph, average_pairwise_distance(resulting_graph))

        if not nx.is_empty(resulting_graph) and nx.is_connected(
                resulting_graph):
            graphs_to_consider.append(resulting_graph)
        index += 1
    return min(graphs_to_consider, key=average_pairwise_distance)
Code Example #15
def spectral_gap_sparse(G):
    if (not nx.is_empty(G)):
        if (nx.is_connected(G)):
            d = []
            # start = time.time()

            for elem in list(G.nodes):
                deg = len(list(G.neighbors(elem)))
                d.append(1 / deg)

            Aa100 = nx.to_scipy_sparse_matrix(G)
            invD = np.diag(d)
            n = len(d)
            out = sp.sparse.csr_matrix(invD)
            P = out * Aa100

            if (len(G.nodes()) > 2):
                spettro = sp.sparse.linalg.eigsh(P,
                                                 k=2,
                                                 which="LA",
                                                 return_eigenvectors=False)

                spettro = sorted(spettro, reverse=True)
                sg = spettro[0] - spettro[1]
            else:
                sg = 0
        else:
            sg = 0
    else:
        sg = 0

    sg_trans = float(sg)
    return (sg_trans)
Code Example #16
File: interfaces.py Project: dPys/PyNets
    def prune_graph(self):
        import graspologic.utils as gu
        from pynets.statistics.individual.algorithms import defragment, \
            prune_small_components, most_important

        hardcoded_params = utils.load_runconfig()

        if int(self.prune) not in range(0, 4):
            raise ValueError(f"Pruning option {self.prune} invalid!")

        if self.prune != 0:
            # Remove isolates
            G_tmp = self.G.copy()
            self.G = defragment(G_tmp)[0]
            del G_tmp

        if int(self.prune) == 1:
            try:
                self.G = prune_small_components(
                    self.G, min_nodes=hardcoded_params["min_nodes"][0])
            except BaseException:
                print(
                    UserWarning(f"Warning: pruning {self.est_path} "
                                f"failed..."))
        elif int(self.prune) == 2:
            try:
                hub_detection_method = \
                hardcoded_params["hub_detection_method"][0]
                print(f"Filtering for hubs on the basis of "
                      f"{hub_detection_method}...\n")
                self.G = most_important(self.G, method=hub_detection_method)[0]
            except FileNotFoundError as e:
                import sys
                print(e, "Failed to parse advanced.yaml")

        elif int(self.prune) == 3:
            print("Pruning all but the largest connected "
                  "component subgraph...")
            self.G = gu.largest_connected_component(self.G)
        else:
            print("No graph defragmentation applied...")

        self.G = nx.from_numpy_array(self.in_mat)

        if nx.is_empty(self.G) or \
            (np.abs(self.in_mat) < 0.0000001).all() or \
                self.G.number_of_edges() == 0:
            print(
                UserWarning(f"Warning: {self.est_path} "
                            f"empty after pruning!"))
            return self.in_mat, None

        # Save the pruned matrix
        final_mat_path = None
        if (self.prune != 0) and (self.prune is not None):
            final_mat_path = f"{self.est_path.split('.npy')[0]}_pruned"
            utils.save_mat(self.in_mat, final_mat_path, self.out_fmt)
            print(f"Source File: {final_mat_path}")

        return self.in_mat, final_mat_path
Code Example #17
File: Attendee.py Project: ceciledebezenac/el_farol
    def find_friends(self):
        # find the neighbors in the graph, not in the grid
        if not nx.is_empty(self.model.G):
            self.friends = self.model.G[self]
            self.friend_attendance = np.array(
                [f.attendance for f in self.friends])
        else:
            return None
Code Example #18
def calculate_enterprise_indicators_three_year_ipcmg():
    """计算企业网络指标(三年期IPCMG)"""
    institutions = pd.read_excel(io='../data/生成数据/06专利数据/汽车产业(企业名称).xlsx')

    description_columns = [str(i) + '-' + str(i + 2) for i in range(1985, 2013)]
    description_information = pd.DataFrame(columns=description_columns, index=list(institutions['企业名称']))

    statistical_columns = ['企业名称', '年份', 'ipcmgdgr3yw', 'ipcmgbtw3yw', 'ipcmglcrch3yw', 'ipcmgcnstr3yw',
                           'ipcmgeffsz3yw', 'ipcmgtrgl3yw', 'ipcmgclust3yw', 'ipcmgtirstr3yw', 'ipcmgntsz3yw',
                           'ipcmgntdnst3yw', 'ipcmgcliq3yw', 'ipcmgeffc3yw', 'ipcmgisolt3yw']
    statistical_information = pd.DataFrame(columns=statistical_columns)
    index_symbol = 0

    for i in range(len(institutions)):
        each_institutions = institutions['企业名称'].iloc[i]
        for j in range(1985, 2013):
            years_interval = str(j) + '-' + str(j + 2)
            file_path = '../data/生成数据/07IPCMGSG网络/三年期IPCMG/' + each_institutions + '/' + years_interval + '年IPCMG矩阵.csv'
            if os.path.exists(file_path):
                graph = create_diagrams(file_path)
                if not nx.is_empty(graph):
                    description_information.loc[each_institutions, years_interval] = 1
                    network_indicators = calculate_networks_indicators(graph)
                    excel_path = '../data/生成数据/07IPCMGSG网络/三年期IPCMG(网络指标)/' + each_institutions + '/' + years_interval + '年IPCMG矩阵'
                    folder = os.path.exists(excel_path)
                    if not folder:
                        os.makedirs(excel_path)
                    network_indicators.to_excel(excel_writer=excel_path + '/相关指标.xlsx',
                                                index=False)
                    del network_indicators['nodes']
                    network_indicators_average = network_indicators.mean()
                    row_information = [each_institutions, j + 2,
                                       network_indicators_average['degree_centrality'],
                                       network_indicators_average['betweenness_centrality'],
                                       network_indicators_average['local_reaching_centrality'],
                                       network_indicators_average['constraint'],
                                       network_indicators_average['effective_size'],
                                       network_indicators_average['triangles'],
                                       network_indicators_average['clustering'],
                                       network_indicators_average['tie_strength'],
                                       network_indicators_average['number_of_node'],
                                       network_indicators_average['density'],
                                       network_indicators_average['cliques'],
                                       network_indicators_average['efficiency'],
                                       network_indicators_average['isolates']]
                    statistical_information.loc[index_symbol] = row_information
                    index_symbol += 1
                else:
                    description_information.loc[each_institutions, years_interval] = 0
            else:
                description_information.loc[each_institutions, years_interval] = None

    description_information = description_information.reset_index(drop=False)
    description_information.to_excel(excel_writer='../data/生成数据/07IPCMGSG网络/统计信息/01三年期IPCMG矩阵描述信息.xlsx',
                                     index=False)
    statistical_information.to_excel(excel_writer='../data/生成数据/07IPCMGSG网络/统计信息/02三年期IPCMG网络指标信息.xlsx',
                                     index=False)
Code Example #19
File: micro_rca.py Project: meng2468/AIOps-Challenge
    def detect(self, traces_df, kpis, visualize=False):
        traces_df = self.process(traces_df)

        # Parse the traces and kpis
        parsed_traces = self.parse_traces(traces_df)

        # FIXME possible case where system doesn't answer for a long time and wasn't called

        #check for anomaly
        # 1 - find outlier in elapsed
        #   1.1 microRCA

        traces = self.get_anomalous_traces(traces_df)

        # Hosts + Service
        # Each service connects to all the services it communicates with and all hosts it connects to (no need to differentiate!)
        DG = nx.DiGraph()
        for trace in traces:
            DG = self.trace_graph(parsed_traces[trace], DG)

        if visualize:
            print(DG.nodes(data=True), len(DG.nodes()))

            plt.figure(figsize=(9, 9))
            #pos = nx.spring_layout(DG)
            # pos = nx.draw_shell(DG)
            # nx.draw(DG, pos, with_labels=True, cmap=plt.get_cmap('jet'), node_size=0, arrows=True)
            nx.draw_shell(DG, with_labels=True)
            # nx.draw_networkx_nodes(DG, pos, nodelist=hosts, node_color="r", node_size=1500)
            # nx.draw_networkx_nodes(DG, pos, nodelist=services, node_color="b", node_size=500)
            # nx.draw_networkx_edges(DG, pos, width=1.0, alpha=0.5)

            labels = nx.get_edge_attributes(DG, 'weight')
            # nx.draw_networkx_edge_labels(DG, pos, edge_labels=labels)
            plt.savefig('output.png')

        # print(f'[DEBUG] Graph is {"connected" if nx.is_weakly_connected(DG) else "not connected"}')
        # Extract anomalous subgraph
        anomaly_DG, anomalous_edges = self.get_anomalous_graph(
            DG, traces, parsed_traces)

        if nx.is_empty(anomaly_DG):
            raise ValueError('No anomaly detected')

        # Faulty service localization
        # Update weights of anomalous graph
        #           Use cases from the paper
        # Get personalization vector (Transition Probability Matrix)
        # Reverse the service-service edges
        # Apply pagerank
        parsed_kpis = self.parse_kpis(kpis)

        result = self.get_fault_service(anomaly_DG, anomalous_edges, traces_df,
                                        parsed_kpis)

        return result
Code Example #20
def connected(g, index):
    if nx.is_empty(g) or nx.is_connected(g):
        return g
    else:
        # keep the connected component that contains `index`
        # largest_cc = max(nx.connected_components(g), key=len)
        for component in nx.connected_components(g):
            if index in component:
                largest_cc = component
                break
        subG = g.subgraph(largest_cc)
        return subG
Code Example #21
File: auto_fill.py Project: dcwhyme/inkstitch
def check_graph(graph, shape, max_stitch_length):
    if networkx.is_empty(graph) or not networkx.is_eulerian(graph):
        if shape.area < max_stitch_length**2:
            message = "This shape is so small that it cannot be filled with rows of stitches.  " \
                      "It would probably look best as a satin column or running stitch."
            raise InvalidPath(_(message))
        else:
            message = "Cannot parse shape.  " \
                      "This most often happens because your shape is made up of multiple sections that aren't connected."
            raise InvalidPath(_(message))
Code Example #22
File: test_graphs.py Project: david-zwicker/py-utils
    def test_conncet_components_error(self):
        g = nx.Graph()
        gc = graphs.connect_components(g, 'pos')
        self.assertTrue(nx.is_empty(gc))

        g.add_node(0)
        g.add_node(1, pos=(1, 1))
        g.add_edge(0, 1)
        self.assertRaises(ValueError,
                          lambda: graphs.connect_components(g, 'pos'))
Code Example #23
def save_file_edgelist(G):
    if not nx.is_empty(G):
        nome = input("Inserire un nome per il salvataggio del grafo: ")
        nome = nome + '.txt'
        with open(nome, 'wb') as f:
            nx.write_edgelist(G, f)
        print("Sotto-grafo salvato correttamente.")
    else:
        print("Non c'è nessun sotto-grafo da salvare.")
Code Example #24
    def addNodeToGraph(self):
        self.figure.clf()

        if nx.is_empty(self.the_graph):
            self.the_graph.add_node(0)
            # self.color_map[0] = 'blue'

        self.the_graph.add_node(list(self.the_graph.nodes)[-1] + 1)
        # self.color_map[list(self.the_graph.nodes)[-1]] = 'blue'

        nx.draw(self.the_graph, with_labels=True)
        self.canvas.draw_idle()
Code Example #25
def get_matching_polynomial_recursive(graph):
    if nx.is_empty(graph):
        return np.asarray([1] + [0] * graph.number_of_nodes())
    graph_copy = graph.copy()
    x = list(graph_copy.out_edges)[0]
    graph_copy.remove_edges_from([x])
    edge_removed_matching = get_matching_polynomial_recursive(graph_copy)
    graph_copy.remove_nodes_from([x[0], x[1]])
    vertex_removed_matching = get_matching_polynomial_recursive(graph_copy)
    vertex_removed_matching = np.pad(
        vertex_removed_matching,
        (edge_removed_matching.shape[0] - vertex_removed_matching.shape[0], 0),
        'constant')
    return edge_removed_matching - vertex_removed_matching
Code Example #26
File: main.py Project: afshinfard/temp
def determine_safer_backbones(g, pruning_threshold=100):
    """"Determine the backbones of the graph
    with ambiguous nodes being removed """
    g = g.copy()
    backbones = []
    while not nx.is_empty(g):
        gmst = g
        paths = determine_safer_backbones_of_trees(gmst, pruning_threshold)
        backbones.extend(paths)
        vertices = [u for path in paths for u in path]
        neighbors = [v for u in vertices for v in g.neighbors(u)]
        g.remove_nodes_from(vertices)
        g.remove_nodes_from(neighbors)
    backbones.sort(key=len, reverse=True)
    return backbones
Code Example #27
def partition_here(graph):
    
    if nx.is_empty(graph):
        return 0, 0
    Gcc = sorted(nx.connected_components(graph), key=len, reverse=True)
    G = graph.subgraph(Gcc[0])
    settings = nxmetis.MetisOptions(ncuts=4, niter=200, ufactor=280)
    par = nxmetis.partition(G, 2, options=settings)
    
    community1 = par[1][0]
    community2 = par[1][1]

    rwc = np.mean(randomwalk_polarization(G, 100, 0.02, 1000, community1, community2))
    prc = len(G)/len(graph)
    
    return rwc, prc
Code Example #28
    def construct_state_graph(assignments_dict, id_metatile_file):
        id_metatile_map = read_pickle(id_metatile_file)
        state_graph = nx.DiGraph()
        for (tile_x, tile_y), tile_id in assignments_dict.items():
            metatile = Metatile.from_str(id_metatile_map.get(tile_id))
            metatile_graph = nx.DiGraph(metatile.graph_as_dict)
            if not nx.is_empty(metatile_graph):
                unnormalized_graph = Metatile.get_normalized_graph(
                    metatile_graph,
                    coord=(tile_x * TILE_DIM, tile_y * TILE_DIM),
                    normalize=False)
                state_graph = nx.compose(state_graph, unnormalized_graph)

        return state_graph
Code Example #29
def clu_coe_attack(graph, centrality_metric):
    iters = 0
    graph = graph.copy()
    clu_coe = []
    ranks = centrality_metric(graph)
    nodes = sorted(graph.nodes(), key=lambda n: ranks[n])
    pos = nx.spring_layout(graph)
    while nx.is_connected(graph) and not nx.is_empty(graph):
        clu_coe.append(nx.average_clustering(graph))
        graph.remove_node(nodes.pop())
        file_name = './pics/' + str(iters) + '.png'
        # graph_snap(graph, pos, file_name)
        iters += 1
    else:
        file_name = './pics/' + str(iters) + '.png'
        # graph_snap(graph, pos, file_name)
        return clu_coe
Code Example #30
def rand_cleu_coe_attack(graph, n):
    iters = 0
    graph = graph.copy()
    clu_coe = []
    nodes = graph.nodes()
    pos = nx.spring_layout(graph)
    while nx.is_connected(graph) and not nx.is_empty(graph):
        sample = random.sample(list(nodes), n)  # NodeView is not a sequence; random.sample needs one
        clu_coe.append(nx.average_clustering(graph))
        graph.remove_nodes_from(sample)
        file_name = './pics/' + str(iters) + '.png'
        # graph_snap(graph, pos, file_name)
        iters += 1
    else:
        file_name = './pics/' + str(iters) + '.png'
        # graph_snap(graph, pos, file_name)
        return clu_coe
Code Example #31
def rand_den_attack(graph, n):
    iters = 0
    graph = graph.copy()
    density = []
    nodes = graph.nodes()
    pos = nx.spring_layout(graph)
    while nx.is_connected(graph) and not nx.is_empty(graph):
        sample = random.sample(list(nodes), min(len(nodes), n))  # materialize the NodeView for random.sample
        density.append(nx.density(graph))
        graph.remove_nodes_from(sample)
        file_name = './pics/' + str(iters) + '.png'
        # graph_snap(graph, pos, file_name)
        iters += 1
    else:
        file_name = './pics/' + str(iters) + '.png'
        # graph_snap(graph, pos, file_name)
        return density
Code Example #32
    def removeNodeFromGraph(self):
        if nx.is_empty(self.the_graph):
            return

        i, okPressed = QInputDialog.getInt(self, "Node to delete",
                                           "Delete which node?",
                                           list(self.the_graph.nodes)[0],
                                           list(self.the_graph.nodes)[0],
                                           list(self.the_graph.nodes)[-1], 1)
        if okPressed:
            self.figure.clf()
            try:
                self.the_graph.remove_node(i)
            except nx.NetworkXError:
                pass  # the node was not in the graph
            nx.draw(self.the_graph, with_labels=True)
            self.canvas.draw_idle()
Code Example #33
File: graphs.py Project: david-zwicker/py-utils
def connect_components(graph, pos_attr, length_attr=None):
    """ connects all components by iteratively inserting edges between
    components that have minimal distance.
    
    `graph` is a networkx graph object with positions assigned to nodes
    `pos_attr` gives the key for the node attribute that stores the position
    `length_attr` stores the length of the new edges in this edge attribute
    """
    if nx.is_empty(graph):
        return graph
    
    # build a distance matrix for all nodes
    vertices = nx.get_node_attributes(graph, pos_attr)
    nodes = to_array(vertices.keys())
    positions = to_array(vertices.values())
    dists = distance.squareform(distance.pdist(positions))
        
    if len(vertices) != nx.number_of_nodes(graph):
        raise ValueError("Not all nodes have a position specified by the node "
                         "attribute `%s`" % pos_attr)

    # get all subgraphs and build a list of indices into the distance matrix
    # (nx.connected_component_subgraphs was removed in networkx 2.4; newer code
    # would use [graph.subgraph(c).copy() for c in nx.connected_components(graph)])
    subgraphs = list(nx.connected_component_subgraphs(graph))
    num_subgraphs = len(subgraphs)
    # find the index of each node of each subgraph in the nodes array
    sg_nids_list = [[np.flatnonzero(nodes == n)[0] for n in sg.nodes()]
                    for sg in subgraphs]
    
    assert sum(len(s) for s in sg_nids_list) == nx.number_of_nodes(graph)
    
    # initialize result with first subgraph
    result = subgraphs.pop(0)
    result_nids = sg_nids_list.pop(0)
    
    # iterate until all subgraphs have been added
    while subgraphs:
        # find subgraph that is closest to `result`
        sg_min, nid_sg, nid_res, dist_min = None, None, None, np.inf
        for k, sg_nids in enumerate(sg_nids_list):
            dist_mat = dists[sg_nids, :][:, result_nids]
            x, y = np.unravel_index(dist_mat.argmin(), dist_mat.shape)
            dist = dist_mat[x, y]
            if dist < dist_min:
                sg_min = k  # index into the subgraph
                dist_min = dist  # its distance to `result`
                # store the node indices for the connecting edge
                nid_sg = sg_nids[x]
                nid_res = result_nids[y]

        # add graph to `result`
        result.add_nodes_from(subgraphs[sg_min].nodes(data=True))
        result.add_edges_from(subgraphs[sg_min].edges(data=True))
        
        # add a new edge between the subgraph and `result`
        if length_attr is not None:
            attr_dict = {length_attr: dists[nid_res, nid_sg]}
        else:
            attr_dict = {}  # empty dict: `**None` in add_edge below would raise a TypeError
        result.add_edge(nodes[nid_res], nodes[nid_sg], **attr_dict)
            
        # remove the subgraph from the to-do list
        result_nids.extend(sg_nids_list.pop(sg_min))
        del subgraphs[sg_min]
        
    assert nx.is_connected(result)
    assert nx.number_of_nodes(result) == len(vertices)
    assert nx.number_of_edges(result) == \
            nx.number_of_edges(graph) + num_subgraphs - 1
        
    return result