import random

import networkx as nx
import numpy

# NoisyGraph is the project's own wrapper class used throughout these
# examples; the import path below is an assumption.
from noisy_graph import NoisyGraph

if __name__ == '__main__':

    # experimental setup
    seed = 200494
    random.seed(seed)
    numpy.random.seed(seed)

    graph_size = 5000
    no_infected_nodes = round(0.01 * graph_size)
    m = 20

    graph = nx.barabasi_albert_graph(graph_size, m, seed)

    # creating noisy graph
    noisy_graph = NoisyGraph()
    noisy_graph.add_edges_from(graph.edges, real=True)

    # printing headers
    print(
        'fraction,graph_uncertainty,mean_uncertainty,std_dev_uncertainty,min_uncertainty,max_uncertainty,'
        'number_contacts')

    # sweeping the fraction from 0% to 100% in 5% steps (21 observations)
    for i in range(0, 101, 5):
        # obtaining fraction
        fraction = i / 100

        # adding edges from missing_edges list
        noisy_graph.add_missing_edges_per_node_ensuring_fraction(fraction)
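        # The rest of this loop body is cut off. A sketch of the likely
        # continuation, based on the CSV header above and on the uncertainty
        # calls used in the later examples; number_of_edges() is an assumed
        # NoisyGraph accessor standing in for the number_contacts column.
        graph_uncertainty = noisy_graph.uncertainty()
        mean_uncertainty, std_dev_uncertainty, min_uncertainty, max_uncertainty = noisy_graph.uncertainty_profile()
        number_contacts = noisy_graph.number_of_edges()  # assumed accessor

        # printing the results row in the same order as the header
        print(f'{fraction},{graph_uncertainty},{mean_uncertainty},{std_dev_uncertainty},'
              f'{min_uncertainty},{max_uncertainty},{number_contacts}')
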
Example #2

    # printing headers (the leading columns of the original statement are missing from this fragment)
    print(
        'centrality_metric,mean_se_value,min_se_value,max_se_value')
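
    # The setup of this example did not survive in this fragment. A minimal,
    # assumed sketch: seed and graph_size reuse the values from the first
    # example, ms holds illustrative attachment parameters, and
    # centrality_algorithms can be any NetworkX functions that map a graph to
    # a per-node score dict, matching the comprehension used below.
    seed = 200494
    random.seed(seed)
    graph_size = 5000
    ms = [5, 10, 20]
    centrality_algorithms = [
        nx.degree_centrality,
        nx.closeness_centrality,
        nx.betweenness_centrality,
    ]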

    # one experiment for every value of the attachment parameter m
    for m in ms:

        # generating original graph
        graph = nx.barabasi_albert_graph(graph_size, m, seed)

        # obtaining original centrality metrics
        original_metrics = {
            alg.__name__: alg(graph)
            for alg in centrality_algorithms
        }

        # creating noisy graph
        noisy_graph = NoisyGraph()
        noisy_graph.add_edges_from(graph.edges, real=True)

        # obtaining missing edges list
        missing_edges = noisy_graph.missing_edges()
        random.shuffle(missing_edges)

        # starting counters
        no_missing_edges = len(missing_edges)
        start_index = 0

        # sweeping the fraction from 0% to 100% in 5% steps (21 observations)
        for i in range(0, 101, 5):
            # obtaining ending index
            fraction = i / 100
            end_index = round(no_missing_edges * fraction)
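            # The rest of this loop body is cut off. A sketch of the likely
            # continuation: the next slice of shuffled missing edges is added
            # to the noisy graph as fake edges (real=False mirrors the
            # real=True call above but is an assumption), the window is
            # advanced, and the se columns are then produced by comparing
            # recomputed centralities against original_metrics.
            noisy_graph.add_edges_from(missing_edges[start_index:end_index], real=False)
            start_index = end_index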
Example #3

    # printing headers (leading columns truncated in this fragment)
    print(
        'centrality_metric,mean_se_value,min_se_value,max_se_value')

    # model setup
    m = 20
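
    # As with the previous example, the rest of the setup did not survive; a
    # minimal, assumed sketch (graph sizes are illustrative, seed reuses the
    # value from the first example):
    seed = 200494
    graph_sizes = [1000, 2000, 5000]
    centrality_algorithms = [
        nx.degree_centrality,
        nx.closeness_centrality,
        nx.betweenness_centrality,
    ]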

    # one experiment for every graph size
    for graph_size in graph_sizes:

        # generating original graph
        graph = nx.barabasi_albert_graph(graph_size, m, seed)

        # obtaining original centrality metrics
        original_metrics = {alg.__name__: alg(graph) for alg in centrality_algorithms}

        # creating noisy graph
        noisy_graph = NoisyGraph()
        noisy_graph.add_edges_from(graph.edges, real=True)

        # sweeping the fraction from 0% to 100% in 5% steps (21 observations)
        for i in range(0, 101, 5):
            # obtaining fraction
            fraction = i / 100

            # adding edges from missing_edges list
            noisy_graph.add_missing_edges_per_node_ensuring_fraction(fraction)

            # calculating uncertainty values
            graph_uncertainty = noisy_graph.uncertainty()
            mean_uncertainty, std_dev_uncertainty, min_uncertainty, max_uncertainty = noisy_graph.uncertainty_profile()

            # disturbing graph
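            # The example is cut off here. Whatever the disturbance step does,
            # the se columns in the header imply that each centrality is then
            # recomputed on the disturbed graph and compared node by node with
            # original_metrics; to_graph() below is a hypothetical accessor
            # standing in for however NoisyGraph exposes its current edge set
            # as a networkx graph.
            disturbed_graph = noisy_graph.to_graph()
            for alg in centrality_algorithms:
                noisy_values = alg(disturbed_graph)
                squared_errors = [
                    (original_metrics[alg.__name__][node] - noisy_values[node]) ** 2
                    for node in disturbed_graph.nodes
                ]
                print(f'{alg.__name__},{sum(squared_errors) / len(squared_errors)},'
                      f'{min(squared_errors)},{max(squared_errors)}')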