def load_airport_and_route(filter_data=True, deep_load=False):
    # Load airports
    a_path = airport_path if not deep_load else '../' + airport_path
    r_path = route_path if not deep_load else '../' + route_path
    airports = load_csv_col(a_path, cols=airpt_cols)
    routes = load_csv_col(r_path, cols=rt_cols)

    # Filter data
    cutoff = None if filter_data else 0
    airports, routes, route_indexes = filter_degree(airports, routes, d_cut_off=cutoff)
    airports, route_indexes = re_index(airports, route_indexes)

    return airports, route_indexes
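# The loader above and the scripts below lean on utilities.load_csv_col, whose implementation is
# not reproduced here. The sketch below is an assumption inferred purely from the call sites
# (cols selects columns, parse casts each value, trans transposes, with_headers=True returns
# (data, headers)); the real utility likely returns numpy arrays, since callers slice with
# [:, :time_limit]. The name load_csv_col_sketch is hypothetical.
import csv
from numpy import array


def load_csv_col_sketch(path, cols=None, with_headers=False, trans=False, parse=None):
    with open(path, newline='') as f:
        rows = [row for row in csv.reader(f) if row]
    headers = rows.pop(0) if with_headers else None          # first row holds column titles
    if cols is not None:
        rows = [[row[c] for c in cols] for row in rows]      # keep only the requested columns
    if parse is not None:
        rows = [[parse(v) for v in row] for row in rows]     # e.g. parse=float
    data = array(rows)
    if trans:
        data = data.T                                        # columns become rows
    return (data, headers) if with_headers else data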
data_path = '../../data/'
figure_path = '../../results/'
centrality_path = data_path + 'centrality_metrics/'
max_entropy_path = data_path + 'max_entropy/'
opt_path = data_path + 'optimal_distribution/'

versions = ['single_red', 'uniform_red']
optimal_headers = ['Maximum Entropy', 'Centrality', 'Adapted Centrality', 'Clique', 'Numerical Optimal']
centrality_headers = ['Closeness Centrality', 'Eigenvector Centrality', 'Degree Centrality', 'Betweenness Centrality']
max_entropy_headers = ['Eigenvector Centrality', 'Degree Centrality', 'Betweenness Centrality', 'Closeness Centrality']
time_limit = 100

for version in versions:
    # Get data
    centrality_data, _ = load_csv_col(centrality_path + version + '.csv', with_headers=True, trans=True, parse=float)
    max_entropy_data, _ = load_csv_col(max_entropy_path + version + '.csv', with_headers=True, trans=True, parse=float)
    opt_centrality = load_csv_col(centrality_path + 'opt_' + version + '.csv', trans=True, parse=float)
    opt_analytical = load_csv_col(opt_path + version + '_trial.csv', parse=float)[0]
    opt_clique = load_csv_col(data_path + 'clique/' + version + '.csv', parse=float)[0]

    # Remove data after time-limit for clarity
    centrality_data, _ = centrality_data[:, :time_limit], centrality_headers[0:4]
    max_entropy_data, _ = max_entropy_data[:, :time_limit], max_entropy_headers[0:4]

    # Take optimal metrics
    opt_entropy = max_entropy_data[1]

    # Define paths
    centrality_plot_path = figure_path + 'centrality_metrics/' + version + '.png'
fig_path = '../../results/analysis/uniform.png'

# Run trial
if fresh_data:
    # Import data and generate network
    _, routes = load_airport_and_route(deep_load=True)
    netx = from_edgelist(routes)
    N = number_of_nodes(netx)
    net = network(N, graph=netx)
    budget = balls_per_node * N
    print('Data imported and network generated')

    # Calculate node degree
    degrees = array(sorted(degree(netx), key=lambda d: d[0]))[:, 1]
    max_d_node = argmax(degrees)

    # Initialize opponent distribution
    red = zeros(N)
    red[max_d_node] = budget

    # Run basic metrics
    net.set_initial_distribution(red=red)
    exposures = run_polya(net, steps=steps)
    save_trials(exposures, data_name, single_line=True)
else:
    exposures = load_csv_col(data_name, parse=float)

# Plot data
plot_infection(exposures, file_name=fig_path)
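# run_polya and network are project-internal, so their exact behaviour is not shown in these
# scripts. As a rough illustration only, the sketch below implements one step of a network
# Polya contagion process of the kind the trials above appear to simulate: every node draws
# from the combined urn of its closed neighbourhood and reinforces its own urn with the drawn
# colour. The function name, the delta parameter, and the adjacency-matrix input are all
# assumptions, not the project's API.
from numpy import append, nonzero, zeros
from numpy.random import default_rng


def polya_step_sketch(adj, red, black, delta=1, rng=default_rng()):
    """One reinforcement step; red/black are integer ball counts per node."""
    n = len(red)
    drew_red = zeros(n, dtype=bool)
    for i in range(n):
        nbrs = append(nonzero(adj[i])[0], i)                 # closed neighbourhood of i
        p_red = red[nbrs].sum() / (red[nbrs].sum() + black[nbrs].sum())
        drew_red[i] = rng.random() < p_red                   # draw from the combined 'super urn'
    red += delta * drew_red                                  # reinforce the drawn colour
    black += delta * ~drew_red
    return drew_red                                          # per-node infection indicator

# Averaging drew_red (or the red proportions) over nodes and repeated trials would give
# infection curves of the kind plot_infection draws above, if run_polya follows this model.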
if fresh_data:  # regenerate trials, otherwise load the saved results
    # Import data and generate network
    _, edges = load_airport_and_route(deep_load=True)
    netx = from_edgelist(edges)
    N = number_of_nodes(netx)
    budget = N * balls_per_node
    net = network(N, graph=netx)
    print('Data imported and network generated')

    # Find and sort cliques, largest first
    cliques = sorted(find_cliques(netx), key=lambda c: len(c), reverse=True)

    # Sweep the number of cliques used to place the budget
    trial_infections = []
    num_cliques = linspace(1, 120, 40).astype(int)
    for num in num_cliques:
        popularity_contest(net, num, budget)
        trial = run_polya(net, trials=2)
        trial_infections.append(trial[len(trial) - 1])

    save_trials(trial_infections, data_path, titles=num_cliques, single_line=True)
else:
    trial_infections, num_cliques = load_csv_col(data_path, with_headers=True, parse=float, trans=True)

# Plot data
data = array([num_cliques, trial_infections])
plot_scatter_data(data, x_label='Number of Cliques', y_label='Time n infection', size=fig_size,
                  connect=True, file_name=fig_path)
if fresh_data:  # regenerate trials, otherwise load the saved results
    net = network(N, graph=netx)
    bud = N * balls_per_node

    # Sweep ball ratios and compute the corresponding black-ball budgets
    ratios = linspace(.1, .9, 15)
    budgets = [(bud / ratio - bud) for ratio in ratios]
    print(ratios)

    trial_infection = []
    for budget in budgets:
        tmp = round(budget / N)
        B = [tmp] * N
        print(budget, tmp)

        net.set_initial_distribution(black=B)
        infection = run_polya(net, steps=num_steps, trials=5)
        trial_infection.append(infection[len(infection) - 1])

    save_trials(trial_infection, data_path, titles=ratios, single_line=True)
else:
    [trial_infection], ratios = load_csv_col(data_path, with_headers=True, parse=float)
    ratios = [1 / (1 + float(ratio)) for ratio in ratios]

data = array([ratios, trial_infection]).astype(float)
plot_scatter_data(data, file_name=fig_path, x_label='$\\frac{R}{R+B}$', y_label='$I_{250}$',
                  size=fig_size, connect=True)
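# The budget sweep above follows from solving r = R / (R + B) for B, assuming bud plays the
# role of the fixed red budget R (N * balls_per_node) and each swept budget is the black-ball
# total B: B = R / r - R, which is exactly bud / ratio - bud. A tiny standalone check of that
# identity (illustrative only, not part of the original trial script):
R = 150.0                      # stand-in for bud = N * balls_per_node
for r in (0.1, 0.5, 0.9):
    B = R / r - R              # black budget implied by the target ratio
    assert abs(R / (R + B) - r) < 1e-12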
if fresh_data:
    # For each tested delay, grant the red player the equivalent extra per-node budget
    for time in delay:  # 'delay' assumed to hold the tested time-step delays (saved below as titles)
        per_node = time + balls_per_node
        budget = N * per_node
        if uniform:
            red = [per_node] * N
        else:
            degrees = dict_to_arr(degree(netx))
            red = zeros(N)
            red[argmax(degrees)] = budget
        print(sum(red), budget, time)

        simple_centrality(net, 2, red=red)
        vals = run_polya(net, steps=time_limit)
        trial_infection.append(vals)
else:
    trial_infection, delay = load_csv_col(data_name, with_headers=True, trans=True, parse=float)
    delay = array(delay).astype(float)

trial_infection = array(trial_infection)
time_n = len(trial_infection[0]) - 1
time_N_infections = trial_infection[:, time_n]

# Save and plot data
if fresh_data:
    save_trials(trial_infection, data_name, titles=delay)
# plot_infection(trial_infection, leg=delay, multiple=True, file_name=img_name, blocking=False)
data = array([delay, time_N_infections])
plot_scatter_data(data, x_label='Time step delay', y_label='$I_{' + str(time_n) + '}$', connect=True,
                  file_name=scatter_name, size=fig_size)
if fresh_data:
    # Import data and generate network
    nodes, edges = load_airport_and_route(deep_load=True)
    netx = from_edgelist(edges)
    N = number_of_nodes(netx)
    net = network(N, graph=netx)
    x_vals = array(range(1, num_steps + 2))
    R = [balls_per_node] * N

    # Maximum Entropy
    # maximum_entropy(net, metric_id=1)
    max_ent_infections = run_polya(net, steps=num_steps, combine=False, trials=num_trials)
else:
    max_ent_infections = load_csv_col(max_ent_path, parse=float, trans=True)

max_ent_var = var(max_ent_infections, axis=0)

if fresh_data:
    save_trials(max_ent_infections, max_ent_path)

# Package the per-step variance for plotting
max_ent_data = zeros((2, len(max_ent_var)))
for i in range(len(max_ent_var)):
    max_ent_data[0, i] = i + 1
    max_ent_data[1, i] = max_ent_var[i]

plot_scatter_data(max_ent_data, file_name=fig_path, x_label='Time Step, n', y_label='Variance',
            # Distribute the black-ball budget over the top num nodes, proportional to centrality
            for i in range(num):
                ind = int(round(cents[i, 0]))
                B[ind] = cents[i, 1] / total * budget
            print(sum(B))

            net.set_initial_distribution(black=B, red=R)
            tmp = run_polya(net, steps=time_limit)
            centrality_infections.append(tmp[len(tmp) - 1])
        trial_exposures.append(centrality_infections)

    trial_exposures = array(trial_exposures)
    save_trials(trial_exposures, data_path, titles=num_nodes)
else:
    trial_exposures, num_nodes = load_csv_col(data_path, with_headers=True, trans=True, parse=float)
    num_nodes = array(num_nodes).astype(float)

# Plot data
data = []
for trial in trial_exposures:
    data.append([num_nodes, trial])
data = array(data)

plot_scatter_data([data[0], data[1], data[3], data[2]], x_label='Number of Nodes with Black Balls, $m$',
                  connect=True, y_label='$I_{' + str(time_limit) + '}$', file_name=scat_path, size=(10, 7.5),
from utilities import load_csv_col, fig_size
from utilities.plotting import plot_scatter_data
from numpy import array

# Define constants
base_path = '../../data/clique/'
simple_path = base_path + 'simple.csv'
single_path = base_path + 'simple_single.csv'
popular_path = base_path + 'popular.csv'
fig_path = '../../results/clique/compare.png'
headers = ['Simple Clique', 'Popular Clique']

# Import data
[simple_data], simple_nums = load_csv_col(simple_path, with_headers=True, parse=float)
# [single_data], single_nums = load_csv_col(single_path, with_headers=True, parse=float)
[popular_data], popular_nums = load_csv_col(popular_path, with_headers=True, parse=float)

# Package data
data = array([
    [simple_nums, simple_data],
    # [single_nums, single_data],
    [popular_nums, popular_data]
]).astype(float)[:, :, 1:]

# Plot data
plot_scatter_data(data, multiple=True,