# Reconstructed layout: this chunk arrived flattened onto one physical line;
# the indentation below is inferred from statement order — TODO confirm
# against the original script.  The chunk is truncated mid-loop.
import get_data
import pandas as pd
import random as rdm

# Generate a synthetic contact time series of each kind for every static network.
static_networks = get_data.list_of_static_networks()
for time_series in ['Poisson', 'Circadian', 'Bursty']:
    t_0 = 0          # start of the simulated observation window
    delta_t = 10**5  # length of the simulated observation window
    for data in static_networks:
        df = pd.read_csv('../Data/Static_networks/' + data + '_edgelist.csv')
        # Unique node IDs appearing at either end of any edge.
        ID_list = list(set(pd.concat([df['ID1'], df['ID2']])))  # how long? N
        N = len(ID_list)
        total_number_of_contacts = sum(df['Weight'])
        # S[i] = strength of node i: summed weight of every edge touching it.
        S = []
        for ID in ID_list:
            ID_df = df[(df['ID1'] == ID) | (df['ID2'] == ID)]
            S.append(int(sum(ID_df['Weight'])))
        # Per-node contact lists, filled by the continuation of this loop
        # (not visible in this chunk).
        contacts = {}
        for node in ID_list:
            contacts[node] = []
        # Output frame for the generated temporal edge list.
        new_df = pd.DataFrame(columns=['ID1', 'ID2', 'start_time', 'end_time'])
# Reconstructed layout: this chunk arrived flattened onto one physical line;
# the indentation below is inferred from statement order — TODO confirm
# against the original script.  The chunk is truncated mid-loop.
import get_data
import pandas as pd
import pickle
import numpy as np

mean_weight_values={}
weight_heterogeneity_values={}
# Static networks plus temporal ones (bat data included).
data_list=get_data.list_of_static_networks()+get_data.list_of_temporal_networks(bats=True)
for data in data_list:
    df,t_0,delta_t,species,interaction,phi_zero=get_data.dataframe(data)
    ############################################################
    # The bat data set identifies nodes via ID1 only; all other data sets
    # use both endpoint columns.
    if data=='bats':
        ID_list=list(set(df['ID1']))
    else:
        ID_list=list(set(pd.concat([df['ID1'],df['ID2']])))
    N=len(ID_list)
    ####################################################################
    # K[i]: number of distinct partners of node i (degree).
    K=[len(set(pd.concat([df[df['ID1']==ID]['ID2'],df[df['ID2']==ID]['ID1']]))) for ID in ID_list]
    # S[i]: number of contact rows involving node i (strength as row count).
    S=[len(pd.concat([df[df['ID1']==ID]['ID2'],df[df['ID2']==ID]['ID1']])) for ID in ID_list]
    # W is filled by the continuation of this loop (not visible here) —
    # presumably per-edge weights, given the *_weight_values dicts above.
    W=[]
    # edge_list de-duplicates undirected edges (sorted endpoint tuples).
    edge_list=[]
    for i,row in df.iterrows():
        edge=tuple(sorted([row['ID1'],row['ID2']]))
        if edge not in edge_list:
            edge_list.append(edge)
# import matplotlib.pyplot as plt from scipy.special import hyp2f1 import pandas as pd import pickle as pk import get_data data_list = get_data.list_of_temporal_networks( bats=True) + get_data.list_of_static_networks() #+get_data.twitter() names = { 'R0_prediction': 'Prediction', 'phi': 'Social fluidity', 'population': 'Population size', 'degree': 'Mean degree', 'excess_degree': 'Excess degree', 'mean_strength': 'Mean strength', 'mean_weight': 'Mean edge weight', 'weight_heterogeneity': 'Edge weight heterogeneity', 'modularity': 'Modularity', 'clustering': 'Mean clustering' } #network_stats=['beta','Delta_I','population','mean_strength','degree','excess_degree','mean_weight','weight_heterogeneity','phi','R0_prediction','modularity','clustering'] network_stats = [ 'phi', 'epsilon', 'population', 'degree', 'excess_degree', 'mean_strength', 'mean_weight', 'weight_heterogeneity', 'modularity', 'clustering' ] R_star_range = [2, 3, 4] time_series_range = ['Poisson', 'Circadian', 'Bursty']
} names = { 'R0_prediction': 'Prediction', 'phi': 'Social fluidity', 'population': 'Population size', 'degree': 'Mean degree', 'excess_degree': 'Excess degree', 'mean_strength': 'Mean strength', 'mean_weight': 'Mean edge weight', 'weight_heterogeneity': 'Edge weight heterogeneity', 'modularity': 'Modularity', 'clustering': 'Mean clustering' } data_list = get_data.list_of_temporal_networks( bats=False) + get_data.list_of_static_networks() dic = pk.load(open('pickles/phi.p', 'rb')) phi = [dic[d] for d in data_list] dic = pk.load(open('pickles/mean_strength.p', 'rb')) strength = [dic[d] for d in data_list] dic = pk.load(open('pickles/population.p', 'rb')) population = [dic[d] for d in data_list] dic = pk.load(open('pickles/mean_weight.p', 'rb')) weight = [dic[d] for d in data_list] dic = pk.load(open('pickles/weight_heterogeneity.p', 'rb')) weight_heterogeneity = [dic[d] for d in data_list]
'Shark': '#6eb791',   # these first entries (hex colour codes keyed by
'Bee': '#f29bed'      # species) close a dict whose opening lies before
}                     # this chunk
# NOTE(review): looks like a per-interaction-type slot on one plot axis —
# confirm against the plotting code that follows this chunk.
position = {
    'Aggression': 0.5,
    'Food sharing': 1.5,
    'Antennal contact': 2.5,
    # 'Space sharing':3.5,
    'Face-to-face': 3.5,
    'Association': 4.5,
    'Grooming': 5.5
}
# Temporal networks (bats included) followed by the static ones.
data_list = get_data.list_of_temporal_networks(
    bats=True) + get_data.list_of_static_networks()
X = []
Y = []
fig = plt.figure(figsize=(12.1, 7.7))
ax = fig.add_subplot(111)
#ax=plt.subplot2grid((10,20), (0, 0), colspan=10,rowspan=10)
#algorithm to spread the data out
x_value = {}
y_value = {}
int_type = {}
spec_type = {}
# Loop body lies beyond this chunk; left open intentionally.
for data in data_list: