    print 'new connectance = ', net.connectance()
else:
    net = obtain_interactions_network()
    net_to_save = net.copy()
    nx.write_graphml(net_to_save, network_file)

ecosystem = Ecosystem(net, drawing=False)
ecosystem.initialise_world(True)
#ecosystem.draw_species_distribution()

out_row = get_out_row(0, net, '', 0, '', '')
out.writerow(out_row)

# iteration_to_reset = (int) (math.ceil(ITERATIONS*NETWORK_RESET))

out_row_eco = get_eco_state_row(0, ecosystem)
out_eco.writerow(out_row_eco)

# print ecosystem.get_groups_counts()

# plot_series = []
# plot_prods = []
# plot_mut_prods = []
# plot_herbs = []
# plot_muts = []
# plot_prim = []
# plot_sec = []
#
# plot_prods_rep = []
# plot_mut_prods_rep = []
# plot_herbs_rep = []
# plot_muts_rep = []
    if u in basal_sps and v in top_preds and tls[v] == 3:
        net.remove_edge(u,v)

print 'new connectance = ', net.connectance()

if not READ_FILE_NETWORK:
    net_to_save = net.copy()
    nx.write_graphml(net_to_save, network_file)

##############################################

ecosystem = Ecosystem(net, drawing=False)
ecosystem.initialise_world(True)    # here it works out the initial populations

series_counts = dict()
dict_stats = get_eco_state_row(0, ecosystem)
series_counts[0] = ecosystem.populations

# we don't use this to store data, just for its keys
cumulative_sps_stats = dict.fromkeys(net.nodes(), None)

##############################################

for i in range(1, ITERATIONS+1):
    print i
    ecosystem.update_world()
    dict_stats = get_eco_state_row(ITERATIONS, ecosystem)
    series_counts[i] = ecosystem.populations
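# Note (a sketch, not part of the original script): if Ecosystem.populations is a
# dict that update_world() mutates in place, every entry of series_counts above
# would end up pointing at the same final state. Snapshotting it per iteration,
# assuming it behaves like a plain dict, avoids that:
#
#     series_counts[i] = dict(ecosystem.populations)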
    if u in basal_sps and v in top_preds and tls[v] == 3:
        net.remove_edge(u,v)

print 'new connectance = ', net.connectance()

if not READ_FILE_NETWORK:
    net_to_save = net.copy()
    nx.write_graphml(net_to_save, network_file)

##################################################################################################################

ecosystem = Ecosystem(net, drawing=False)
ecosystem.initialise_world(True)

out_row = get_out_row(0, net, '', 0, '', '')
out.writerow(out_row)

out_row_eco = get_eco_state_row(0, ecosystem)
out_eco.writerow(out_row_eco)

series_counts = dict()

if SPATIAL_VARIATION:
    centroids_counts = dict()
    areas_counts = dict()

## this structure holds the numbers of immigration, birth and death events of individuals
## for each species during the last ITERATIONS_TO_RECORD iterations
cumulative_sps_stats = dict.fromkeys(net.nodes(), None)
stats = ['immigrants', 'born', 'dead', 'tps']
for sp in cumulative_sps_stats.keys():
    cumulative_sps_stats[sp] = dict.fromkeys(stats, 0)

threshold_iter = math.ceil(ITERATIONS - (ITERATIONS*ITERATIONS_TO_RECORD))
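# Hypothetical helper (a sketch, not part of the original script) showing one way the
# cumulative_sps_stats structure could be filled during the recording window, i.e.
# once the current iteration index passes threshold_iter. The per-iteration source
# 'iteration_stats' (a per-species dict carrying the keys listed in 'stats') is an
# assumption, not something defined in the original code.
def accumulate_species_stats(i, iteration_stats, cumulative_sps_stats, stats, threshold_iter):
    if i <= threshold_iter:
        return
    for sp in cumulative_sps_stats.keys():
        for stat in stats:
            # add this iteration's count for the species/statistic to the running total
            cumulative_sps_stats[sp][stat] += iteration_stats[sp].get(stat, 0)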