# --- per-iteration update and periodic snapshot output -----------------------
# NOTE(review): indentation reconstructed from a whitespace-mangled source;
# `i` is the enclosing iteration counter — confirm loop nesting against the VCS copy.
print(i)  # progress indicator (was Python-2 `print i`; call form works on 2 and 3)
ecosystem.update_world()
dict_stats = get_eco_state_row(ITERATIONS, ecosystem)
series_counts[i] = ecosystem.populations
if HABITAT_LOSS and i == HABITAT_LOSS_ITER:
    # one-off perturbation applied exactly at the configured iteration
    ecosystem.apply_habitat_loss()
if i % 1000 == 0 or i == ITERATIONS:
    # snapshot the realised interaction network every 1000 iterations and at the end
    net_temp = ecosystem.realised_net.copy()
    series = copy(series_counts)
    write_adjacency_matrix(i, NETWORK_RECORD, series, net_temp, output_dir)
    ecosystem.clear_realised_network()
## WARNING: TO USE OR NOT TO USE!!
write_spatial_state(ecosystem, i, output_dir)
## WARNING: THIS HAPPENS EVERY ITERATION
##############################################
### OUTPUT:
# Per-species summary CSV. NOTE(review): `file_species` is opened here and not
# closed in this fragment — presumably closed further down; confirm, or move to
# a `with` block once the full extent of its use is visible.
header_names = ['species', 'init_tl', 'mutualist', 'mutualistic_producer',
                'individuals_init', 'immigrants', 'born', 'dead',
                'individuals_final']
file_species = open(output_dir + '/output_species.csv', 'w')
out_species = csv.DictWriter(file_species, header_names)
out_species.writeheader()
out_row_species = dict()
else: thread = ThreadNetStats(network_stats_lock, out, net_temp, i, NETWORK_RECORD, series_counts, eco_thread, INT_STRENGTHS) threads.append(thread) thread.start() ecosystem.clear_realised_network() ##calculate spatial variation metrics ## and save spatial state of system (for further spatial analysis after simulation) if SPATIAL_VARIATION and (i % RECORD_SPATIAL_VAR == 0 or i == ITERATIONS): start = datetime.now() write_spatial_analysis(ecosystem, i) write_spatial_state(ecosystem, i) stop = datetime.now() elapsed = stop - start print elapsed for t in threads: t.join() file_net.close() file_eco.close() # here we write the output file for the species populations dynamics header_names = ['iteration'] for sp in sorted(ecosystem.species): header_names.append(sp)
areas_counts) else: NetStats(out, net_temp, i, NETWORK_RECORD, series_counts, INT_STRENGTHS, output_dir) ecosystem.clear_realised_network() if HABITAT_LOSS and i == HABITAT_LOSS_ITER: ecosystem.apply_habitat_loss() ##calculate spatial variation metrics if SPATIAL_VARIATION and (i % RECORD_SPATIAL_VAR == 0 or i == ITERATIONS): start = datetime.now() write_spatial_analysis(ecosystem, i, output_dir) write_spatial_state(ecosystem, i, output_dir) stop = datetime.now() elapsed = stop - start print(elapsed) file_net.close() file_eco.close() # here we write the output file for the species populations dynamics header_names = ['iteration'] for sp in sorted(ecosystem.species): header_names.append(sp) file_populations = open(output_dir + '/output_populations.csv', 'w') out_populations = csv.DictWriter(file_populations, header_names)
print i ecosystem.update_world() # dict_stats = get_eco_state_row(ITERATIONS, ecosystem) dict_stats = get_eco_state_row(ITERATIONS, ecosystem) # series_counts[1] = ecosystem.populations series_counts[i] = ecosystem.populations if HABITAT_LOSS and i == HABITAT_LOSS_ITER: ecosystem.apply_habitat_loss() if i % 1000 == 0 or i == ITERATIONS: net_temp = ecosystem.realised_net.copy() series = copy(series_counts) write_adjacency_matrix(i, NETWORK_RECORD, series, net_temp, output_dir) write_spatial_state(ecosystem, i, output_dir) ### OUTPUT: ## testing output of adjacency and spatial state. Do they require more than the following? # net_temp = ecosystem.realised_net.copy() # series = copy(series_counts) # write_adjacency_matrix(ITERATIONS, NETWORK_RECORD, series, net_temp, output_dir) # write_spatial_state(ecosystem, ITERATIONS, output_dir) header_names = [ "species", "init_tl", "mutualist", "mutualistic_producer", "individuals_init", "immigrants",
## here we obtain the trophic position of each species so at the end we can
## calculate its mean and standard deviation for the species statistics
## (- as noted in README, this does not work for large networks)
tps, a, b = net_temp.find_trophic_positions()
for sp in cumulative_sps_stats:
    # lazily replace the initial 0 placeholder with a list of observations
    if cumulative_sps_stats[sp]['tps'] == 0:
        cumulative_sps_stats[sp]['tps'] = []
    # `dict.has_key` was removed in Python 3; the `in` operator works on 2 and 3
    if sp in tps:
        cumulative_sps_stats[sp]['tps'].append(tps[sp])
if SPATIAL_VARIATION:
    NetStats(out, net_temp, i, NETWORK_RECORD, series_counts, INT_STRENGTHS,
             output_dir, centroids_counts, areas_counts)
else:
    NetStats(out, net_temp, i, NETWORK_RECORD, series_counts, INT_STRENGTHS,
             output_dir)
## save the spatial state of the system in the case the SPATIAL_VARIATION is switched off.
write_spatial_state(ecosystem, i, output_dir)
ecosystem.clear_realised_network()
if HABITAT_LOSS and i == HABITAT_LOSS_ITER:
    ecosystem.apply_habitat_loss()
## calculate spatial variation metrics (timed, reported on stdout)
if SPATIAL_VARIATION and (i % RECORD_SPATIAL_VAR == 0 or i == ITERATIONS):
    start = datetime.now()
    write_spatial_analysis(ecosystem, i, output_dir)
    # write_spatial_state(ecosystem, i, output_dir)  # already written unconditionally above
    stop = datetime.now()
    elapsed = stop - start
    print(elapsed)  # was Python-2 `print elapsed`