# Module-level imports assumed by the snippets below; project-internal names
# (Progress, DataContainer, get_session, get_hump_info, sim and the market
# classes) are imported from elsewhere in this codebase.
import gc
import json
import logging
import os
import pickle
import time
from collections import defaultdict
from uuid import uuid4

import numpy as np
from matplotlib import pyplot as pplot


def get_hump_error_per_type():
    query = """SELECT DISTINCT run.aggregate_id, aggregate_type.type_id
               FROM `run`
               LEFT OUTER JOIN aggregate_type
                   ON run.aggregate_id = aggregate_type.aggregate_id
               WHERE dissipation < 0.01"""

    session = get_session()
    aggregate_ids = session.execute(query).fetchall()
    session.close()

    errors = {1: [], 2: [], 3: [], 4: [], 5: []}

    p = Progress(len(aggregate_ids))
    p.start()

    cnt = 0
    for aggregate_id, type_id in aggregate_ids:
        err, alpha = get_hump_info(aggregate_id)
        errors[type_id].append(err)
        cnt += 1
        p.update(cnt)

    p.finish()

    x = errors.keys()
    y = [np.mean(yi) for yi in errors.values()]
    yerr = [np.std(yi) for yi in errors.values()]
    ymax = [np.max(yi) for yi in errors.values()]
    ymin = [np.min(yi) for yi in errors.values()]

    with open('./simulation_data/type_hump_error_ranges.bin', 'wb') as fp:
        pickle.dump(errors, fp)

    fig = pplot.figure()

    ax = fig.add_subplot(311)
    ax.bar(x, y, yerr=yerr, color='b')
    ax.set_ylabel("Average power law error")
    ax.set_xlabel("Type")

    ax = fig.add_subplot(312)
    ax.set_ylabel("Maximum power law error")
    ax.set_xlabel("Type")
    ax.bar(x, ymax, color='r')

    ax = fig.add_subplot(313)
    ax.set_ylabel("Minimum power law error")
    ax.set_xlabel("Type")
    ax.bar(x, ymin, color='g')

    pplot.show()
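
# Hypothetical companion helpers (not in the original source): a minimal
# sketch of reloading the error ranges pickled by get_hump_error_per_type()
# and printing per-type summary statistics. The path matches the dump above.
def load_hump_error_ranges(path='./simulation_data/type_hump_error_ranges.bin'):
    with open(path, 'rb') as fp:
        return pickle.load(fp)  # the {type_id: [errors]} dict dumped above


def print_hump_error_summary():
    ranges = load_hump_error_ranges()
    for type_id, errs in sorted(ranges.items()):
        print "type %d: n=%d mean=%f std=%f" % (
            type_id, len(errs), np.mean(errs), np.std(errs))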
def run(self):
    agstart = time.time()

    for i in xrange(self.no_sims):
        logging.info("Going for simulation %d" % (i + 1))
        gc.collect()

        run_id = str(uuid4())

        with DataContainer(self.config, run_id, self.aggregate_id) as dc:
            p = Progress(self.config['model']['no_steps'])

            # Select the market implementation for this run.
            model_class = None
            if self.market_type == 1:
                logging.info("Using default Market")
                model_class = Market
            elif self.market_type == 2:
                logging.info("Using ShuffleIRSMarket")
                model_class = ShuffleIRSMarket
            elif self.market_type == 3:
                logging.info("Using SortedIRSMarket")
                model_class = SortedIRSMarket
            elif self.market_type == 4:
                logging.info("Using RandomSortedIRSMarket")
                model_class = SortedRandomIRSMarket
            elif self.market_type == 5:
                logging.info("Using RandomShuffleIRSMarket")
                model_class = ShuffleRandomIRSMarket
            elif self.market_type == 6:
                logging.info("Using ConstantRandomShuffleIRSMarket")
                model_class = ConstShuffleIRSMarket
            elif self.market_type == 7:
                logging.info("Using quick CRS-IRS-Mkt")
                model_class = sim
            else:
                # Raising a string is invalid; raise a proper exception.
                raise ValueError("No such market type: %r" % self.market_type)

            p.start()
            start = time.time()

            with model_class(self.config['model'], dc, p.update) as m:
                m.run()

            t = time.time() - start
            p.finish()
            print ""
            logging.info("Run took %f seconds" % t)

            if self.config['analysis']['do_analysis']:
                start = time.time()
                self.do_analysis(dc, run_id)
                t = time.time() - start
                logging.info("Analysis took %f seconds" % t)

            if self.save_data:
                start = time.time()
                dc.save_data()
                t = time.time() - start
                logging.info("Saving data took %f seconds" % t)

            gc.collect()

        print ""
        print ""
        gc.collect()

    dt = (time.time() - agstart) / 60
    logging.info("Experiment took %f minutes" % dt)

    if self.config['aggregate']['do_aggregate'] and self.save_data:
        start = time.time()
        self.do_aggregate(dc, run_id)
        logging.info('Aggregation took %f seconds' % (time.time() - start))
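
# A more table-driven variant of the market_type dispatch above (a sketch,
# not the original code; assumes the same market classes are importable).
# Behaviour is equivalent: unknown types raise ValueError.
MARKET_TYPES = {
    1: Market,
    2: ShuffleIRSMarket,
    3: SortedIRSMarket,
    4: SortedRandomIRSMarket,
    5: ShuffleRandomIRSMarket,
    6: ConstShuffleIRSMarket,
    7: sim,
}


def resolve_market_class(market_type):
    try:
        return MARKET_TYPES[market_type]
    except KeyError:
        raise ValueError("No such market type: %r" % market_type)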
s.save_gross_risk_for_avalanche_size = save_gross_risk_for_avalanche_size
if s.save_gross_risk_for_avalanche_size:
    s.gross_risk_per_avalanche_size = defaultdict(list)

if s.save_avalanche_tree:
    os.makedirs(s.avalanche_tree_file_path)
if save_giant_component:
    s.giant_components = np.zeros(s.no_steps)

start = time.time()
p.start()
s.run()
p.finish()

print
print "Run took %d seconds" % (time.time() - start)

if save:
    print "Saving data"
    dc.save_defaults()
    dc.save_run()

if s.save_avalanche_progression:
    print "Saving avalanche progression"
    file_path = './simulation_data/avalanche_progression/%s.bin' % dc.aggregate_id
    with open(file_path, 'wb') as fp:
        pickle.dump(s.avalanche_progressions, fp)
        pickle.dump(dcconfig, fp)
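
# The save blocks in this codebase write several objects with consecutive
# pickle.dump calls; reading them back requires matching pickle.load calls in
# the same order. A minimal sketch (hypothetical helper, not original code):
def load_avalanche_progression(aggregate_id):
    file_path = './simulation_data/avalanche_progression/%s.bin' % aggregate_id
    with open(file_path, 'rb') as fp:
        progressions = pickle.load(fp)  # first dump: the progressions
        config = pickle.load(fp)        # second dump: the dcconfig dict
    return progressions, config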
def do_run(steps, no_banks, threshold, max_tenure, max_irs_value,
           avalanche_fraction=0.9):
    save = False
    save_risk = False
    save_risk_avalanche_time_series = False
    save_dist = False
    save_giant_component = False
    save_avalanche_progression = False
    save_critical_info = False
    save_avalanche_tree = False
    save_degree_distribution = False
    no_connection_scatter_moments = 0

    connection_scatter_moments = np.random.randint(
        0, steps, no_connection_scatter_moments)

    seed = np.random.randint(0, 1000)

    dcconfig = {
        'model': {
            'no_banks': no_banks,
            'no_steps': steps,
            'threshold': threshold,
            'sigma': 1,
            'max_irs_value': max_irs_value,
            'irs_threshold': -1,
            'dissipation': 0.0,
            'max_tenure': max_tenure
        },
        'analysis': {
            'data_to_save': ['defaults']
        },
        'file_root': './simulation_data/',
        'market_type': 7,
        'seed': seed
    }

    measure_no_steps = 2 * dcconfig['model']['max_tenure']

    ###########################################################################
    # First pass: run until a large enough avalanche is found.
    dc = DataContainer(dcconfig, str(uuid4()), str(uuid4()))
    p = Progress(steps)
    s = sim(dcconfig['model'], dc, p.update, save_risk, save_dist,
            connection_scatter_moments, seed,
            avalanche_fraction=avalanche_fraction)

    s.save_degree_distribution = save_degree_distribution
    if s.save_degree_distribution:
        s.degrees = np.zeros((steps, dcconfig['model']['no_banks']))
        s.no_irs = np.zeros((steps, dcconfig['model']['no_banks']))

    s.save_avalanche_progression = save_avalanche_progression
    s.save_risk_avalanche_time_series = save_risk_avalanche_time_series
    s.collect_critical_info = save_critical_info
    s.save_giant_component = save_giant_component
    s.save_avalanche_tree = save_avalanche_tree
    s.avalanche_tree_file_path = './simulation_data/trees/%s/' % dc.aggregate_id
    s.irs_creations = np.zeros(steps)
    s.irs_removals = np.zeros(steps)

    if s.save_avalanche_tree:
        os.makedirs(s.avalanche_tree_file_path)
    if save_giant_component:
        s.giant_components = np.zeros(s.no_steps)

    ###########################################################################
    start = time.time()
    p.start()
    tme, size = s.run()
    print
    p.finish()

    defaulting_bank = s.defaulting_bank_no
    start_at = tme - measure_no_steps + 1

    print "Large enough avalanche found at %d of size %d" % (tme, size)
    print
    print "Run took %d seconds" % (time.time() - start)
    print
    print "Going for the analysis"

    ###########################################################################
    ## The actual measurement run: replay with the same seed, starting just
    ## before the avalanche, and record the data that's needed.
    dc = DataContainer(dcconfig, str(uuid4()), str(uuid4()))
    p = Progress(steps)
    s = sim(dcconfig['model'], dc, p.update, save_risk, save_dist,
            connection_scatter_moments, seed, start_at, defaulting_bank,
            avalanche_fraction=avalanche_fraction)

    nb = dcconfig['model']['no_banks']
    s.measured_balances = np.zeros((measure_no_steps, nb))
    s.measured_gross_balances = np.zeros((measure_no_steps, nb))
    s.degrees = np.zeros((measure_no_steps, nb))
    s.no_irs = np.zeros((measure_no_steps, nb))
    s.defaulted_nodes = []
    s.irs_pb = []
    s.network = np.zeros((nb, nb))
    s.irs_creations = np.zeros(steps)
    s.irs_removals = np.zeros(steps)

    #################
    s.save_degree_distribution = save_degree_distribution
    s.save_avalanche_progression = save_avalanche_progression
    s.save_risk_avalanche_time_series = save_risk_avalanche_time_series
    s.collect_critical_info = save_critical_info
    s.save_giant_component = save_giant_component
    s.save_avalanche_tree = save_avalanche_tree
    s.avalanche_tree_file_path = './simulation_data/trees/%s/' % dc.aggregate_id

    if s.save_avalanche_tree:
        os.makedirs(s.avalanche_tree_file_path)
    if save_giant_component:
        s.giant_components = np.zeros(s.no_steps)

    ###########################################################################
    start = time.time()
    p.start()
    tme, size = s.run()
    p.finish()

    print
    print "Large enough avalanche found at %d of size %d" % (tme, size)

    if s.save_avalanche_progression:
        print "Saving avalanche progression"
        file_path = './simulation_data/avalanche_progression/%s.bin' % dc.aggregate_id
        with open(file_path, 'wb') as fp:
            pickle.dump(s.avalanche_progressions, fp)
            pickle.dump(dcconfig, fp)

    if s.collect_critical_info:
        print "Critical info"
        file_path = './simulation_data/critical/%s.bin' % dc.aggregate_id
        with open(file_path, 'wb') as fp:
            pickle.dump(s.critical_info, fp)
            pickle.dump(s.max_default_size_t.tolist(), fp)
            if s.save_giant_component:
                pickle.dump(s.giant_components.tolist(), fp)
            pickle.dump(dcconfig, fp)

    if len(connection_scatter_moments) > 0:
        print "Connection Scatters"
        file_path = './simulation_data/connection_scatters/%s.bin' % dc.aggregate_id
        with open(file_path, 'wb') as fp:
            pickle.dump(s.connection_scatters, fp)

    if save_dist:
        file_path = './simulation_data/dists/%s.bin' % dc.aggregate_id
        with open(file_path, 'wb') as fp:
            pickle.dump(s.trials, fp)
            pickle.dump(dcconfig['model']['no_banks'], fp)

    os.makedirs("./simulation_data/large_avalanche_data/%s" % dc.aggregate_id)
    print "Saving stuff"

    file_path = './simulation_data/large_avalanche_data/%s/degrees.bin' % dc.aggregate_id
    with open(file_path, 'wb') as fp:
        pickle.dump(s.degrees.tolist(), fp)

    file_path = './simulation_data/large_avalanche_data/%s/no_irs.bin' % dc.aggregate_id
    with open(file_path, 'wb') as fp:
        pickle.dump(s.no_irs.tolist(), fp)
        pickle.dump(s.irs_pb, fp)

    file_path = './simulation_data/large_avalanche_data/%s/balances.bin' % dc.aggregate_id
    with open(file_path, 'wb') as fp:
        pickle.dump(s.measured_balances.tolist(), fp)
        pickle.dump(s.measured_gross_balances.tolist(), fp)

    file_path = './simulation_data/large_avalanche_data/%s/network.bin' % dc.aggregate_id
    with open(file_path, 'wb') as fp:
        pickle.dump(s.network.tolist(), fp)

    file_path = './simulation_data/large_avalanche_data/%s/defaulted.bin' % dc.aggregate_id
    with open(file_path, 'wb') as fp:
        pickle.dump(s.defaulted_nodes, fp)

    file_path = './simulation_data/large_avalanche_data/%s/irs_data.bin' % dc.aggregate_id
    with open(file_path, 'wb') as fp:
        pickle.dump(s.irs_creations.tolist(), fp)
        pickle.dump(s.irs_removals.tolist(), fp)

    dcconfig['failed_bank'] = s.defaulting_bank_no
    file_path = './simulation_data/large_avalanche_data/%s/config.json' % dc.aggregate_id
    with open(file_path, 'w') as fp:
        json.dump(dcconfig, fp, indent=4)

    print dc.aggregate_id
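
# Hypothetical invocation of do_run (a sketch; the parameter values below are
# illustrative only and are not taken from the original experiments):
if __name__ == '__main__':
    do_run(steps=20000, no_banks=100, threshold=10,
           max_tenure=400, max_irs_value=4)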