def get_type(aggregate_id):
    """Return the type_id stored for the given aggregate."""
    query = """SELECT type_id FROM aggregate_type
               WHERE aggregate_id = :agg"""
    session = get_session()
    t = session.execute(query, {'agg': aggregate_id}).first()[0]
    session.close()
    return t
def get_aggregate_ids():
    """Return the aggregate ids of all runs with at least 200 banks."""
    query = """SELECT DISTINCT aggregate_id FROM run
               WHERE no_banks >= 200"""
    session = get_session()
    agg_ids = session.execute(query).fetchall()
    session.close()
    return [a[0] for a in agg_ids]
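# A minimal usage sketch of the two helpers above; the function name
# collect_aggregate_types is hypothetical, not part of the original code.
# It fetches the id of every large run and maps it to its type.
def collect_aggregate_types():
    return dict((agg_id, get_type(agg_id))
                for agg_id in get_aggregate_ids())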
def save_default_participants(self):
    """Persist the banks involved in each default, in chunks of 40,000."""
    dps = []
    for did in self.defaults:
        for (bank_id, balance, root) in self.default_participants[did]:
            dps.append(
                BankDefaultModel(did, bank_id, balance, root, self.run_id))
    # Save in chunks so a single session never holds the whole list.
    for i in range(0, len(dps), 40000):
        ses = get_session()
        ses.bulk_save_objects(dps[i:i + 40000])
        ses.commit()
        ses.close()
def save_banks(self):
    """Persist the per-bank state of this run."""
    if len(self.banks) > 0:
        bnks = [BankModel(bank, self.run_id, self.banks[bank])
                for bank in self.banks]
        ses = get_session()
        ses.bulk_save_objects(bnks)
        ses.commit()
        ses.close()
def save_swaps(self):
    """Persist every swap created during the run, in chunks of 40,000."""
    swp_objs = []
    for sid in self.swaps:
        (float_id, fix_id, value, start, end, tenure) = self.swaps[sid]
        swp_objs.append(
            SwapModel(sid, value, float_id, fix_id, start, end, tenure,
                      self.run_id))
    # Save in chunks so a single session never holds the whole list.
    for i in range(0, len(swp_objs), 40000):
        ses = get_session()
        ses.bulk_save_objects(swp_objs[i:i + 40000])
        ses.commit()
        ses.close()
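# save_default_participants and save_swaps share the same batching pattern;
# below is a sketch of a common helper built on the same get_session factory.
# The name bulk_save_chunked is an assumption, not part of the original code.
def bulk_save_chunked(objects, chunk_size=40000):
    """Bulk-insert objects in fixed-size chunks, using one short-lived
    session per chunk so sessions are closed even if a commit fails."""
    for i in range(0, len(objects), chunk_size):
        ses = get_session()
        try:
            ses.bulk_save_objects(objects[i:i + chunk_size])
            ses.commit()
        finally:
            ses.close()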
def get_data(no_banks):
    """Return (aggregate_id, threshold, size) rows for all runs with the
    given number of banks, joined with their default-size aggregates."""
    query = """SELECT DISTINCT r.aggregate_id, r.threshold, da.size
               FROM run r
               INNER JOIN default_aggregate da
                   ON da.aggregate_id = r.aggregate_id
               WHERE r.no_banks = :no_banks
               ORDER BY r.aggregate_id, size"""
    session = get_session()
    data = session.execute(query, {'no_banks': no_banks}).fetchall()
    session.close()
    return data
def get_aggregate_id(b, t, irst, ten):
    """Look up the aggregate id of a run matching the given number of
    banks, threshold, max IRS value and max tenure."""
    print b, t, irst, ten
    query = """SELECT aggregate_id FROM run
               WHERE threshold = :threshold
                 AND no_banks = :no_banks
                 AND max_irs_value = :irs_val
                 AND max_tenure = :ten
               LIMIT 1"""
    session = get_session()
    agg_id = session.execute(query, {
        'threshold': t,
        'no_banks': b,
        'irs_val': irst,
        'ten': ten
    }).first()[0]
    session.close()
    return agg_id
def save_defaults(self):
    """Persist the avalanche-size histogram, accumulating frequencies
    across runs via a MySQL upsert."""
    print "Saving avalanche data"
    query = """INSERT INTO default_aggregate (aggregate_id, frequency, size)
               VALUES (:aggregate_id, :freq, :size)
               ON DUPLICATE KEY UPDATE frequency = frequency + :freq"""
    session = get_session()
    for dkey in self.defaults:
        session.execute(query, {
            'aggregate_id': self.aggregate_id,
            'freq': self.defaults[dkey],
            'size': dkey
        })
    session.commit()
    session.close()
def get_no_defaults(no_banks, threshold, irs_value, tenure):
    """Return the run's aggregate id, its parameters, and the total
    number of defaults recorded for the matching configuration."""
    query = """SELECT r.aggregate_id, max_tenure, max_irs_value, threshold,
                      SUM(da.frequency)
               FROM run AS r
               INNER JOIN default_aggregate AS da
                   ON da.aggregate_id = r.aggregate_id
               WHERE no_banks = :no_banks
                 AND threshold = :threshold
                 AND max_irs_value = :max_irs_value
                 AND max_tenure = :max_tenure
               GROUP BY r.aggregate_id, r.max_irs_value, r.max_tenure,
                        r.threshold"""
    session = get_session()
    info = session.execute(query, {
        'no_banks': no_banks,
        'max_tenure': tenure,
        'threshold': threshold,
        'max_irs_value': irs_value,
    }).first()
    session.close()
    return info
def save_run(self):
    """Persist the run's metadata, including the average number of swaps
    created per bank per step."""
    ses = get_session()
    # float() keeps the average fractional under Python 2 integer division.
    avgswaps = len(self.swaps) / float(
        self.config['model']['no_steps'] * self.config['model']['no_banks'])
    run = RunModel(self.run_id, self.aggregate_id,
                   self.config['model']['no_steps'],
                   self.config['model']['no_banks'],
                   self.config['model']['sigma'],
                   self.config['model']['irs_threshold'],
                   self.config['model']['max_irs_value'],
                   self.config['model']['max_tenure'],
                   self.config['model']['threshold'],
                   self.time_stamp, self.seed,
                   self.config['market_type'], avgswaps,
                   len(self.swaps),
                   self.config['model']['dissipation'])
    ses.bulk_save_objects([run])
    ses.commit()
    ses.close()
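# How the save_* methods above fit together once a simulation finishes;
# a sketch assuming a hypothetical `simulation` instance of the surrounding
# class. save_run goes first so the run row exists before the bank, swap
# and default rows that reference its run_id are inserted.
def persist_simulation(simulation):
    simulation.save_run()
    simulation.save_banks()
    simulation.save_swaps()
    simulation.save_defaults()
    simulation.save_default_participants()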
"""Checking if we could create some collapse based on size. (hardcoded data in here)""" from __future__ import division import math import numpy as np import matplotlib.pyplot as plt from collections import defaultdict from data.models import SwapModel, DefaultModel, RunModel, get_session from heatmap import get_runs, get_defaults if __name__ == '__main__': root_path = "./data_collapse" session = get_session() aggregate_ids = ['0772393d-881a-4895-9b8e-f9857a34aefc', '67638748-c9d1-49ec-8b09-731a8a5cc383', '98367a5d-4a69-444b-9011-148174cb94c3', '5a846dd4-266e-414f-96ea-1b5fbf1c05c7', '8ca7029f-7f6e-41e2-8775-2655a34b9fec'] fig = plt.figure() ax = fig.add_subplot(1,1,1) ax.set_xscale('log') ax.set_yscale('log') ax.set_xlim((0.01,1)) cnt = 0 exponent = 2.55 for aggregate_id in aggregate_ids:
""" Comparing distributions (powerlaw, lognormal and exponential). """ from __future__ import division import numpy as np import matplotlib.pyplot as pplot import powerlaw from analyse_hump import * from data.models import get_session if __name__ == '__main__': ses = get_session() aggs = ses.execute( 'SELECT DISTINCT aggregate_id FROM run WHERE aggregate_id = \'452b894a-2aff-4d36-a58c-b15bb0219d7a\'' ).fetchall() ses.close() pvalues_lognormal = np.zeros(len(aggs)) Rvalues_lognormal = np.zeros(len(aggs)) pvalues_lognormal_pos = np.zeros(len(aggs)) Rvalues_lognormal_pos = np.zeros(len(aggs)) pvalues_exponential = np.zeros(len(aggs)) Rvalues_exponential = np.zeros(len(aggs)) for i, res in enumerate(aggs): aggregate_id = res[0]