def extract_dmg_by_asset_npz(dstore, what):
    """
    Yield pairs ``('rlz-XXX', composed_array)`` with the damage distributions
    per asset for each realization in the datastore.
    """
    dt = build_damage_dt(dstore)
    asset_array = util.get_assets(dstore)
    raw = dstore['dmg_by_asset']
    for realization in dstore['csm_info'].get_rlzs_assoc().realizations:
        damages = build_damage_array(raw[:, realization.ordinal], dt)
        tag = 'rlz-%03d' % realization.ordinal
        yield tag, util.compose_arrays(asset_array, damages)
def export_loss_maps_csv(ekey, dstore):
    """
    Export one CSV file of loss maps per realization and return the list
    of the file names written.
    """
    realizations = dstore['csm_info'].get_rlzs_assoc().realizations
    asset_array = get_assets(dstore)
    loss_maps = get_loss_maps(dstore, 'rlzs')
    csv = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    for realization, column in zip(realizations, loss_maps.T):
        dest = dstore.build_fname('loss_maps', realization, ekey[1])
        csv.save(compose_arrays(asset_array, column), dest)
    return csv.getsaved()
def export_rlzs_by_asset_csv(ekey, dstore):
    """
    Export one CSV per realization with the columns of the matrix stored
    under ``ekey[0]``; return the list of the file names written.
    """
    asset_array = get_assets(dstore)
    matrix = dstore[ekey[0]].value  # matrix N x R or T x R
    realizations = dstore['csm_info'].get_rlzs_assoc().realizations
    csv = writers.CsvWriter(fmt=FIVEDIGITS)
    for realization, column in zip(realizations, matrix.T):
        dest = dstore.build_fname(ekey[0], realization.gsim_rlz, ekey[1])
        csv.save(compose_arrays(asset_array, column), dest)
    return csv.getsaved()
def view_mean_avg_losses(token, dstore):
    """
    Build a sorted RST table of the first column (mean) of the average
    losses, preferring the statistical dataset when available.
    """
    try:
        dataset = dstore['avg_losses-stats']  # shape (N, S)
    except KeyError:
        dataset = dstore['avg_losses-rlzs']  # shape (N, R)
    first_column = dataset[:, 0]
    table = util.compose_arrays(util.get_assets(dstore), first_column)
    table.sort()
    return rst_table(table, fmt=FIVEDIGITS)
def export_rlzs_by_asset_csv(ekey, dstore):
    """
    Export one CSV per realization; when there is a single realization the
    file name has no GSIM logic-tree-path suffix.
    """
    realizations = dstore['csm_info'].get_rlzs_assoc().realizations
    num_rlzs = len(realizations)
    asset_array = get_assets(dstore)
    matrix = dstore[ekey[0]].value  # matrix N x R or T x R
    csv = writers.CsvWriter(fmt='%9.6E')
    for realization, column in zip(realizations, matrix.T):
        if num_rlzs == 1:
            suffix = '.csv'
        else:
            suffix = '-gsimltp_%s.csv' % realization.uid
        dest = dstore.export_path(ekey[0] + suffix)
        csv.save(compose_arrays(asset_array, column), dest)
    return csv.getsaved()
def export_dmg_by_asset_csv(ekey, dstore):
    """
    Export one CSV per realization with the damage distributions per asset;
    return the list of the file names written.
    """
    dt = build_damage_dt(dstore)
    dataset = dstore[ekey[0]]
    asset_array = get_assets(dstore)
    csv = writers.CsvWriter(fmt='%.6E')
    for realization in dstore['csm_info'].get_rlzs_assoc().realizations:
        damages = build_damage_array(dataset[:, realization.ordinal], dt)
        dest = dstore.build_fname(ekey[0], realization.gsim_rlz, ekey[1])
        csv.save(compose_arrays(asset_array, damages), dest)
    return csv.getsaved()
def view_mean_avg_losses(token, dstore):
    """
    Build a sorted RST table of the first column (mean) of the average
    losses, cast to the multi-loss dtype of the calculation.
    """
    dt = dstore['oqparam'].multiloss_dt()
    try:
        dataset = dstore['avg_losses-stats']  # shape (N, S)
    except KeyError:
        dataset = dstore['avg_losses-rlzs']  # shape (N, R)
    rows = [tuple(row) for row in dataset[:, 0]]
    records = numpy.array(rows, dt)
    table = util.compose_arrays(util.get_assets(dstore), records)
    table.sort()
    return rst_table(table, fmt=FIVEDIGITS)
def export_rcurves(ekey, dstore):
    """
    Export one CSV of compacted risk curves per realization; return the
    list of the file names written.
    """
    asset_array = get_assets(dstore)
    compact = compactify(dstore[ekey[0]].value)
    prefix = ekey[0].split('-')[0]
    csv = writers.CsvWriter(fmt='%9.7E')
    for realization in dstore['csm_info'].get_rlzs_assoc().realizations:
        composed = compose_arrays(asset_array, compact[:, realization.ordinal])
        dest = dstore.export_path('%s-%s.csv' % (prefix, realization.uid))
        csv.save(composed, dest)
    return csv.getsaved()
def export_rcurves(ekey, dstore):
    """
    Export one CSV of compacted risk curves per realization, using
    ``build_fname`` to name the files; return the file names written.
    """
    asset_array = get_assets(dstore)
    compact = compactify(dstore[ekey[0]].value)
    prefix = ekey[0].split('-')[0]
    csv = writers.CsvWriter(fmt=FIVEDIGITS)
    for realization in dstore['csm_info'].get_rlzs_assoc().realizations:
        composed = compose_arrays(asset_array, compact[:, realization.ordinal])
        dest = dstore.build_fname(prefix, realization, 'csv')
        csv.save(composed, dest)
    return csv.getsaved()
def export_dmg_by_asset_csv(ekey, dstore):
    """
    Export one CSV per realization with the damage distributions per asset,
    naming each file after the single GSIM of the realization.
    """
    dt = build_damage_dt(dstore)
    dataset = dstore[ekey[0]]
    asset_array = get_assets(dstore)
    csv = writers.CsvWriter(fmt='%.6E')
    for realization in dstore['rlzs_assoc'].realizations:
        [gsim] = realization.value  # exactly one GSIM per realization
        damages = build_damage_array(dataset[:, realization.ordinal], dt)
        dest = dstore.export_path('%s-%s.%s' % (ekey[0], gsim, ekey[1]))
        csv.save(compose_arrays(asset_array, damages), dest)
    return csv.getsaved()
def export_loss_curves(ekey, dstore):
    """
    Export one CSV per (realization, loss type) pair with the loss curves;
    return the list of the file names written.
    """
    ltypes = dstore.get_attr('composite_risk_model', 'loss_types')
    asset_array = get_assets(dstore)
    curves = dstore[ekey[0]]
    prefix = ekey[0].split('-')[0]
    csv = writers.CsvWriter(fmt='%9.6E')
    for realization in dstore['csm_info'].get_rlzs_assoc().realizations:
        for ltype in ltypes:
            composed = compose_arrays(
                asset_array, curves[ltype][:, realization.ordinal])
            dest = dstore.export_path(
                '%s-%s-%s.csv' % (prefix, ltype, realization.uid))
            csv.save(composed, dest)
    return csv.getsaved()
def export_loss_maps_csv(ekey, dstore):
    """
    Export one CSV of loss maps per realization or per statistic, depending
    on whether the datastore key ends in ``rlzs`` or ``stats``.
    """
    kind = ekey[0].split('-')[1]  # rlzs or stats
    asset_array = get_assets(dstore)
    maps = get_loss_maps(dstore, kind)
    if kind == 'rlzs':
        tags = dstore['csm_info'].get_rlzs_assoc().realizations
    else:
        oq = dstore['oqparam']
        tags = ['mean'] + ['quantile-%s' % q for q in oq.quantiles]
    csv = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    for idx, tag in enumerate(tags):
        dest = dstore.build_fname('loss_maps', tag, ekey[1])
        csv.save(compose_arrays(asset_array, maps[:, idx]), dest)
    return csv.getsaved()
def export_avg_losses(ekey, dstore):
    """
    Export one CSV of average losses per realization.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    losses_matrix = dstore[ekey[0]].value
    asset_array = get_assets(dstore)
    csv = writers.CsvWriter(fmt=FIVEDIGITS)
    for realization in dstore['csm_info'].get_rlzs_assoc().realizations:
        column = losses_matrix[:, realization.ordinal]
        dest = dstore.export_path(
            'losses_by_asset-rlz%03d.csv' % realization.ordinal)
        csv.save(compose_arrays(asset_array, column), dest)
    return csv.getsaved()
def export_dmg_by_asset_csv(ekey, dstore):
    """
    Export one CSV per realization with the damage distributions per asset;
    with multiple events the dtype carries mean and stddev columns, and with
    ``modal_damage_state`` set only the most likely state is exported.
    """
    num_events = len(dstore['events'])
    oq = dstore['oqparam']
    dt = build_damage_dt(dstore, mean_std=num_events > 1)
    dataset = dstore[ekey[0]]
    asset_array = get_assets(dstore)
    csv = writers.CsvWriter(fmt='%.6E')
    for realization in dstore['csm_info'].get_rlzs_assoc().realizations:
        column = dataset[:, realization.ordinal]
        if oq.modal_damage_state:
            damages = modal_damage_array(column, dt)
        else:
            damages = build_damage_array(column, dt)
        dest = dstore.build_fname(ekey[0], realization, ekey[1])
        csv.save(compose_arrays(asset_array, damages), dest)
    return csv.getsaved()
def export_avg_losses_stats(ekey, dstore):
    """
    Export one CSV of average losses per statistic (mean plus one file per
    configured quantile).

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    losses_matrix = dstore[ekey[0]].value
    tags = ['mean'] + ['quantile-%s' % q for q in oq.quantile_loss_curves]
    asset_array = get_assets(dstore)
    csv = writers.CsvWriter(fmt=FIVEDIGITS)
    for idx, tag in enumerate(tags):
        dest = dstore.export_path('avg_losses-%s.csv' % tag)
        csv.save(compose_arrays(asset_array, losses_matrix[:, idx]), dest)
    return csv.getsaved()
def export_losses_by_asset(ekey, dstore):
    """
    Export one CSV of losses by asset per realization, viewing the raw
    column through the statistical loss dtype.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    loss_dt = dstore['oqparam'].loss_dt(stat_dt)
    matrix = dstore[ekey[0]].value
    asset_array = get_assets(dstore)
    csv = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    for realization in dstore['csm_info'].get_rlzs_assoc().realizations:
        column = matrix[:, realization.ordinal]
        dest = dstore.build_fname('losses_by_asset', realization, 'csv')
        # copy before viewing so the view is over contiguous data
        records = column.copy().view(loss_dt)[:, 0]
        csv.save(compose_arrays(asset_array, records), dest)
    return csv.getsaved()
def export_damages_csv(ekey, dstore):
    """
    Export one CSV per (loss type, tag) pair, where the tags are the
    statistics or the realizations depending on the datastore key.
    """
    oq = dstore['oqparam']
    realizations = dstore['csm_info'].get_rlzs_assoc().realizations
    ltypes = oq.loss_dt().names
    asset_array = get_assets(dstore)
    cube = dstore[ekey[0]].value  # matrix N x R x LI or T x R x LI
    csv = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    if ekey[0].endswith('stats'):
        tags = ['mean'] + ['quantile-%s' % q for q in oq.quantiles]
    else:
        tags = ['rlz-%03d' % r for r in range(len(realizations))]
    for lti, ltype in enumerate(ltypes):
        for tag, column in zip(tags, cube[:, :, lti].T):
            dest = dstore.build_fname('damages-%s' % ltype, tag, ekey[1])
            csv.save(compose_arrays(asset_array, column), dest)
    return csv.getsaved()
def export_loss_maps_npz(ekey, dstore):
    """
    Export all loss maps into a single .npz file, one named array per
    realization or statistic; return the file name in a list.
    """
    kind = ekey[0].split('-')[1]  # rlzs or stats
    asset_array = get_assets(dstore)
    maps = get_loss_maps(dstore, kind)
    num_rlzs = dstore['csm_info'].get_num_rlzs()
    if kind == 'rlzs':
        tags = ['rlz-%03d' % r for r in range(num_rlzs)]
    else:
        oq = dstore['oqparam']
        tags = ['mean'] + ['quantile-%s' % q for q in oq.quantiles]
    dest = dstore.export_path('%s.%s' % ekey)
    arrays = {tag: compose_arrays(asset_array, maps[:, idx])
              for idx, tag in enumerate(tags)}
    savez(dest, **arrays)
    return [dest]
def view_loss_curves_avg(token, dstore):
    """
    Returns the average losses computed from the loss curves; for each asset
    shows all realizations.
    """
    curves = dstore['loss_curves-rlzs'].value  # shape (N, R)
    num_assets, num_rlzs = curves.shape
    names = curves.dtype.names
    avg_dt = numpy.dtype(
        [(lt, numpy.float32, num_rlzs) for lt in names])
    avg = numpy.zeros(num_assets, avg_dt)
    for lt in names:
        for idx, record in enumerate(curves[lt]):
            avg[lt][idx] = record['avg']
    table = util.compose_arrays(util.get_assets(dstore), avg)
    return rst_table(table, fmt='%8.6E')
def export_bcr_map(ekey, dstore):
    """
    Export one CSV of benefit-cost-ratio data per tag, where the tags are
    the statistics or the realizations depending on the datastore key.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of the file names written
    """
    oq = dstore['oqparam']
    assets = get_assets(dstore)
    bcr_data = dstore[ekey[0]]
    N, R = bcr_data.shape
    if ekey[0].endswith('stats'):
        tags = ['mean'] + ['quantile-%s' % q for q in oq.quantiles]
    else:
        tags = ['rlz-%03d' % r for r in range(R)]
    writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    # NOTE: a `fnames` accumulator was removed here; it was populated but
    # never used, since the writer already tracks the saved paths
    for t, tag in enumerate(tags):
        path = dstore.build_fname('bcr', tag, 'csv')
        writer.save(compose_arrays(assets, bcr_data[:, t]), path)
    return writer.getsaved()
def export_avg_losses_ebrisk(ekey, dstore):
    """
    Export a single 'mean' CSV of average losses, one named column per
    loss type.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    key = ekey[0]
    dt = dstore['oqparam'].loss_dt()
    matrix = dstore[key].value  # shape (A, L)
    csv = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    asset_array = get_assets(dstore)
    dest = dstore.build_fname(key, 'mean', 'csv')
    records = numpy.zeros(len(matrix), dt)
    for idx, ltype in enumerate(dt.names):
        records[ltype] = matrix[:, idx]
    csv.save(compose_arrays(asset_array, records), dest)
    return csv.getsaved()
def export_avg_losses(ekey, dstore):
    """
    Export one CSV per (name, tag) triple produced by ``_gen_triple``,
    casting each data block to the loss dtype.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    matrix = dstore[ekey[0]].value  # shape (A, R, L, I)
    oq = dstore['oqparam']
    dt = oq.loss_dt()
    asset_array = get_assets(dstore)
    csv = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    triples = _gen_triple(
        ekey[0], matrix, oq.quantile_loss_curves, oq.insured_losses)
    for name, tag, block in triples:
        records = numpy.array([tuple(row) for row in block], dt)
        dest = dstore.build_fname(name, tag, 'csv')
        csv.save(compose_arrays(asset_array, records), dest)
    return csv.getsaved()
def export_loss_maps_csv(ekey, dstore):
    """
    Export one CSV of loss maps per realization or statistic, annotating
    each file with metadata (kind and risk investigation time).
    """
    kind = ekey[0].split('-')[1]  # rlzs or stats
    asset_array = get_assets(dstore)
    maps = get_loss_maps(dstore, kind)
    oq = dstore['oqparam']
    if kind == 'rlzs':
        tags = dstore['csm_info'].get_rlzs_assoc().realizations
    else:
        tags = ['mean'] + ['quantile-%s' % q for q in oq.quantiles]
    csv = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    md = dstore.metadata
    for idx, tag in enumerate(tags):
        uid = getattr(tag, 'uid', tag)  # realizations have a uid attribute
        dest = dstore.build_fname('loss_maps', tag, ekey[1])
        md.update(dict(
            kind=uid, risk_investigation_time=oq.risk_investigation_time))
        csv.save(compose_arrays(asset_array, maps[:, idx]), dest, comment=md)
    return csv.getsaved()
def export_avg_losses(ekey, dstore):
    """
    Export one CSV of average losses per tag returned by ``_get_data``,
    one named column per loss type.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    dskey = ekey[0]
    oq = dstore['oqparam']
    dt = oq.loss_dt()
    name, value, tags = _get_data(dstore, dskey, oq.hazard_stats().items())
    csv = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    asset_array = get_assets(dstore)
    for tag, values in zip(tags, value.transpose(1, 0, 2)):
        dest = dstore.build_fname(name, tag, 'csv')
        records = numpy.zeros(len(values), dt)
        for idx, ltype in enumerate(dt.names):
            records[ltype] = values[:, idx]
        csv.save(compose_arrays(asset_array, records), dest)
    return csv.getsaved()
def export_losses_by_asset(ekey, dstore):
    """
    Export one CSV of losses by asset per realization, with metadata
    comments and the `id` column renamed to `asset_id`.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    loss_dt = oq.loss_dt(stat_dt)
    matrix = dstore[ekey[0]][()]
    asset_array = get_assets(dstore)
    csv = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    md = dstore.metadata
    md.update(dict(
        investigation_time=oq.investigation_time,
        risk_investigation_time=oq.risk_investigation_time))
    for realization in dstore['csm_info'].get_rlzs_assoc().realizations:
        column = matrix[:, realization.ordinal]
        dest = dstore.build_fname('losses_by_asset', realization, 'csv')
        # copy before viewing so the view is over contiguous data
        records = column.copy().view(loss_dt)[:, 0]
        csv.save(compose_arrays(asset_array, records), dest,
                 comment=md, renamedict=dict(id='asset_id'))
    return csv.getsaved()
def export_loss_maps_csv(ekey, dstore):
    """
    Export one CSV of loss maps per realization or hazard statistic, with
    metadata comments and the `id` column renamed to `asset_id`.
    """
    kind = ekey[0].split('-')[1]  # rlzs or stats
    asset_array = get_assets(dstore)
    maps = get_loss_maps(dstore, kind)
    oq = dstore['oqparam']
    if kind == 'rlzs':
        tags = dstore['csm_info'].get_rlzs_assoc().realizations
    else:
        tags = oq.hazard_stats()
    csv = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    md = dstore.metadata
    for idx, tag in enumerate(tags):
        uid = getattr(tag, 'uid', tag)  # realizations have a uid attribute
        dest = dstore.build_fname('loss_maps', tag, ekey[1])
        md.update(dict(
            kind=uid, risk_investigation_time=oq.risk_investigation_time))
        csv.save(compose_arrays(asset_array, maps[:, idx]), dest,
                 comment=md, renamedict=dict(id='asset_id'))
    return csv.getsaved()
def export_loss_maps_csv(ekey, dstore):
    """
    Export one CSV of loss maps per realization or hazard statistic; the
    risk investigation time falls back to the hazard investigation time.
    """
    kind = ekey[0].split('-')[1]  # rlzs or stats
    asset_array = get_assets(dstore)
    maps = get_loss_maps(dstore, kind)
    oq = dstore['oqparam']
    if kind == 'rlzs':
        rlzs_or_stats = dstore['full_lt'].get_realizations()
    else:
        rlzs_or_stats = oq.hazard_stats()
    csv = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    md = dstore.metadata
    for idx, ros in enumerate(rlzs_or_stats):
        if hasattr(ros, 'ordinal'):  # is a realization
            ros = 'rlz-%d' % ros.ordinal
        dest = dstore.build_fname('loss_maps', ros, ekey[1])
        rit = oq.risk_investigation_time or oq.investigation_time
        md.update(dict(kind=ros, risk_investigation_time=rit))
        csv.save(compose_arrays(asset_array, maps[:, idx]), dest,
                 comment=md, renamedict=dict(id='asset_id'))
    return csv.getsaved()
def extract_losses_by_asset(dstore, what):
    """
    Yield pairs (tag, composed losses-by-asset array), one per realization
    or statistic, depending on which dataset is present in the datastore.
    """
    loss_dt = dstore['oqparam'].loss_dt()
    realizations = dstore['full_lt'].get_realizations()
    asset_array = util.get_assets(dstore)
    if 'losses_by_asset' in dstore:
        matrix = dstore['losses_by_asset'][()]
        for realization in realizations:
            # export the 'mean' and ignore the 'stddev'
            losses = cast(matrix[:, realization.ordinal]['mean'], loss_dt)
            tag = 'rlz-%03d' % realization.ordinal
            yield tag, util.compose_arrays(asset_array, losses)
    elif 'avg_losses-stats' in dstore:
        aw = hdf5.ArrayWrapper.from_(dstore['avg_losses-stats'])
        for s, stat in enumerate(aw.stat):
            losses = cast(aw[:, s], loss_dt)
            yield stat, util.compose_arrays(asset_array, losses)
    elif 'avg_losses-rlzs' in dstore:  # there is only one realization
        losses = cast(dstore['avg_losses-rlzs'][()], loss_dt)
        yield 'rlz-000', util.compose_arrays(asset_array, losses)
def export_avg_losses(ekey, dstore):
    """
    Export one CSV of average losses per tag returned by ``_get_data``,
    one float32 column per loss name, with metadata comments.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    dskey = ekey[0]
    oq = dstore['oqparam']
    dt = [(ln, F32) for ln in oq.loss_names]
    name, value, tags = _get_data(dstore, dskey, oq.hazard_stats())
    csv = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    asset_array = get_assets(dstore)
    md = dstore.metadata
    md.update(dict(
        investigation_time=oq.investigation_time,
        risk_investigation_time=oq.risk_investigation_time))
    for tag, values in zip(tags, value.transpose(1, 0, 2)):
        dest = dstore.build_fname(name, tag, 'csv')
        records = numpy.zeros(len(values), dt)
        for li, ln in enumerate(oq.loss_names):
            records[ln] = values[:, li]
        csv.save(compose_arrays(asset_array, records), dest, comment=md)
    return csv.getsaved()
def extract_losses_by_asset(dstore, what):
    """
    Yield pairs (tag, composed losses-by-asset array), one per realization
    or statistic, depending on which dataset is present in the datastore.
    """
    loss_dt = dstore['oqparam'].loss_dt()
    realizations = dstore['csm_info'].get_rlzs_assoc().realizations
    asset_array = util.get_assets(dstore)
    if 'losses_by_asset' in dstore:
        matrix = dstore['losses_by_asset'][()]
        for realization in realizations:
            # export the 'mean' and ignore the 'stddev'
            losses = cast(matrix[:, realization.ordinal]['mean'], loss_dt)
            tag = 'rlz-%03d' % realization.ordinal
            yield tag, util.compose_arrays(asset_array, losses)
    elif 'avg_losses-stats' in dstore:
        avg = dstore['avg_losses-stats'][()]
        stats = decode(dstore['avg_losses-stats'].attrs['stats'])
        for s, stat in enumerate(stats):
            losses = cast(avg[:, s], loss_dt)
            yield stat, util.compose_arrays(asset_array, losses)
    elif 'avg_losses-rlzs' in dstore:  # there is only one realization
        losses = cast(dstore['avg_losses-rlzs'][()], loss_dt)
        yield 'rlz-000', util.compose_arrays(asset_array, losses)
def extract_losses_by_asset(dstore, what):
    """
    Yield pairs (tag, composed losses-by-asset array), one per realization
    or statistic, depending on which dataset is present in the datastore.
    """
    loss_dt = dstore['oqparam'].loss_dt()
    realizations = dstore['csm_info'].get_rlzs_assoc().realizations
    asset_array = util.get_assets(dstore)
    if 'losses_by_asset' in dstore:
        matrix = dstore['losses_by_asset'][()]
        for realization in realizations:
            # export the 'mean' and ignore the 'stddev'
            losses = cast(matrix[:, realization.ordinal]['mean'], loss_dt)
            tag = 'rlz-%03d' % realization.ordinal
            yield tag, util.compose_arrays(asset_array, losses)
    elif 'avg_losses-stats' in dstore:
        avg = dstore['avg_losses-stats'][()]
        # NOTE(review): the 'stats' attribute is not decoded here, so the
        # yielded tags may be byte strings — confirm against the callers
        stats = dstore['avg_losses-stats'].attrs['stats']
        for s, stat in enumerate(stats):
            losses = cast(avg[:, s], loss_dt)
            yield stat, util.compose_arrays(asset_array, losses)
    elif 'avg_losses-rlzs' in dstore:  # there is only one realization
        losses = cast(dstore['avg_losses-rlzs'][()], loss_dt)
        yield 'rlz-000', util.compose_arrays(asset_array, losses)
def export_avg_losses(ekey, dstore):
    """
    Export one CSV of average losses per tag; for the 'stats' kind the
    statistics are computed on the fly from the realization values.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = dstore['oqparam']
    dt = oq.loss_dt()
    asset_array = get_assets(dstore)
    csv = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    name, kind = ekey[0].split('-')
    value = dstore[name + '-rlzs'].value  # shape (A, R, L')
    if kind == 'stats':
        weights = dstore['realizations']['weight']
        tags, stats = zip(*oq.risk_stats())
        value = compute_stats2(value, stats, weights)
    else:  # rlzs
        tags = ['rlz-%03d' % r
                for r in range(len(dstore['realizations']))]
    for tag, values in zip(tags, value.transpose(1, 0, 2)):
        dest = dstore.build_fname(name, tag, 'csv')
        records = numpy.zeros(len(values), dt)
        for idx, ltype in enumerate(dt.names):
            records[ltype] = values[:, idx]
        csv.save(compose_arrays(asset_array, records), dest)
    return csv.getsaved()