import os
import logging
import itertools

# NB: this excerpt also relies on engine helpers that are defined elsewhere
# and not shown here: riskmodels, scientific, risk_writers, OqParam,
# AccumDict, Location, LossMap, PerAssetLoss, AggLoss, calc,
# export_loss_csv, get_paths and add_epsilons.


def export_avglosses(ekey, dstore):
    """
    Export the average losses per asset in CSV format, one file per
    realization and loss type. This variant reads the `avglosses`
    dataset as a 3-dimensional array of shape (N, L, R).

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    unit_by_lt = {riskmodels.cost_type_to_loss_type(ct['name']): ct['unit']
                  for ct in dstore['cost_types']}
    unit_by_lt['fatalities'] = 'people'
    rlzs = dstore['rlzs_assoc'].realizations
    avglosses = dstore['avglosses']
    riskmodel = dstore['riskmodel']
    assets = dstore['assetcol']['asset_ref']
    N, L, R = avglosses.shape
    fnames = []
    for l, r in itertools.product(range(L), range(R)):
        rlz = rlzs[r]
        lt = riskmodel.loss_types[l]
        unit = unit_by_lt[lt]
        suffix = '' if L == 1 and R == 1 else '-gsimltp_%s_%s' % (rlz.uid, lt)
        losses = [PerAssetLoss(lt, unit, ass, stat['mean'], stat['stddev'])
                  for ass, stat in zip(assets, avglosses[:, l, r])]
        out = export_loss_csv(('avg', 'csv'), dstore.export_dir, losses,
                              suffix)
        fnames.append(out)
    return sorted(fnames)

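# Not part of the original module: a minimal sketch of the loss record
# containers assumed by the exporters, with field names inferred from the
# call sites above; the real definitions live elsewhere and may differ.
import collections

PerAssetLossSketch = collections.namedtuple(  # hypothetical reconstruction
    'PerAssetLoss', 'loss_type unit asset_ref mean stddev')
AggLossSketch = collections.namedtuple(  # hypothetical reconstruction
    'AggLoss', 'loss_type unit mean stddev')
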
def export_risk(ekey, dstore):
    """
    Export the loss curves of a given realization in CSV format.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oqparam = dstore['oqparam']
    unit_by_lt = {riskmodels.cost_type_to_loss_type(ct['name']): ct['unit']
                  for ct in dstore['cost_types']}
    unit_by_lt['fatalities'] = 'people'
    rlzs = dstore['rlzs_assoc'].realizations
    losses_by_key = dstore['losses_by_key']
    fnames = []
    for i in sorted(losses_by_key):
        rlz = rlzs[i]
        result = losses_by_key[i]
        suffix = '' if rlz.uid == '*' else '-gsimltp_%s' % rlz.uid
        losses = AccumDict()
        # .items(), not the Python 2-only .iteritems()
        for key, values in result.items():
            key_type, loss_type = key
            unit = unit_by_lt[loss_type]
            if key_type in ('agg', 'ins'):
                mean, std = scientific.mean_std(values)
                losses += {key_type: [AggLoss(loss_type, unit, mean, std)]}
            else:
                losses += {key_type: [PerAssetLoss(loss_type, unit, *vals)
                                      for vals in values]}
        for key_type in losses:
            out = export_loss_csv((key_type, 'csv'), oqparam.export_dir,
                                  losses[key_type], suffix)
            fnames.append(out)
    return sorted(fnames)

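# Not part of the original module: `losses += {key_type: [...]}` above
# relies on AccumDict merging dictionaries key-wise by adding values, so
# lists stored under the same key concatenate. A minimal stand-in with the
# same behaviour (the real class comes from the engine's utilities):
class AccumDictSketch(dict):
    def __iadd__(self, other):
        for key, value in other.items():
            # start from an empty value of the same type, then add
            self[key] = self.get(key, type(value)()) + value
        return self

# usage: acc = AccumDictSketch(); acc += {'agg': [1]}; acc += {'agg': [2]}
# now acc == {'agg': [1, 2]}
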
def export_loss_maps_xml_geojson(ekey, dstore):
    """
    Export the loss maps at the given PoEs in XML or GeoJSON format,
    one file per realization, loss type and PoE.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = OqParam.from_(dstore.attrs)
    unit_by_lt = {riskmodels.cost_type_to_loss_type(ct['name']): ct['unit']
                  for ct in dstore['cost_types']}
    unit_by_lt['fatalities'] = 'people'
    rlzs = dstore['rlzs_assoc'].realizations
    loss_maps = dstore[ekey[0]]
    riskmodel = dstore['riskmodel']
    assetcol = dstore['assetcol']
    R = len(rlzs)
    sitemesh = dstore['sitemesh']
    L = len(riskmodel.loss_types)
    fnames = []
    export_type = ekey[1]
    writercls = (risk_writers.LossMapGeoJSONWriter
                 if export_type == 'geojson'
                 else risk_writers.LossMapXMLWriter)
    loss_types = [cb.loss_type for cb in riskmodel.curve_builders
                  if cb.user_provided]
    for lt in loss_types:
        loss_maps_lt = loss_maps[lt]
        for r in range(R):
            lmaps = loss_maps_lt[:, r]
            for poe in oq.conditional_loss_poes:
                for insflag in range(oq.insured_losses + 1):
                    ins = '_ins' if insflag else ''
                    rlz = rlzs[r]
                    unit = unit_by_lt[lt]
                    suffix = ('' if L == 1 and R == 1
                              else '-gsimltp_%s_%s' % (rlz.uid, lt))
                    root = ekey[0][:-5]  # strip -rlzs
                    name = '%s%s-poe-%s%s.%s' % (
                        root, suffix, poe, ins, ekey[1])
                    fname = dstore.export_path(name)
                    data = []
                    poe_str = 'poe~%s' % poe + ins
                    for ass, stat in zip(assetcol, lmaps[poe_str]):
                        loc = Location(sitemesh[ass['site_id']])
                        lm = LossMap(loc, ass['asset_ref'], stat, None)
                        data.append(lm)
                    writer = writercls(
                        fname, oq.investigation_time, poe=poe, loss_type=lt,
                        unit=unit, **get_paths(rlz))
                    writer.serialize(data)
                    fnames.append(fname)
    return sorted(fnames)

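# Not part of the original module: `get_paths(rlz)` is assumed to turn a
# realization into the logic-tree path keywords accepted by the writers.
# A hypothetical sketch (the attribute names are guesses based on the
# writer signatures used above):
def get_paths_sketch(rlz):
    if hasattr(rlz, 'sm_lt_path'):  # full logic-tree realization
        return {'source_model_tree_path': '_'.join(rlz.sm_lt_path),
                'gsim_tree_path': '_'.join(rlz.gsim_lt_path)}
    return {'gsim_tree_path': rlz.uid}  # fallback: the unique id only
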
def export_loss_map_xml_geojson(ekey, dstore):
    """
    Export the loss map (mean and stddev per asset) in XML or GeoJSON
    format, one file per realization and loss type.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = OqParam.from_(dstore.attrs)
    unit_by_lt = {riskmodels.cost_type_to_loss_type(ct['name']): ct['unit']
                  for ct in dstore['cost_types']}
    unit_by_lt['fatalities'] = 'people'
    rlzs = dstore['rlzs_assoc'].realizations
    loss_map = dstore[ekey[0]]
    riskmodel = dstore['riskmodel']
    assetcol = dstore['assetcol']
    R = len(rlzs)
    sitemesh = dstore['sitemesh']
    L = len(riskmodel.loss_types)
    fnames = []
    export_type = ekey[1]
    writercls = (risk_writers.LossMapGeoJSONWriter
                 if export_type == 'geojson'
                 else risk_writers.LossMapXMLWriter)
    loss_types = riskmodel.loss_types
    for lt in loss_types:
        alosses = loss_map[lt]
        for ins in range(oq.insured_losses + 1):
            means = alosses['mean' + ('_ins' if ins else '')]
            stddevs = alosses['stddev' + ('_ins' if ins else '')]
            for r in range(R):
                rlz = rlzs[r]
                unit = unit_by_lt[lt]
                suffix = ('' if L == 1 and R == 1
                          else '-gsimltp_%s_%s' % (rlz.uid, lt))
                root = ekey[0][:-5]  # strip -rlzs
                name = '%s%s%s.%s' % (
                    root, suffix, '_ins' if ins else '', ekey[1])
                fname = dstore.export_path(name)
                data = []
                for ass, mean, stddev in zip(
                        assetcol, means[:, r], stddevs[:, r]):
                    loc = Location(sitemesh[ass['site_id']])
                    lm = LossMap(loc, ass['asset_ref'], mean, stddev)
                    data.append(lm)
                writer = writercls(
                    fname, oq.investigation_time, poe=None, loss_type=lt,
                    gsim_tree_path=rlz.uid, unit=unit)
                writer.serialize(data)
                fnames.append(fname)
    return sorted(fnames)

def export_lossmaps_xml_geojson(ekey, dstore):
    """
    Export the average loss maps in XML or GeoJSON format, one file
    per realization and loss type.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = OqParam.from_(dstore.attrs)
    unit_by_lt = {riskmodels.cost_type_to_loss_type(ct['name']): ct['unit']
                  for ct in dstore['cost_types']}
    unit_by_lt['fatalities'] = 'people'
    rlzs = dstore['rlzs_assoc'].realizations
    avglosses = dstore[ekey[0]]
    riskmodel = dstore['riskmodel']
    assetcol = dstore['assetcol']
    sitemesh = dstore['sitemesh']
    L = len(riskmodel.loss_types)
    N, R = avglosses.shape
    fnames = []
    export_type = ekey[1]
    writercls = (risk_writers.LossMapGeoJSONWriter
                 if export_type == 'geojson'
                 else risk_writers.LossMapXMLWriter)
    for lt in riskmodel.loss_types:
        alosses = avglosses[lt]
        for r in range(R):
            rlz = rlzs[r]
            unit = unit_by_lt[lt]
            suffix = ('' if L == 1 and R == 1
                      else '-gsimltp_%s_%s' % (rlz.uid, lt))
            name = '%s%s.%s' % (ekey[0], suffix, ekey[1])
            fname = dstore.export_path(name)
            data = []
            for ass, stat in zip(assetcol, alosses[:, r]):
                loc = Location(sitemesh[ass['site_id']])
                lm = LossMap(loc, ass['asset_ref'],
                             stat['mean'], stat['stddev'])
                data.append(lm)
            writer = writercls(
                fname, oq.investigation_time, poe=None, loss_type=lt,
                gsim_tree_path=None, unit=unit, loss_category=None)
            # TODO: replace the category with the exposure category
            writer.serialize(data)
            fnames.append(fname)
    return sorted(fnames)

def pre_execute(self):
    """
    Compute the GMFs, build the epsilons, the riskinputs, and a
    dictionary with the unit of measure, used in the export phase.
    """
    super(ScenarioRiskCalculator, self).pre_execute()
    logging.info('Computing the GMFs')
    gmfs_by_imt = calc.calc_gmfs(self.oqparam, self.sitecol)
    logging.info('Preparing the risk input')
    self.riskinputs = self.build_riskinputs(gmfs_by_imt)
    # build the epsilon matrix and add the epsilons to the assets
    num_samples = self.oqparam.number_of_ground_motion_fields
    seed = getattr(self.oqparam, 'master_seed', 42)
    correlation = getattr(self.oqparam, 'asset_correlation', 0)
    add_epsilons(self.assets_by_site, num_samples, seed, correlation)
    self.unit = {riskmodels.cost_type_to_loss_type(ct['name']): ct['unit']
                 for ct in self.exposure.cost_types}
    self.unit['fatalities'] = 'people'

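# Not part of the original module: an illustrative sketch (not the engine's
# actual implementation) of how an `add_epsilons`-style helper can draw an
# (assets x samples) matrix of epsilons with a constant asset-to-asset
# correlation coefficient, as used in pre_execute above.
import numpy

def make_epsilons_sketch(num_assets, num_samples, seed, correlation):
    rng = numpy.random.RandomState(seed)
    if correlation == 0:  # independent standard normals
        return rng.standard_normal((num_assets, num_samples))
    means = numpy.zeros(num_assets)
    # covariance matrix with 1 on the diagonal, `correlation` off-diagonal
    covs = (numpy.ones((num_assets, num_assets)) * correlation +
            numpy.eye(num_assets) * (1. - correlation))
    # multivariate_normal returns (num_samples, num_assets); transpose it
    return rng.multivariate_normal(means, covs, num_samples).T
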
def export_agglosses(ekey, dstore):
    """
    Export the aggregate losses (mean and stddev) in CSV format, one
    file per realization and loss type.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    unit_by_lt = {riskmodels.cost_type_to_loss_type(ct['name']): ct['unit']
                  for ct in dstore['cost_types']}
    unit_by_lt['fatalities'] = 'people'
    rlzs = dstore['rlzs_assoc'].realizations
    agglosses = dstore[ekey[0]]
    riskmodel = dstore['riskmodel']
    L = len(riskmodel.loss_types)
    R, = agglosses.shape
    fnames = []
    for lt in riskmodel.loss_types:
        for r in range(R):
            rlz = rlzs[r]
            unit = unit_by_lt[lt]
            suffix = ('' if L == 1 and R == 1
                      else '-gsimltp_%s_%s' % (rlz.uid, lt))
            loss = agglosses[r][lt]
            losses = [AggLoss(lt, unit, loss['mean'], loss['stddev'])]
            out = export_loss_csv(('agg', 'csv'), dstore, losses, suffix)
            fnames.append(out)
    return sorted(fnames)

def export_lossmaps_xml(ekey, dstore):
    """
    Export the average loss maps in NRML/XML format, one file per
    realization and loss type.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    oq = OqParam.from_(dstore.attrs)
    unit_by_lt = {riskmodels.cost_type_to_loss_type(ct['name']): ct['unit']
                  for ct in dstore['cost_types']}
    unit_by_lt['fatalities'] = 'people'
    rlzs = dstore['rlzs_assoc'].realizations
    avglosses = dstore['avglosses']
    riskmodel = dstore['riskmodel']
    assetcol = dstore['assetcol']
    sitemesh = dstore['sitemesh']
    N, L, R = avglosses.shape
    fnames = []
    for l, r in itertools.product(range(L), range(R)):
        rlz = rlzs[r]
        lt = riskmodel.loss_types[l]
        unit = unit_by_lt[lt]
        suffix = '' if L == 1 and R == 1 else '-gsimltp_%s_%s' % (rlz.uid, lt)
        fname = os.path.join(
            dstore.export_dir, '%s%s.%s' % (ekey[0], suffix, ekey[1]))
        data = []
        for ass, stat in zip(assetcol, avglosses[:, l, r]):
            loc = Location(sitemesh[ass['site_id']])
            lm = LossMap(loc, ass['asset_ref'],
                         stat['mean'], stat['stddev'])
            data.append(lm)
        writer = risk_writers.LossMapXMLWriter(
            fname, oq.investigation_time, poe=None, loss_type=lt,
            gsim_tree_path=None, unit=unit, loss_category=None)
        # TODO: replace the category with the exposure category
        writer.serialize(data)
        fnames.append(fname)
    return sorted(fnames)

def export_avglosses(ekey, dstore):
    """
    Export the average losses per asset in CSV format, one file per
    realization and loss type. This variant reads a composite array of
    shape (N, R) with one field per loss type.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    unit_by_lt = {riskmodels.cost_type_to_loss_type(ct['name']): ct['unit']
                  for ct in dstore['cost_types']}
    unit_by_lt['fatalities'] = 'people'
    rlzs = dstore['rlzs_assoc'].realizations
    avglosses = dstore[ekey[0]]
    riskmodel = dstore['riskmodel']
    assets = dstore['assetcol']['asset_ref']
    N, R = avglosses.shape
    L = len(riskmodel.loss_types)
    fnames = []
    for lt in riskmodel.loss_types:
        alosses = avglosses[lt]
        for r in range(R):
            rlz = rlzs[r]
            unit = unit_by_lt[lt]
            suffix = ('' if L == 1 and R == 1
                      else '-gsimltp_%s_%s' % (rlz.uid, lt))
            losses = [PerAssetLoss(lt, unit, ass,
                                   stat['mean'], stat['stddev'])
                      for ass, stat in zip(assets, alosses[:, r])]
            out = export_loss_csv(('avg', 'csv'), dstore, losses, suffix)
            fnames.append(out)
    return sorted(fnames)

def _gen_writers(dstore, writercls, root):
    # build XMLWriter instances
    oq = OqParam.from_(dstore.attrs)
    rlzs = dstore['rlzs_assoc'].realizations
    cost_types = dstore['cost_types']
    L, R = len(cost_types), len(rlzs)
    for ct in cost_types:
        loss_type = riskmodels.cost_type_to_loss_type(ct['name'])
        for ins in range(oq.insured_losses + 1):
            if root.endswith('-rlzs'):
                for rlz in rlzs:
                    suffix = ('' if L == 1 and R == 1
                              else '-gsimltp_%s_%s' % (rlz.uid, loss_type))
                    dest = dstore.export_path('%s%s%s.xml' % (
                        root[:-5], suffix, '_ins' if ins else ''))
                    yield (writercls(dest, oq.investigation_time, loss_type,
                                     unit=ct['unit'], **get_paths(rlz)),
                           (loss_type, rlz.ordinal, ins))
            elif root.endswith('-stats'):
                pairs = [('mean', None)] + [
                    ('quantile-%s' % q, q) for q in oq.quantile_loss_curves]
                for ordinal, (statname, statvalue) in enumerate(pairs):
                    dest = dstore.export_path('%s-%s-%s%s.xml' % (
                        root[:-6], statname, loss_type,
                        '_ins' if ins else ''))
                    yield (writercls(
                        dest, oq.investigation_time, loss_type,
                        statistics='mean' if ordinal == 0 else 'quantile',
                        quantile_value=statvalue, unit=ct['unit']),
                           (loss_type, ordinal, ins))
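
# Hypothetical usage of `_gen_writers` (the real call sites live elsewhere
# in the export module): each yielded writer serializes the curves picked
# out by its (loss_type, ordinal, ins) key, e.g.
#
#     for writer, key in _gen_writers(
#             dstore, risk_writers.LossCurveXMLWriter, 'loss_curves-rlzs'):
#         writer.serialize(curves_by_key[key])  # curves_by_key is assumed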