def export_curves_rlzs(self, aids, key):
    """
    :returns: a dictionary key -> record of dtype loss_curve_dt
    """
    if 'loss_curves-rlzs' in self.dstore:  # classical_risk
        data = self.dstore['loss_curves-rlzs'][aids]  # shape (A, R)
        if key.startswith('rlz-'):
            rlzi = int(key[4:])
            return {key: data[:, rlzi]}
        # else key == 'rlzs': return the data for all realizations
        return {'rlz-%03d' % rlzi: data[:, rlzi] for rlzi in range(self.R)}
    # otherwise event_based
    crm = riskinput.read_composite_risk_model(self.dstore)
    builder = crm.curve_builder
    avalues = self.assetcol.values(aids)
    lrgetter = riskinput.LossRatiosGetter(self.dstore, aids)
    if key.startswith('rlz-'):
        rlzi = int(key[4:])
        ratios = lrgetter.get(rlzi)
        return {'rlz-%03d' % rlzi: builder.build_curves(avalues, ratios)}
    else:  # key is 'rlzs': return a dictionary with all realizations
        # this may be disabled in the future unless an asset is specified
        dic = {}
        for rlzi in range(self.R):
            dic['rlz-%03d' % rlzi] = builder.build_curves(
                avalues, lrgetter.get(rlzi))
        return dic
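# A minimal usage sketch, not taken from the source: `calc` stands for whatever
# calculator/exporter instance exposes export_curves_rlzs, and the asset
# ordinals below are made up for illustration.
import numpy

aids = numpy.array([0, 1, 2])                       # hypothetical asset ordinals
single = calc.export_curves_rlzs(aids, 'rlz-000')   # {'rlz-000': loss_curve_dt records}
all_rlzs = calc.export_curves_rlzs(aids, 'rlzs')    # one entry per realization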
def export_rcurves_rlzs(ekey, dstore):
    oq = dstore['oqparam']
    riskmodel = riskinput.read_composite_risk_model(dstore)
    assetcol = dstore['assetcol']
    aref = dstore['asset_refs'].value
    kind = ekey[0].split('-')[1]  # rlzs or stats
    if oq.avg_losses:
        acurves = dstore['avg_losses-' + kind]
    rcurves = dstore[ekey[0]]
    [loss_ratios] = dstore['loss_ratios']
    fnames = []
    writercls = (risk_writers.LossCurveGeoJSONWriter
                 if ekey[0] == 'geojson'
                 else risk_writers.LossCurveXMLWriter)
    for writer, (ltype, poe, r, ins) in _gen_writers(
            dstore, writercls, ekey[0]):
        if ltype not in loss_ratios.dtype.names:
            continue  # ignore loss type
        l = riskmodel.lti[ltype]
        poes = rcurves[ltype][:, r, ins]
        curves = []
        for aid, ass in enumerate(assetcol):
            loc = Location(*ass.location)
            losses = loss_ratios[ltype] * ass.value(ltype)
            # -1 means that the average was not computed
            avg = acurves[aid, r, l][ins] if oq.avg_losses else -1
            curve = LossCurve(loc, aref[ass.idx], poes[aid],
                              losses, loss_ratios[ltype], avg, None)
            curves.append(curve)
        writer.serialize(curves)
        fnames.append(writer._dest)
    return sorted(fnames)
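# A hedged usage sketch, not from the source: the export key below assumes a
# dataset named 'rcurves-rlzs' (matching kind = ekey[0].split('-')[1]) and an
# already opened datastore `dstore`.
fnames = export_rcurves_rlzs(('rcurves-rlzs', 'xml'), dstore)
# -> sorted list of the serialized loss-curve file names, one per writer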
def load_riskmodel(self):
    """
    Read the risk model and set the attribute .riskmodel.
    The riskmodel can be empty for hazard calculations.
    Save the loss ratios (if any) in the datastore.
    """
    logging.info('Reading the risk model if present')
    self.riskmodel = readinput.get_risk_model(self.oqparam)
    if not self.riskmodel:
        parent = self.datastore.parent
        if 'fragility' in parent or 'vulnerability' in parent:
            self.riskmodel = riskinput.read_composite_risk_model(parent)
        return
    self.save_params()  # re-save oqparam
def load_riskmodel(self):
    # to be called before read_exposure
    # NB: this is called even if there is no risk model
    """
    Read the risk model and set the attribute .riskmodel.
    The riskmodel can be empty for hazard calculations.
    Save the loss ratios (if any) in the datastore.
    """
    logging.info('Reading the risk model if present')
    self.riskmodel = readinput.get_risk_model(self.oqparam)
    if not self.riskmodel:
        parent = self.datastore.parent
        if 'fragility' in parent or 'vulnerability' in parent:
            self.riskmodel = riskinput.read_composite_risk_model(parent)
        return
    if self.oqparam.ground_motion_fields and not self.oqparam.imtls:
        raise InvalidFile('No intensity_measure_types specified in %s' %
                          self.oqparam.inputs['job_ini'])
    self.save_params()  # re-save oqparam
def load_riskmodel(self):
    """
    Read the risk model and set the attribute .riskmodel.
    The riskmodel can be empty for hazard calculations.
    Save the loss ratios (if any) in the datastore.
    """
    logging.info('Reading the risk model if present')
    self.riskmodel = rm = readinput.get_risk_model(self.oqparam)
    if not self.riskmodel:
        parent = self.datastore.parent
        if 'composite_risk_model' in parent:
            self.riskmodel = riskinput.read_composite_risk_model(parent)
        return
    self.save_params()  # re-save oqparam
    # save the risk models and loss_ratios in the datastore
    self.datastore['composite_risk_model'] = rm
    attrs = self.datastore.getitem('composite_risk_model').attrs
    attrs['min_iml'] = hdf5.array_of_vstr(sorted(rm.get_min_iml().items()))
    self.datastore.set_nbytes('composite_risk_model')
    self.datastore.hdf5.flush()