def pre_execute(self):
    """
    Associate the assets to the sites and build the riskinputs.
    """
    if 'hazard_curves' in self.oqparam.inputs:  # read hazard from file
        haz_sitecol, haz_curves = readinput.get_hcurves(self.oqparam)
        self.read_exposure()  # define .assets_by_site
        self.load_riskmodel()
        self.sitecol, self.assets_by_site = self.assoc_assets_sites(
            haz_sitecol)
        curves_by_trt_gsim = {(0, 'FromFile'): haz_curves}
        self.rlzs_assoc = logictree.trivial_rlzs_assoc()
        self.save_mesh()
    else:  # compute hazard
        super(ClassicalRiskCalculator, self).pre_execute()
        logging.info('Preparing the risk input')
        curves_by_trt_gsim = {}
        for dset in self.datastore['curves_by_sm'].values():
            for key, curves in dset.items():
                trt_id, gsim = key.split('-')
                curves_by_trt_gsim[int(trt_id), gsim] = curves.value
    self.assetcol = riskinput.build_asset_collection(
        self.assets_by_site, self.oqparam.time_event)
    self.riskinputs = self.build_riskinputs(curves_by_trt_gsim)
    self.monitor.oqparam = self.oqparam
    self.N = sum(len(assets) for assets in self.assets_by_site)
    self.L = len(self.riskmodel.loss_types)
    self.R = len(self.rlzs_assoc.realizations)
    self.I = self.oqparam.insured_losses
    self.Q1 = len(self.oqparam.quantile_loss_curves) + 1
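# A minimal, self-contained sketch (not engine code) of the key convention the
# loop above assumes: each dataset under 'curves_by_sm' is keyed by a
# '<trt_id>-<gsim>' string, which is split back into an (int, str) pair.
# Plain dicts and lists stand in for the HDF5 groups and datasets.
def collect_curves_by_trt_gsim(curves_by_sm):
    curves_by_trt_gsim = {}
    for dset in curves_by_sm.values():
        for key, curves in dset.items():
            trt_id, gsim = key.split('-')
            curves_by_trt_gsim[int(trt_id), gsim] = curves
    return curves_by_trt_gsim

# hypothetical example: two source models, one (trt_id, gsim) pair each
curves_by_sm = {'sm-0': {'0-BooreAtkinson2008': [0.9, 0.1]},
                'sm-1': {'1-ChiouYoungs2008': [0.8, 0.2]}}
collect_curves_by_trt_gsim(curves_by_sm)
# -> {(0, 'BooreAtkinson2008'): [0.9, 0.1], (1, 'ChiouYoungs2008'): [0.8, 0.2]}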
def pre_execute(self):
    """
    Associate the assets to the sites and build the riskinputs.
    """
    if 'hazard_curves' in self.oqparam.inputs:  # read hazard from file
        haz_sitecol, haz_curves = readinput.get_hcurves(self.oqparam)
        self.save_params()
        self.read_exposure()  # define .assets_by_site
        self.load_riskmodel()
        self.assetcol = riskinput.AssetCollection(
            self.assets_by_site, self.cost_calculator,
            self.oqparam.time_event)
        self.sitecol, self.assets_by_site = self.assoc_assets_sites(
            haz_sitecol)
        curves_by_trt_gsim = {(0, 'FromFile'): haz_curves}
        self.rlzs_assoc = logictree.trivial_rlzs_assoc()
        self.save_mesh()
    else:  # compute hazard or read it from the datastore
        super(ClassicalRiskCalculator, self).pre_execute()
        logging.info('Preparing the risk input')
        curves_by_trt_gsim = {}
        for dset in self.datastore['curves_by_sm'].values():
            for key, curves in dset.items():
                trt_id, gsim = key.split('-')
                curves_by_trt_gsim[int(trt_id), gsim] = curves.value
    self.riskinputs = self.build_riskinputs(curves_by_trt_gsim)
    self.monitor.oqparam = self.oqparam
    self.N = sum(len(assets) for assets in self.assets_by_site)
    self.L = len(self.riskmodel.loss_types)
    self.R = len(self.rlzs_assoc.realizations)
    self.I = self.oqparam.insured_losses
    self.Q1 = len(self.oqparam.quantile_loss_curves) + 1
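# A toy illustration (hypothetical values, not engine code) of the bookkeeping
# attributes set at the end of pre_execute; Q1 is presumably the number of
# statistical loss curves, i.e. the mean plus one curve per quantile.
assets_by_site = [['a1', 'a2'], [], ['a3']]        # 2 sites with assets, 1 empty
loss_types = ['structural', 'nonstructural']
realizations = ['rlz-000', 'rlz-001']
quantile_loss_curves = [0.15, 0.85]
insured_losses = False

N = sum(len(assets) for assets in assets_by_site)  # 3 assets in total
L = len(loss_types)                                # 2 loss types
R = len(realizations)                              # 2 realizations
I = insured_losses                                 # no insured-loss outputs
Q1 = len(quantile_loss_curves) + 1                 # mean + 2 quantiles = 3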
def pre_execute(self):
    """
    Read the GMFs (from file or from the precalculator), read the
    consequence models and build the riskinputs.
    """
    if 'gmfs' in self.oqparam.inputs:
        self.pre_calculator = None
    base.RiskCalculator.pre_execute(self)
    self.monitor.consequence_models = riskmodels.get_risk_models(
        self.oqparam, 'consequence')
    if 'gmfs' in self.oqparam.inputs:
        self.rlzs_assoc = logictree.trivial_rlzs_assoc()
    self.etags, gmfs = base.get_gmfs(self.datastore)
    self.riskinputs = self.build_riskinputs(gmfs)
    self.monitor.taxonomies = sorted(self.taxonomies)
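# A hedged sketch of the dictionary shape build_riskinputs is expected to
# receive when the GMFs come from a file (see get_gmfs below): a single
# trivial realization keyed (0, 'FromFile'), mapping to one array per IMT
# with one row per (filtered) site and one column per ground motion field.
# The IMT names and values are hypothetical.
import numpy

gmfs_from_file = {
    (0, 'FromFile'): {
        'PGA': numpy.array([[0.10, 0.12, 0.05],
                            [0.08, 0.20, 0.11]]),      # 2 sites x 3 fields
        'SA(0.5)': numpy.array([[0.30, 0.25, 0.18],
                                [0.15, 0.40, 0.22]]),
    }
}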
def pre_execute(self):
    """
    Compute the GMFs, build the epsilons, the riskinputs, and a dictionary
    with the unit of measure, used in the export phase.
    """
    if 'gmfs' in self.oqparam.inputs:
        self.pre_calculator = None
    base.RiskCalculator.pre_execute(self)
    logging.info('Building the epsilons')
    epsilon_matrix = self.make_eps(
        self.oqparam.number_of_ground_motion_fields)
    if 'gmfs' in self.oqparam.inputs:
        self.rlzs_assoc = logictree.trivial_rlzs_assoc()
    self.etags, gmfs = base.get_gmfs(self.datastore)
    self.riskinputs = self.build_riskinputs(gmfs, epsilon_matrix)
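# A minimal stand-in (assumed shape and distribution, not the engine's
# make_eps) for the epsilon matrix built above: one row per asset and one
# column per ground motion field, filled with standard normal samples that
# model the asset-level variability of the loss ratios.
import numpy

def toy_make_eps(num_assets, num_gmfs, seed=42):
    rng = numpy.random.RandomState(seed)
    return rng.normal(size=(num_assets, num_gmfs))

eps = toy_make_eps(num_assets=3, num_gmfs=5)  # shape (3, 5)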
def get_gmfs(calc):
    """
    :param calc: a ScenarioDamage or ScenarioRisk calculator
    :returns: a pair (site collection, dictionary of GMFs by (trt_id, gsim))
    """
    if 'gmfs' in calc.oqparam.inputs:  # read the GMFs from file
        logging.info('Reading gmfs from file')
        sitecol, calc.tags, gmfs_by_imt = readinput.get_gmfs(calc.oqparam)
        calc.save_params()  # save number_of_ground_motion_fields and sites
        # reduce the gmfs matrices to the filtered sites
        for imt in calc.oqparam.imtls:
            gmfs_by_imt[imt] = gmfs_by_imt[imt][sitecol.indices]
        logging.info('Preparing the risk input')
        calc.rlzs_assoc = logictree.trivial_rlzs_assoc()
        return sitecol, {(0, 'FromFile'): gmfs_by_imt}

    # else the GMFs come from a computed rupture
    gmf = calc.datastore['gmfs/col00'].value
    # NB: if the hazard site collection has N sites, the hazard
    # filtered site collection for the nonzero GMFs has N' <= N sites
    # whereas the risk site collection associated to the assets
    # has N'' <= N' sites
    if calc.datastore.parent:
        haz_sitecol = calc.datastore.parent['sitecol']  # N' values
    else:
        haz_sitecol = calc.sitecol
    risk_indices = set(calc.sitecol.indices)  # N'' values
    N = len(haz_sitecol.complete)
    imt_dt = numpy.dtype([(imt, float) for imt in calc.oqparam.imtls])
    gmf_by_idx = general.groupby(gmf, lambda row: row['idx'])
    R = len(gmf_by_idx)
    # build a matrix N x R for each GSIM realization
    gmfs = {(trt_id, gsim): numpy.zeros((N, R), imt_dt)
            for trt_id, gsim in calc.rlzs_assoc}
    for rupid, rows in sorted(gmf_by_idx.items()):
        assert len(haz_sitecol.indices) == len(rows), (
            len(haz_sitecol.indices), len(rows))
        for sid, gmv in zip(haz_sitecol.indices, rows):
            if sid in risk_indices:
                for trt_id, gsim in gmfs:
                    gmfs[trt_id, gsim][sid, rupid] = gmv[gsim]
    return haz_sitecol, gmfs
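# A self-contained toy (hypothetical data, plain-Python grouping instead of
# general.groupby, and a single float per GSIM instead of the per-IMT
# structured dtype) that mirrors the indexing scheme above: GMV rows carry a
# rupture index 'idx' and one value per GSIM; rows are grouped by rupture and
# scattered into an N x R matrix at the hazard site ids, keeping only the
# sites that also carry assets.
import numpy

gmf = numpy.array(
    [(0, 0.10, 0.11), (0, 0.20, 0.22), (1, 0.15, 0.14), (1, 0.05, 0.06)],
    dtype=[('idx', int), ('GsimA', float), ('GsimB', float)])

haz_indices = [2, 4]   # hazard sites with nonzero GMFs (N' = 2 out of N = 5)
risk_indices = {4}     # hazard sites that also carry assets (N'' = 1)
N, gsims = 5, ['GsimA', 'GsimB']

gmf_by_idx = {}  # group the rows by rupture index
for row in gmf:
    gmf_by_idx.setdefault(int(row['idx']), []).append(row)
R = len(gmf_by_idx)  # number of ruptures, i.e. of ground motion fields

gmfs = {(0, gsim): numpy.zeros((N, R)) for gsim in gsims}
for rupid, rows in sorted(gmf_by_idx.items()):
    for sid, gmv in zip(haz_indices, rows):
        if sid in risk_indices:
            for trt_id, gsim in gmfs:
                gmfs[trt_id, gsim][sid, rupid] = gmv[gsim]
# gmfs[0, 'GsimA'] is nonzero only in row 4, the single asset-bearing site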