def pre_execute(self):
    """
    Run the hazard pre-calculation, build the epsilon matrix and the
    riskinputs, and initialize the parameters and datasets used by the
    task distribution and post-processing phases.
    """
    # logging.warn is a deprecated alias of logging.warning
    logging.warning('%s is still experimental', self.__class__.__name__)
    base.RiskCalculator.pre_execute(self)
    logging.info('Building the epsilons')
    oq = self.oqparam
    A = len(self.assetcol)  # number of assets
    E = oq.number_of_ground_motion_fields  # number of events
    if oq.ignore_covs:
        # no uncertainty on the loss ratios: all-zero epsilons
        eps = numpy.zeros((A, E), numpy.float32)
    else:
        eps = self.make_eps(E)
    self.datastore['eids'], gmfs = calc.get_gmfs(
        self.datastore, self.precalc)
    self.riskinputs = self.build_riskinputs('gmf', gmfs, eps)
    self.param['assetcol'] = self.assetcol
    self.param['insured_losses'] = oq.insured_losses
    self.param['avg_losses'] = oq.avg_losses
    self.param['asset_loss_table'] = oq.asset_loss_table or oq.loss_ratios
    self.taskno = 0
    self.start = 0
    self.R = len(gmfs)  # number of realizations
    self.L = len(self.riskmodel.lti)  # number of loss types
    self.T = len(self.assetcol.taxonomies)
    self.A = A
    # use the `oq` alias consistently (was self.oqparam.insured_losses)
    self.I = I = oq.insured_losses + 1
    self.datastore.create_dset('losses_by_taxon-rlzs', F32,
                               (self.T, self.R, self.L * I))
    if oq.avg_losses:  # removed the redundant local `avg_losses`
        self.dset = self.datastore.create_dset(
            'avg_losses-rlzs', F32, (self.A, self.R, self.L * I))
def export_gmf_data_csv(ekey, dstore):
    """
    Export the GMFs in CSV format.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names
    """
    oq = dstore['oqparam']
    if 'scenario' in oq.calculation_mode:
        imtls = oq.imtls  # reuse `oq` instead of re-reading dstore['oqparam']
        rlzs = dstore['csm_info'].get_rlzs_assoc().realizations
        gsims = [str(rlz.gsim_rlz) for rlz in rlzs]
        n_gmfs = oq.number_of_ground_motion_fields
        # one column per event, named 000, 001, ...
        fields = ['%03d' % i for i in range(n_gmfs)]
        dt = numpy.dtype([(f, F32) for f in fields])
        # the etags returned by get_gmfs are not needed here
        _, gmfs_ = calc.get_gmfs(dstore)
        sitemesh = get_mesh(dstore['sitecol'])
        writer = writers.CsvWriter(fmt='%.5f')
        for gsim, gmfa in zip(gsims, gmfs_):  # gmfa of shape (N, E)
            for imt in imtls:
                gmfs = numpy.zeros(len(gmfa), dt)
                for i in range(len(gmfa)):
                    gmfs[i] = tuple(gmfa[imt][i])
                dest = dstore.build_fname('gmf', '%s-%s' % (gsim, imt), 'csv')
                data = util.compose_arrays(sitemesh, gmfs)
                writer.save(data, dest)
        return writer.getsaved()
    else:  # event based
        exporter = GmfExporter(dstore)
        sm_id, eid = get_sm_id_eid(ekey[0])
        if eid is None:
            logging.info('Exporting only the first event')
            logging.info('Use the command `oq export gmf_data:*:* %d` '
                         'to export everything', dstore.calc_id)
            return exporter.export_one(0, 0)
        elif eid == '*':
            return exporter.export_all()
        else:
            return exporter.export_one(int(sm_id), int(eid))
def export_gmf_spec(ekey, dstore, spec):
    """
    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :param spec: a string specifying what to export exactly
    """
    oq = dstore['oqparam']
    event_ids = numpy.array([int(rid) for rid in spec.split(',')])
    sitemesh = dstore['sitemesh']
    csv = writers.CsvWriter(fmt='%.5f')
    etags = dstore['etags']

    def save(array, tag, imt):
        # write one CSV file named after the event tag and the IMT
        fname = dstore.export_path('gmf-%s-%s.csv' % (tag, imt))
        csv.save(util.compose_arrays(sitemesh, array), fname)

    if 'scenario' in oq.calculation_mode:
        _, gmfs_by_trt_gsim = calc.get_gmfs(dstore)
        gsims = sorted(gsim for trt, gsim in gmfs_by_trt_gsim)
        imts = gmfs_by_trt_gsim[0, gsims[0]].dtype.names
        gmf_dt = numpy.dtype([(str(gsim), F32) for gsim in gsims])
        for eid in event_ids:
            tag = etags[eid]
            for imt in imts:
                # one column per GSIM, one row per site
                arr = numpy.zeros(len(sitemesh), gmf_dt)
                for gsim in gsims:
                    arr[str(gsim)] = gmfs_by_trt_gsim[0, gsim][imt][:, eid]
                save(arr, tag, imt)
    else:  # event based
        for eid in event_ids:
            tag = etags[eid]
            for arr, imt in _get_gmfs(dstore, util.get_serial(tag), eid):
                save(arr, tag, imt)
    return csv.getsaved()
def export_gmf_spec(ekey, dstore, spec):
    """
    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :param spec: a string specifying what to export exactly
    """
    oq = dstore['oqparam']
    eids = numpy.array([int(rid) for rid in spec.split(',')])
    sitemesh = get_mesh(dstore['sitecol'])
    writer = writers.CsvWriter(fmt='%.5f')
    etags = dstore['etags']
    if 'scenario' in oq.calculation_mode:
        _, gmfs_by_trt_gsim = calc.get_gmfs(dstore)
        gsims = sorted(g for _trt, g in gmfs_by_trt_gsim)
        imts = gmfs_by_trt_gsim[0, gsims[0]].dtype.names
        gmf_dt = numpy.dtype([(str(g), F32) for g in gsims])
        for eid in eids:
            etag = etags[eid]
            for imt in imts:
                # build a structured array with one column per GSIM
                gmfa = numpy.zeros(len(sitemesh), gmf_dt)
                for g in gsims:
                    gmfa[str(g)] = gmfs_by_trt_gsim[0, g][imt][:, eid]
                writer.save(
                    util.compose_arrays(sitemesh, gmfa),
                    dstore.export_path('gmf-%s-%s.csv' % (etag, imt)))
    else:  # event based
        for eid in eids:
            etag = etags[eid]
            for gmfa, imt in _get_gmfs(dstore, util.get_serial(etag), eid):
                writer.save(
                    util.compose_arrays(sitemesh, gmfa),
                    dstore.export_path('gmf-%s-%s.csv' % (etag, imt)))
    return writer.getsaved()
def pre_execute(self):
    """
    Compute the GMFs, build the epsilons, the riskinputs, and a dictionary
    with the unit of measure, used in the export phase.
    """
    oq = self.oqparam
    if 'gmfs' in oq.inputs:
        self.pre_calculator = None  # the GMFs are given in input
    base.RiskCalculator.pre_execute(self)
    logging.info('Building the epsilons')
    num_assets = len(self.assetcol)
    num_events = oq.number_of_ground_motion_fields
    eps = (numpy.zeros((num_assets, num_events), numpy.float32)
           if oq.ignore_covs else self.make_eps(num_events))
    self.datastore['etags'], gmfs = calc.get_gmfs(
        self.datastore, self.precalc)
    hazard_by_rlz = {rlz: gmfs[rlz.ordinal]
                     for rlz in self.rlzs_assoc.realizations}
    self.riskinputs = self.build_riskinputs('gmf', hazard_by_rlz, eps)
    self.param['number_of_ground_motion_fields'] = num_events
    self.param['insured_losses'] = oq.insured_losses
    self.param['asset_loss_table'] = oq.asset_loss_table
def pre_execute(self):
    """Build the riskinputs from the GMFs and set up the monitor."""
    oq = self.oqparam
    if 'gmfs' in oq.inputs:
        # the GMFs are given in input, no hazard precalculation
        self.pre_calculator = None
    base.RiskCalculator.pre_execute(self)
    self.monitor.consequence_models = riskmodels.get_risk_models(
        oq, 'consequence')
    self.etags, gmf_arrays = calc.get_gmfs(self.datastore)
    self.riskinputs = self.build_riskinputs(gmf_arrays)
    self.monitor.taxonomies = sorted(self.taxonomies)
def pre_execute(self):
    """Build the riskinputs from the GMFs and set up the monitor."""
    if 'gmfs' in self.oqparam.inputs:
        self.pre_calculator = None  # GMFs given in input
    base.RiskCalculator.pre_execute(self)
    cmodels = riskmodels.get_risk_models(self.oqparam, 'consequence')
    self.monitor.consequence_models = cmodels
    etags, gmfs = calc.get_gmfs(self.datastore)
    self.datastore['etags'] = etags
    self.riskinputs = self.build_riskinputs(gmfs)
    self.monitor.taxonomies = sorted(self.taxonomies)
def pre_execute(self):
    """Build the riskinputs and fill the ``param`` dictionary."""
    oq = self.oqparam
    if 'gmfs' in oq.inputs:
        self.pre_calculator = None  # GMFs given in input
    base.RiskCalculator.pre_execute(self)
    self.param['number_of_ground_motion_fields'] = (
        oq.number_of_ground_motion_fields)
    self.param['consequence_models'] = riskmodels.get_risk_models(
        oq, 'consequence')
    _, gmf_arrays = calc.get_gmfs(self.datastore, self.precalc)
    self.riskinputs = self.build_riskinputs('gmf', gmf_arrays)
    self.param['taxonomies'] = sorted(self.taxonomies)
def pre_execute(self):
    """Build the riskinputs from the GMFs and set up the monitor."""
    if 'gmfs' in self.oqparam.inputs:
        self.pre_calculator = None  # GMFs given in input
    base.RiskCalculator.pre_execute(self)
    self.monitor.consequence_models = riskmodels.get_risk_models(
        self.oqparam, 'consequence')
    etags, gmfs = calc.get_gmfs(self.datastore, self.precalc)
    self.datastore['etags'] = etags
    realizations = self.csm_info.get_rlzs_assoc().realizations
    # one GMF array per realization
    self.riskinputs = self.build_riskinputs(dict(zip(realizations, gmfs)))
    self.monitor.taxonomies = sorted(self.taxonomies)
def pre_execute(self):
    """
    Compute the GMFs, build the epsilons, the riskinputs, and a dictionary
    with the unit of measure, used in the export phase.
    """
    oq = self.oqparam
    if 'gmfs' in oq.inputs:
        self.pre_calculator = None  # GMFs given in input
    base.RiskCalculator.pre_execute(self)
    logging.info('Building the epsilons')
    eps = self.make_eps(oq.number_of_ground_motion_fields)
    etags, gmfs = calc.get_gmfs(self.datastore)
    self.datastore['etags'] = etags
    self.riskinputs = self.build_riskinputs(gmfs, eps)
def pre_execute(self):
    """
    Compute the GMFs, build the epsilons, the riskinputs, and a dictionary
    with the unit of measure, used in the export phase.
    """
    oq = self.oqparam
    if 'gmfs' in oq.inputs:
        self.pre_calculator = None  # GMFs given in input
    base.RiskCalculator.pre_execute(self)
    logging.info('Building the epsilons')
    num_events = oq.number_of_ground_motion_fields
    eps_matrix = self.make_eps(num_events)
    self.etags, gmfs = calc.get_gmfs(self.datastore)
    self.riskinputs = self.build_riskinputs(gmfs, eps_matrix)
def pre_execute(self):
    """Build the riskinputs and fill the ``param`` dictionary."""
    oq = self.oqparam
    if 'gmfs' in oq.inputs:
        self.pre_calculator = None  # GMFs given in input
    base.RiskCalculator.pre_execute(self)
    self.param['number_of_ground_motion_fields'] = (
        oq.number_of_ground_motion_fields)
    self.param['consequence_models'] = riskmodels.get_risk_models(
        oq, 'consequence')
    etags, gmfs = calc.get_gmfs(self.datastore, self.precalc)
    self.datastore['etags'] = etags
    rlzs = self.csm_info.get_rlzs_assoc().realizations
    # one GMF array per realization
    self.riskinputs = self.build_riskinputs('gmf', dict(zip(rlzs, gmfs)))
    self.param['taxonomies'] = sorted(self.taxonomies)
def pre_execute(self):
    """
    Compute the GMFs, build the epsilons, the riskinputs, and a dictionary
    with the unit of measure, used in the export phase.
    """
    oq = self.oqparam
    if 'gmfs' in oq.inputs:
        self.pre_calculator = None  # GMFs given in input
    base.RiskCalculator.pre_execute(self)
    logging.info('Building the epsilons')
    if oq.ignore_covs:
        eps = None  # no epsilons needed when the covs are ignored
    else:
        eps = self.make_eps(oq.number_of_ground_motion_fields)
    etags, gmfs = calc.get_gmfs(self.datastore, self.precalc)
    self.datastore['etags'] = etags
    hazard = {rlz: gmfs[rlz.ordinal]
              for rlz in self.rlzs_assoc.realizations}
    self.riskinputs = self.build_riskinputs(hazard, eps)
def export_gmf_data_csv(ekey, dstore):
    """
    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names
    """
    oq = dstore['oqparam']
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    if 'scenario' in oq.calculation_mode:
        imtls = oq.imtls
        gsims = [str(rlz.gsim_rlz) for rlz in rlzs_assoc.realizations]
        # one column per event, named 000, 001, ...
        n_gmfs = oq.number_of_ground_motion_fields
        events_dt = numpy.dtype(
            [('%03d' % i, F32) for i in range(n_gmfs)])
        etags, arrays = calc.get_gmfs(dstore)
        sitemesh = get_mesh(dstore['sitecol'])
        csvwriter = writers.CsvWriter(fmt='%.5f')
        for gsim, gmfa in zip(gsims, arrays):  # gmfa of shape (N, I, E)
            for imti, imt in enumerate(imtls):
                table = numpy.zeros(len(gmfa), events_dt)
                for e, event in enumerate(events_dt.names):
                    table[event] = gmfa[:, imti, e]
                dest = dstore.build_fname(
                    'gmf', '%s-%s' % (gsim, imt), 'csv')
                csvwriter.save(
                    util.compose_arrays(sitemesh, table), dest)
        return csvwriter.getsaved()
    # event based
    eid = int(ekey[0].split('/')[1]) if '/' in ekey[0] else None
    gmfa = numpy.fromiter(
        GmfDataGetter.gen_gmfs(dstore['gmf_data'], rlzs_assoc, eid),
        gmf_data_dt)
    if eid is None:  # new format: a single sorted CSV file
        fname = dstore.build_fname('gmf', 'data', 'csv')
        gmfa.sort(order=['rlzi', 'sid', 'eid', 'imti'])
        writers.write_csv(fname, gmfa)
        return [fname]
    # old format: one file per realization for the given eid
    fnames = []
    imts = list(oq.imtls)
    for rlzi, array in group_array(gmfa, 'rlzi').items():
        rlz = rlzs_assoc.realizations[rlzi]
        data, comment = _build_csv_data(
            array, rlz, dstore['sitecol'], imts, oq.investigation_time)
        fname = dstore.build_fname(
            'gmf', '%d-rlz-%03d' % (eid, rlzi), 'csv')
        writers.write_csv(fname, data, comment=comment)
        fnames.append(fname)
    return fnames
def export_gmf_scenario(ekey, dstore):
    """
    Export the scenario GMFs in CSV format, one file per GSIM and IMT.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: the list of exported file names (empty for event based)
    """
    oq = dstore['oqparam']
    if 'scenario' in oq.calculation_mode:
        # one column per event, named 000, 001, ...
        fields = ['%03d' % i for i in range(len(dstore['etags']))]
        dt = numpy.dtype([(f, F32) for f in fields])
        # the etags returned by get_gmfs are not needed here
        _, gmfs_by_trt_gsim = calc.get_gmfs(dstore)
        sitemesh = dstore['sitemesh']
        writer = writers.CsvWriter(fmt='%.5f')
        for (trt, gsim), gmfs_ in gmfs_by_trt_gsim.items():
            for imt in gmfs_.dtype.names:
                gmfs = numpy.zeros(len(gmfs_), dt)
                for i in range(len(gmfs)):
                    gmfs[i] = tuple(gmfs_[imt][i])
                dest = dstore.export_path('gmf-%s-%s.csv' % (gsim, imt))
                data = util.compose_arrays(sitemesh, gmfs)
                writer.save(data, dest)
        # returning inside the branch avoids referencing `writer` when it
        # was never bound (the original returned it after the if/else)
        return writer.getsaved()
    else:  # event based
        # logging.warn is a deprecated alias of logging.warning
        logging.warning(
            'Not exporting the full GMFs for event_based, but you can'
            ' specify the rupture ordinals with gmfs:R1,...,Rn')
        return []
def pre_execute(self):
    """
    Run the hazard pre-calculation, build the epsilon matrix, the
    riskinputs and the events array, and initialize the parameters and
    datasets used by the task distribution and post-processing phases.
    """
    # logging.warn is a deprecated alias of logging.warning
    logging.warning('%s is still experimental', self.__class__.__name__)
    base.RiskCalculator.pre_execute(self)
    logging.info('Building the epsilons')
    oq = self.oqparam
    self.L = len(self.riskmodel.lti)  # number of loss types
    self.T = len(self.assetcol.taxonomies)
    self.A = len(self.assetcol)  # number of assets
    self.E = oq.number_of_ground_motion_fields  # number of events
    self.I = oq.insured_losses + 1
    if oq.ignore_covs:
        # no uncertainty on the loss ratios: all-zero epsilons
        eps = numpy.zeros((self.A, self.E), numpy.float32)
    else:
        eps = self.make_eps(self.E)
    eids, gmfs = calc.get_gmfs(self.datastore, self.precalc)
    self.R = len(gmfs)  # number of realizations
    self.riskinputs = self.build_riskinputs('gmf', gmfs, eps, eids)
    self.param['assetcol'] = self.assetcol
    self.param['insured_losses'] = oq.insured_losses
    self.param['avg_losses'] = oq.avg_losses
    self.param['asset_loss_table'] = oq.asset_loss_table or oq.loss_ratios
    self.param['elt_dt'] = numpy.dtype(
        [('eid', U64), ('loss', (F32, (self.L * self.I,)))])
    self.taskno = 0
    self.start = 0
    self.datastore.create_dset('losses_by_tag-rlzs', F32,
                               (self.T, self.R, self.L * self.I))
    if oq.avg_losses:  # removed the redundant local `avg_losses`
        self.dset = self.datastore.create_dset(
            'avg_losses-rlzs', F32, (self.A, self.R, self.L * self.I))
    # self.E is already oq.number_of_ground_motion_fields; use it directly
    events = numpy.zeros(self.E, calc.stored_event_dt)
    events['eid'] = eids
    self.datastore['events/grp-00'] = events