Example #1
 def pre_execute(self):
     if 'gmfs' in self.oqparam.inputs:
         # GMFs are given as an input file: disable the hazard pre-calculator
         self.pre_calculator = None
     base.RiskCalculator.pre_execute(self)
     base.get_gmfs(self)  # read the GMFs (stored by the pre-calculator or given as input)
     self.param['number_of_ground_motion_fields'] = (
         self.oqparam.number_of_ground_motion_fields)
     self.param['consequence_models'] = riskmodels.get_risk_models(
         self.oqparam, 'consequence')
     # build the risk inputs from the GMFs and collect the asset tags
     self.riskinputs = self.build_riskinputs('gmf')
     self.param['tags'] = self.assetcol.tags()
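A minimal, self-contained sketch (not OpenQuake code) of the guard used above: when ground motion fields are already listed among the job inputs, the hazard pre-calculator is disabled so the GMFs are read instead of recomputed. The FakeCalc class and its attributes are purely illustrative stand-ins for the calculator and its oqparam.

class FakeCalc:
    pre_calculator = 'scenario'          # hypothetical default pre-calculator

    def __init__(self, inputs):
        self.inputs = inputs             # stands in for self.oqparam.inputs

    def pre_execute(self):
        if 'gmfs' in self.inputs:        # GMFs supplied by the user
            self.pre_calculator = None   # skip the hazard pre-calculation
        return self.pre_calculator

print(FakeCalc({'gmfs': 'gmfs.xml'}).pre_execute())  # None
print(FakeCalc({}).pre_execute())                    # 'scenario'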
Example #2
def export_gmf_spec(ekey, dstore, spec):
    """
    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :param spec: a string specifying what to export exactly
    """
    oq = dstore['oqparam']
    eids = numpy.array([int(rid) for rid in spec.split(',')])
    sitemesh = dstore['sitemesh']
    writer = writers.CsvWriter(fmt='%.5f')
    etags = dstore['etags']
    if 'scenario' in oq.calculation_mode:
        _, gmfs_by_trt_gsim = base.get_gmfs(dstore)
        gsims = sorted(gsim for trt, gsim in gmfs_by_trt_gsim)
        imts = gmfs_by_trt_gsim[0, gsims[0]].dtype.names
        gmf_dt = numpy.dtype([(str(gsim), F32) for gsim in gsims])
        for eid in eids:
            etag = etags[eid]
            for imt in imts:
                gmfa = numpy.zeros(len(sitemesh), gmf_dt)
                for gsim in gsims:
                    gmfa[str(gsim)] = gmfs_by_trt_gsim[0, gsim][imt][:, eid]
                dest = dstore.export_path('gmf-%s-%s.csv' % (etag, imt))
                data = util.compose_arrays(sitemesh, gmfa)
                writer.save(data, dest)
    else:  # event based
        for eid in eids:
            etag = etags[eid]
            for gmfa, imt in _get_gmfs(dstore, util.get_serial(etag), eid):
                dest = dstore.export_path('gmf-%s-%s.csv' % (etag, imt))
                data = util.compose_arrays(sitemesh, gmfa)
                writer.save(data, dest)
    return writer.getsaved()
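The scenario branch above builds one structured-array column per GSIM and joins it with the site mesh before writing the CSV. Below is a self-contained numpy sketch of that pattern; compose_arrays is a simplified stand-in for openquake's util.compose_arrays, and the GSIM names and values are made up.

import numpy

F32 = numpy.float32

def compose_arrays(a1, a2):
    # simplified stand-in: merge two structured arrays of equal length
    dt = numpy.dtype(a1.dtype.descr + a2.dtype.descr)
    out = numpy.zeros(len(a1), dt)
    for name in a1.dtype.names:
        out[name] = a1[name]
    for name in a2.dtype.names:
        out[name] = a2[name]
    return out

gsims = ['GsimA', 'GsimB']                       # hypothetical GSIM names
gmf_dt = numpy.dtype([(g, F32) for g in gsims])  # one column per GSIM
sitemesh = numpy.array([(0.0, 0.0), (0.1, 0.0)],
                       dtype=[('lon', F32), ('lat', F32)])
gmfa = numpy.zeros(len(sitemesh), gmf_dt)
gmfa['GsimA'] = [0.12, 0.34]                     # made-up ground motion values
gmfa['GsimB'] = [0.11, 0.29]
print(compose_arrays(sitemesh, gmfa))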
Example #3
 def pre_execute(self):
     """
     Compute the GMFs, build the epsilons, the riskinputs, and a dictionary
     with the unit of measure, used in the export phase.
     """
     if 'gmfs' in self.oqparam.inputs:
         self.pre_calculator = None
     base.RiskCalculator.pre_execute(self)
     if self.oqparam.shakemap_id or 'shakemap' in self.oqparam.inputs:
         self.read_shakemap()
         self.R = 1
     else:
         _, self.R = base.get_gmfs(self)
     self.assetcol = self.datastore['assetcol']
     A = len(self.assetcol)
     E = self.oqparam.number_of_ground_motion_fields
     logging.info('Building the epsilons')
     if self.oqparam.ignore_covs:
         eps = numpy.zeros((A, E), numpy.float32)
     else:
         eps = self.make_eps(E)
     self.riskinputs = self.build_riskinputs('gmf', eps, E)
     self.param['number_of_ground_motion_fields'] = E
     self.param['insured_losses'] = self.oqparam.insured_losses
     self.param['asset_loss_table'] = self.oqparam.asset_loss_table
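A short sketch of the epsilon-building step above: the matrix has one row per asset and one column per ground motion field, and ignore_covs short-circuits it to zeros. The standard-normal sampling below is only an assumption standing in for make_eps.

import numpy

A, E = 4, 10                       # number of assets and of ground motion fields
ignore_covs = False
if ignore_covs:
    eps = numpy.zeros((A, E), numpy.float32)                 # no uncertainty
else:
    rng = numpy.random.default_rng(42)                       # assumed sampling
    eps = rng.standard_normal((A, E)).astype(numpy.float32)
print(eps.shape)  # (4, 10)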
Example #4
 def pre_execute(self):
     if 'gmfs' in self.oqparam.inputs:
         self.pre_calculator = None
     base.RiskCalculator.pre_execute(self)
     self.monitor.consequence_models = riskmodels.get_risk_models(
         self.oqparam, 'consequence')
     _, gmfs = base.get_gmfs(self)
     self.riskinputs = self.build_riskinputs(gmfs)
     self.monitor.taxonomies = sorted(self.taxonomies)
Example #5
 def pre_execute(self):
     if 'gmfs' in self.oqparam.inputs:
         self.pre_calculator = None
     base.RiskCalculator.pre_execute(self)
     self.monitor.consequence_models = riskmodels.get_risk_models(
         self.oqparam, 'consequence')
     self.etags, gmfs = base.get_gmfs(self.datastore)
     self.riskinputs = self.build_riskinputs(gmfs)
     self.monitor.taxonomies = sorted(self.taxonomies)
Example #6
 def pre_execute(self):
     """
     Compute the GMFs, build the epsilons, the riskinputs, and a dictionary
     with the unit of measure, used in the export phase.
     """
     if 'gmfs' in self.oqparam.inputs:
         self.pre_calculator = None
     base.RiskCalculator.pre_execute(self)
     logging.info('Building the epsilons')
     self.epsilon_matrix = self.make_eps(
         self.oqparam.number_of_ground_motion_fields)
     sitecol, gmfs = base.get_gmfs(self)
     self.riskinputs = self.build_riskinputs(gmfs, self.epsilon_matrix)
Example #7
 def pre_execute(self):
     if 'gmfs' in self.oqparam.inputs:
         self.pre_calculator = None
     base.RiskCalculator.pre_execute(self)
     if self.oqparam.shakemap_id or 'shakemap' in self.oqparam.inputs:
         self.read_shakemap()
         self.R = 1
     else:
         _, self.R = base.get_gmfs(self)
     E = self.oqparam.number_of_ground_motion_fields
     self.param['number_of_ground_motion_fields'] = E
     self.param['consequence_models'] = riskmodels.get_risk_models(
         self.oqparam, 'consequence')
     self.riskinputs = self.build_riskinputs('gmf', num_events=E)
     self.param['tags'] = list(self.assetcol.tagcol)
Example #8
def export_gmf_scenario(ekey, dstore):
    oq = dstore['oqparam']
    if 'scenario' in oq.calculation_mode:
        fields = ['%03d' % i for i in range(len(dstore['etags']))]
        dt = numpy.dtype([(f, F32) for f in fields])
        etags, gmfs_by_trt_gsim = base.get_gmfs(dstore)
        sitemesh = dstore['sitemesh']
        writer = writers.CsvWriter(fmt='%.5f')
        for (trt, gsim), gmfs_ in gmfs_by_trt_gsim.items():
            for imt in gmfs_.dtype.names:
                gmfs = numpy.zeros(len(gmfs_), dt)
                for i in range(len(gmfs)):
                    gmfs[i] = tuple(gmfs_[imt][i])
                dest = dstore.export_path('gmf-%s-%s.csv' % (gsim, imt))
                data = util.compose_arrays(sitemesh, gmfs)
                writer.save(data, dest)
    else:  # event based
        logging.warn('Not exporting the full GMFs for event_based, but you can'
                     ' specify the rupture ordinals with gmfs:R1,...,Rn')
        return []
    return writer.getsaved()
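The scenario branch turns a (site, event) matrix into a structured array with one zero-padded column per event ('000', '001', ...). A minimal numpy sketch of that reshaping, with made-up values:

import numpy

F32 = numpy.float32
values = numpy.array([[0.10, 0.20, 0.30],        # site 0, events 0..2
                      [0.15, 0.25, 0.35]], F32)  # site 1, events 0..2
fields = ['%03d' % i for i in range(values.shape[1])]
dt = numpy.dtype([(f, F32) for f in fields])
gmfs = numpy.zeros(len(values), dt)
for i in range(len(gmfs)):
    gmfs[i] = tuple(values[i])                   # one row per site
print(gmfs.dtype.names)  # ('000', '001', '002')
print(gmfs[0])           # (0.1, 0.2, 0.3)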
Example #9
 def pre_execute(self):
     logging.warn('%s is still experimental', self.__class__.__name__)
     base.RiskCalculator.pre_execute(self)
     oq = self.oqparam
     self.L = len(self.riskmodel.lti)
     self.T = len(self.assetcol.tagcol)
     self.A = len(self.assetcol)
     self.I = oq.insured_losses + 1
     if oq.hazard_calculation_id:  # read the GMFs from a previous calc
         assert 'gmfs' not in oq.inputs, 'no gmfs_file when using --hc!'
         parent = self.read_previous(oq.hazard_calculation_id)
         oqp = parent['oqparam']
         if oqp.investigation_time != oq.investigation_time:
             raise ValueError(
                 'The parent calculation was using investigation_time=%s'
                 ' != %s' % (oqp.investigation_time, oq.investigation_time))
         if oqp.minimum_intensity != oq.minimum_intensity:
             raise ValueError(
                 'The parent calculation was using minimum_intensity=%s'
                 ' != %s' % (oqp.minimum_intensity, oq.minimum_intensity))
         self.eids = parent['events']['eid']
         self.datastore['csm_info'] = parent['csm_info']
         self.rlzs_assoc = parent['csm_info'].get_rlzs_assoc()
         self.R = len(self.rlzs_assoc.realizations)
     else:  # read the GMFs from a file
         if 'site_model' in oq.inputs:
             raise InvalidFile('it makes no sense to define a site model in'
                               ' %(job_ini)s' % oq.inputs)
         with self.monitor('reading GMFs', measuremem=True):
             fname = oq.inputs['gmfs']
             sids = self.sitecol.complete.sids
             if fname.endswith('.xml'):  # old approach
                 self.eids, self.R = base.get_gmfs(self)
             else:  # import csv
                 self.eids, self.R, self.gmdata = base.import_gmfs(
                     self.datastore, fname, sids)
                 event_based.save_gmdata(self, self.R)
     self.E = len(self.eids)
     eps = riskinput.make_epsilon_getter(
         len(self.assetcol), self.E, oq.asset_correlation, oq.master_seed,
         oq.ignore_covs or not self.riskmodel.covs)()
     self.riskinputs = self.build_riskinputs('gmf', eps, self.eids)
     self.param['gmf_ebrisk'] = True
     self.param['insured_losses'] = oq.insured_losses
     self.param['avg_losses'] = oq.avg_losses
     self.param['ses_ratio'] = oq.ses_ratio
     self.param['asset_loss_table'] = oq.asset_loss_table
     self.param['elt_dt'] = numpy.dtype([('eid', U64), ('rlzi', U16),
                                         ('loss', (F32,
                                                   (self.L * self.I, )))])
     self.taskno = 0
     self.start = 0
     avg_losses = self.oqparam.avg_losses
     if avg_losses:
         self.dset = self.datastore.create_dset(
             'avg_losses-rlzs', F32, (self.A, self.R, self.L * self.I))
     self.agglosses = numpy.zeros((self.E, self.R, self.L * self.I), F32)
     self.vals = self.assetcol.values()
     self.num_losses = numpy.zeros((self.A, self.R), U32)
     if oq.asset_loss_table:
         # save all_loss_ratios
         self.alr_nbytes = 0
         self.indices = collections.defaultdict(list)  # sid -> pairs
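The event loss table dtype built above pairs an event id and a realization index with a vector of L * I losses. A small sketch of what records of that dtype look like, with made-up sizes and values:

import numpy

U16, U64, F32 = numpy.uint16, numpy.uint64, numpy.float32
L, I = 2, 1                                   # loss types; insured_losses + 1
elt_dt = numpy.dtype([('eid', U64), ('rlzi', U16),
                      ('loss', (F32, (L * I,)))])
record = numpy.zeros(1, elt_dt)
record['eid'] = 42                            # made-up event id
record['loss'] = [1000.0, 250.0]              # one value per loss type
print(record)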
Example #10
 def pre_execute(self):
     oq = self.oqparam
     if 'gmfs' in oq.inputs:
         self.pre_calculator = None
     base.RiskCalculator.pre_execute(self)
     if not hasattr(self, 'assetcol'):
         self.assetcol = self.datastore['assetcol']
     self.L = len(self.riskmodel.lti)
     self.T = len(self.assetcol.tagcol)
     self.A = len(self.assetcol)
     self.I = oq.insured_losses + 1
     parent = self.datastore.parent
     self.precomputed_gmfs = 'gmf_data' in parent or 'gmfs' in oq.inputs
     if not self.precomputed_gmfs:
         return
     if 'gmf_data' in parent:
         # read the GMFs from a previous calc
         assert 'gmfs' not in oq.inputs, 'no gmfs_file when using --hc!'
         oqp = parent['oqparam']
         if oqp.investigation_time != oq.investigation_time:
             raise ValueError(
                 'The parent calculation was using investigation_time=%s'
                 ' != %s' % (oqp.investigation_time, oq.investigation_time))
         if oqp.minimum_intensity != oq.minimum_intensity:
             raise ValueError(
                 'The parent calculation was using minimum_intensity=%s'
                 ' != %s' % (oqp.minimum_intensity, oq.minimum_intensity))
         # sorting the eids is essential to get the epsilons in the right
         # order (i.e. consistent with the one used in ebr from ruptures)
         self.eids = sorted(parent['events']['eid'])
         self.datastore['csm_info'] = parent['csm_info']
         self.rlzs_assoc = parent['csm_info'].get_rlzs_assoc()
         self.R = len(self.rlzs_assoc.realizations)
     else:  # read the GMFs from a file
         with self.monitor('reading GMFs', measuremem=True):
             fname = oq.inputs['gmfs']
             sids = self.sitecol.complete.sids
             if fname.endswith('.xml'):  # old approach
                 self.eids, self.R = base.get_gmfs(self)
             else:  # import csv
                 self.eids, self.R, self.gmdata = base.import_gmfs(
                     self.datastore, fname, sids)
                 event_based.save_gmdata(self, self.R)
     self.E = len(self.eids)
     eps = self.epsilon_getter()()
     self.riskinputs = self.build_riskinputs('gmf', eps, self.E)
     self.param['gmf_ebrisk'] = True
     self.param['insured_losses'] = oq.insured_losses
     self.param['avg_losses'] = oq.avg_losses
     self.param['ses_ratio'] = oq.ses_ratio
     self.param['asset_loss_table'] = oq.asset_loss_table
     self.param['elt_dt'] = numpy.dtype([('eid', U64), ('rlzi', U16),
                                         ('loss', (F32,
                                                   (self.L * self.I, )))])
     self.taskno = 0
     self.start = 0
     avg_losses = self.oqparam.avg_losses
     if avg_losses:
         self.dset = self.datastore.create_dset(
             'avg_losses-rlzs', F32, (self.A, self.R, self.L * self.I))
     self.agglosses = numpy.zeros((self.E, self.R, self.L * self.I), F32)
     self.vals = self.assetcol.values()
     self.num_losses = numpy.zeros((self.A, self.R), U32)
     if oq.asset_loss_table:
         # save all_loss_ratios
         self.alr_nbytes = 0
         self.indices = collections.defaultdict(list)  # sid -> pairs
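Both of the last two examples refuse to reuse a parent hazard calculation whose key parameters differ from the current job. A generic sketch of that consistency check; the helper name and the SimpleNamespace objects are illustrative only, not part of the engine.

from types import SimpleNamespace

def check_consistency(parent_oq, child_oq,
                      names=('investigation_time', 'minimum_intensity')):
    # hypothetical helper comparing a few parameters between parent and child
    for name in names:
        p, c = getattr(parent_oq, name), getattr(child_oq, name)
        if p != c:
            raise ValueError('The parent calculation was using %s=%s != %s'
                             % (name, p, c))

parent_oq = SimpleNamespace(investigation_time=50, minimum_intensity={})
child_oq = SimpleNamespace(investigation_time=1, minimum_intensity={})
check_consistency(parent_oq, child_oq)  # raises ValueError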