Example #1
0
 def epsilon_getter(self):
     """
     :returns: a callable (start, stop) producing a slice of epsilons
     """
     oq = self.oqparam
     # epsilons are not needed when covs are ignored or absent
     no_eps = oq.ignore_covs or not self.riskmodel.covs
     return riskinput.make_epsilon_getter(
         len(self.assetcol), self.E, oq.asset_correlation,
         oq.master_seed, no_eps)
Example #2
0
    def execute(self):
        """
        Run the calculator and aggregate the results.

        :returns: the dictionary returned by save_results
            ({sm_id: #events}), or None if the event based risk part
            was already computed ('all_loss_ratios' in the datastore)
        """
        # NB: logging.warn is a deprecated alias of logging.warning
        if self.oqparam.number_of_logic_tree_samples:
            logging.warning('The event based risk calculator with sampling is '
                            'EXPERIMENTAL, UNTESTED and SLOW')
        if self.oqparam.ground_motion_fields:
            logging.warning('To store the ground motion fields change '
                            'calculation_mode = event_based')
        if self.oqparam.hazard_curves_from_gmfs:
            logging.warning('To compute the hazard curves change '
                            'calculation_mode = event_based')

        if 'all_loss_ratios' in self.datastore:
            # event based risk calculation already done, postprocess
            EbrPostCalculator(self).run(close=False)
            return

        self.csm_info = self.datastore['csm_info']
        if self.precalc:  # ruptures computed by this very job
            self.ruptures_by_grp = self.precalc.result
            # the ordering of the ruptures is essential for repeatability
            for grp in self.ruptures_by_grp:
                self.ruptures_by_grp[grp].sort(
                    key=operator.attrgetter('serial'))
        else:  # there is a parent calculation
            self.ruptures_by_grp = calc.RuptureGetter.from_(
                self.datastore.parent)
        num_rlzs = 0
        allres = []
        source_models = self.csm_info.source_models
        self.sm_by_grp = self.csm_info.get_sm_by_grp()
        num_events = len(self.datastore['events'])
        if not hasattr(self, 'assetcol'):
            self.assetcol = self.datastore['assetcol']
        # epsilon getter shared by all tasks; the last argument disables
        # the epsilons when the coefficients of variation are ignored
        self.get_eps = riskinput.make_epsilon_getter(
            len(self.assetcol), num_events, self.oqparam.asset_correlation,
            self.oqparam.master_seed, self.oqparam.ignore_covs
            or not self.riskmodel.covs)
        self.assets_by_site = self.assetcol.assets_by_site()
        self.start = 0
        self.riskmodel.taxonomy = self.assetcol.tagcol.taxonomy
        for i, args in enumerate(self.gen_args()):
            ires = self.start_tasks(*args)
            allres.append(ires)
            # each result gets the slice of realizations it contributes
            ires.rlz_slice = slice(num_rlzs, num_rlzs + ires.num_rlzs)
            num_rlzs += ires.num_rlzs
            for sg in source_models[i].src_groups:
                sg.eff_ruptures = ires.num_ruptures.get(sg.id, 0)
        num_events = self.save_results(allres, num_rlzs)
        return num_events  # {sm_id: #events}
Example #3
0
    def execute(self):
        """
        Run the calculator and aggregate the results.

        :returns: the dictionary returned by save_results
            ({sm_id: #events}), or None if the event based risk part
            was already computed ('all_loss_ratios' in the datastore)
        """
        # NB: logging.warn is a deprecated alias of logging.warning
        if self.oqparam.number_of_logic_tree_samples:
            logging.warning('The event based risk calculator with sampling is '
                            'EXPERIMENTAL, UNTESTED and SLOW')
        if self.oqparam.ground_motion_fields:
            logging.warning('To store the ground motion fields change '
                            'calculation_mode = event_based')
        if self.oqparam.hazard_curves_from_gmfs:
            logging.warning('To compute the hazard curves change '
                            'calculation_mode = event_based')

        if 'all_loss_ratios' in self.datastore:
            # event based risk calculation already done, postprocess
            EbrPostCalculator(self).run(close=False)
            return

        self.csm_info = self.datastore['csm_info']
        with self.monitor('reading ruptures', autoflush=True):
            ruptures_by_grp = (self.precalc.result if self.precalc else
                               calc.get_ruptures_by_grp(self.datastore.parent))
            # the ordering of the ruptures is essential for repeatability
            for grp in ruptures_by_grp:
                ruptures_by_grp[grp].sort(key=operator.attrgetter('serial'))
        num_rlzs = 0
        allres = []
        source_models = self.csm_info.source_models
        self.sm_by_grp = self.csm_info.get_sm_by_grp()
        num_events = sum(ebr.multiplicity for grp in ruptures_by_grp
                         for ebr in ruptures_by_grp[grp])
        # epsilon getter shared by all tasks; the last argument disables
        # the epsilons when the coefficients of variation are ignored
        self.get_eps = riskinput.make_epsilon_getter(
            len(self.assetcol), num_events, self.oqparam.asset_correlation,
            self.oqparam.master_seed, self.oqparam.ignore_covs
            or not self.riskmodel.covs)
        self.assets_by_site = self.assetcol.assets_by_site()
        self.start = 0
        for i, args in enumerate(self.gen_args(ruptures_by_grp)):
            ires = self.start_tasks(*args)
            allres.append(ires)
            # each result gets the slice of realizations it contributes
            ires.rlz_slice = slice(num_rlzs, num_rlzs + ires.num_rlzs)
            num_rlzs += ires.num_rlzs
            for sg in source_models[i].src_groups:
                sg.eff_ruptures = ires.num_ruptures.get(sg.id, 0)
        num_events = self.save_results(allres, num_rlzs)
        return num_events  # {sm_id: #events}
Example #4
0
 def pre_execute(self):
     """
     Read the GMFs (from a previous hazard calculation or from a gmfs
     file), build the risk inputs and initialize the accumulators used
     by the calculator.
     """
     # NB: logging.warn is a deprecated alias of logging.warning
     logging.warning('%s is still experimental', self.__class__.__name__)
     base.RiskCalculator.pre_execute(self)
     oq = self.oqparam
     self.L = len(self.riskmodel.lti)  # number of loss types
     self.T = len(self.assetcol.tagcol)  # number of tags
     self.A = len(self.assetcol)  # number of assets
     self.I = oq.insured_losses + 1
     if oq.hazard_calculation_id:  # read the GMFs from a previous calc
         assert 'gmfs' not in oq.inputs, 'no gmfs_file when using --hc!'
         parent = self.read_previous(oq.hazard_calculation_id)
         oqp = parent['oqparam']
         # the parent parameters must be consistent with the current ones
         if oqp.investigation_time != oq.investigation_time:
             raise ValueError(
                 'The parent calculation was using investigation_time=%s'
                 ' != %s' % (oqp.investigation_time, oq.investigation_time))
         if oqp.minimum_intensity != oq.minimum_intensity:
             raise ValueError(
                 'The parent calculation was using minimum_intensity=%s'
                 ' != %s' % (oqp.minimum_intensity, oq.minimum_intensity))
         self.eids = parent['events']['eid']
         self.datastore['csm_info'] = parent['csm_info']
         self.rlzs_assoc = parent['csm_info'].get_rlzs_assoc()
         self.R = len(self.rlzs_assoc.realizations)
     else:  # read the GMFs from a file
         if 'site_model' in oq.inputs:
             raise InvalidFile('it makes no sense to define a site model in'
                               ' %(job_ini)s' % oq.inputs)
         with self.monitor('reading GMFs', measuremem=True):
             fname = oq.inputs['gmfs']
             sids = self.sitecol.complete.sids
             if fname.endswith('.xml'):  # old approach
                 self.eids, self.R = base.get_gmfs(self)
             else:  # import csv
                 self.eids, self.R, self.gmdata = base.import_gmfs(
                     self.datastore, fname, sids)
                 event_based.save_gmdata(self, self.R)
     self.E = len(self.eids)  # number of events
     # the getter is called immediately to materialize the epsilons;
     # the last argument disables them when the covs are ignored
     eps = riskinput.make_epsilon_getter(
         len(self.assetcol), self.E, oq.asset_correlation, oq.master_seed,
         oq.ignore_covs or not self.riskmodel.covs)()
     self.riskinputs = self.build_riskinputs('gmf', eps, self.eids)
     self.param['gmf_ebrisk'] = True
     self.param['insured_losses'] = oq.insured_losses
     self.param['avg_losses'] = oq.avg_losses
     self.param['ses_ratio'] = oq.ses_ratio
     self.param['asset_loss_table'] = oq.asset_loss_table
     self.param['elt_dt'] = numpy.dtype([('eid', U64), ('rlzi', U16),
                                         ('loss', (F32,
                                                   (self.L * self.I, )))])
     self.taskno = 0
     self.start = 0
     avg_losses = self.oqparam.avg_losses
     if avg_losses:
         self.dset = self.datastore.create_dset(
             'avg_losses-rlzs', F32, (self.A, self.R, self.L * self.I))
     self.agglosses = numpy.zeros((self.E, self.R, self.L * self.I), F32)
     self.vals = self.assetcol.values()
     self.num_losses = numpy.zeros((self.A, self.R), U32)
     if oq.asset_loss_table:
         # save all_loss_ratios
         self.alr_nbytes = 0
         self.indices = collections.defaultdict(list)  # sid -> pairs