Example #1
    def save_results(self, allres, num_rlzs):
        """
        :param allres: an iterable of result iterators
        :param num_rlzs: the total number of realizations
        :returns: the total number of events
        """
        oq = self.oqparam
        self.R = num_rlzs
        self.A = len(self.assetcol)
        self.tagmask = self.assetcol.tagmask()  # shape (A, T)
        if oq.asset_loss_table:
            # save all_loss_ratios
            self.alr_nbytes = 0
            self.indices = collections.defaultdict(list)  # sid -> pairs

        if oq.avg_losses:
            self.dset = self.datastore.create_dset(
                'avg_losses-rlzs', F32, (self.A, self.R, self.L * self.I))

        num_events = collections.Counter()
        self.gmdata = AccumDict(accum=numpy.zeros(len(oq.imtls) + 2, F32))
        self.taskno = 0
        self.start = 0
        self.num_losses = numpy.zeros((self.A, self.R), U32)
        for res in allres:
            start, stop = res.rlz_slice.start, res.rlz_slice.stop
            for dic in res:
                for r, arr in dic.pop('gmdata').items():
                    self.gmdata[start + r] += arr
                self.save_losses(dic, start)
            logging.debug(
                'Saving results for source model #%d, realizations %d:%d',
                res.sm_id + 1, start, stop)
            if hasattr(res, 'eff_ruptures'):  # for UCERF
                self.eff_ruptures += res.eff_ruptures
            if hasattr(res, 'ruptures_by_grp'):  # for UCERF
                save_ruptures(self, res.ruptures_by_grp)
            elif hasattr(res, 'events_by_grp'):  # for UCERF
                for grp_id in res.events_by_grp:
                    events = res.events_by_grp[grp_id]
                    self.datastore.extend('events', events)
            num_events[res.sm_id] += res.num_events
        if 'all_loss_ratios' in self.datastore:
            self.datastore['all_loss_ratios/num_losses'] = self.num_losses
            self.datastore.set_attrs('all_loss_ratios/num_losses',
                                     nbytes=self.num_losses.nbytes)
        del self.num_losses
        event_based.save_gmdata(self, num_rlzs)
        return num_events
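
The example above merges per-task gmdata arrays with +=, relying on the fact
that AccumDict(accum=...) creates a zero array for missing keys and adds
values elementwise. A minimal stand-in sketch of that pattern (the
defaultdict, num_imts and the sample arrays are illustrative assumptions,
not the engine's AccumDict):

import collections
import numpy

F32 = numpy.float32
num_imts = 3  # hypothetical number of intensity measure types

# each realization maps to a zero-initialized array of num_imts + 2 slots;
# Example #4 below shows slot -2 holding the event count
gmdata = collections.defaultdict(lambda: numpy.zeros(num_imts + 2, F32))

# two partial results for realization 0, e.g. produced by two tasks
gmdata[0] += numpy.array([1.0, 2.0, 3.0, 1.0, 0.0], F32)
gmdata[0] += numpy.array([0.5, 0.5, 0.5, 1.0, 0.0], F32)
print(gmdata[0])  # [1.5 2.5 3.5 2.  0. ]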
Example #2
    def save_results(self, allres, num_rlzs):
        """
        :param allres: an iterable of result iterators
        :param num_rlzs: the total number of realizations
        :returns: the total number of events
        """
        self.R = num_rlzs
        self.A = len(self.assetcol)
        tags = [tag.encode('ascii') for tag in self.assetcol.tags()]
        T = len(tags)
        self.datastore.create_dset('losses_by_tag-rlzs', F32,
                                   (T, self.R, self.L * self.I))
        self.datastore.set_attrs('losses_by_tag-rlzs',
                                 tags=tags,
                                 nbytes=4 * T * self.R * self.L * self.I)
        if self.oqparam.asset_loss_table or self.oqparam.loss_ratios:
            # save all_loss_ratios
            self.alr_nbytes = 0
            self.indices = collections.defaultdict(list)  # sid -> pairs

        avg_losses = self.oqparam.avg_losses
        if avg_losses:
            self.dset = self.datastore.create_dset(
                'avg_losses-rlzs', F32, (self.A, self.R, self.L * self.I))

        num_events = collections.Counter()
        self.gmdata = AccumDict()  # a plain {} would fail on the += below
        self.taskno = 0
        self.start = 0
        for res in allres:
            start, stop = res.rlz_slice.start, res.rlz_slice.stop
            for dic in res:
                self.gmdata += dic.pop('gmdata')
                self.save_losses(dic, start)
            logging.debug(
                'Saving results for source model #%d, realizations %d:%d',
                res.sm_id + 1, start, stop)
            if hasattr(res, 'ruptures_by_grp'):
                save_ruptures(self, res.ruptures_by_grp)
            elif hasattr(res, 'events_by_grp'):
                for grp_id in res.events_by_grp:
                    events = res.events_by_grp[grp_id]
                    self.datastore.extend('events/grp-%02d' % grp_id, events)
            num_events[res.sm_id] += res.num_events
        event_based.save_gmdata(self, num_rlzs)
        return num_events
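
The nbytes attribute set on losses_by_tag-rlzs above is just the byte size
of a float32 array of shape (T, R, L * I), since float32 takes 4 bytes per
element. A quick check with hypothetical sizes:

import numpy

F32 = numpy.float32
T, R, L, I = 5, 2, 3, 1  # hypothetical tag/realization/loss-type/insured counts

arr = numpy.zeros((T, R, L * I), F32)
assert arr.nbytes == 4 * T * R * L * I  # 120 bytes here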
Example #3
    def save_results(self, allres, num_rlzs):
        """
        :param allres: an iterable of result iterators
        :param num_rlzs: the total number of realizations
        :returns: the total number of events
        """
        self.R = num_rlzs
        self.A = len(self.assetcol)
        num_tax = len(self.assetcol.taxonomies)
        self.datastore.create_dset('losses_by_taxon-rlzs', F32,
                                   (num_tax, self.R, self.L * self.I))

        if self.oqparam.asset_loss_table or self.oqparam.loss_ratios:
            # save all_loss_ratios
            self.T = sum(ires.num_tasks for ires in allres)
            self.alr_nbytes = 0
            self.datastore.create_dset('all_loss_ratios/indices', U32,
                                       (self.A, self.T, 2))

        avg_losses = self.oqparam.avg_losses
        if avg_losses:
            self.dset = self.datastore.create_dset(
                'avg_losses-rlzs', F32, (self.A, self.R, self.L * self.I))

        num_events = collections.Counter()
        self.gmdata = AccumDict()  # a plain {} would fail on the += below
        self.taskno = 0
        self.start = 0
        for res in allres:
            start, stop = res.rlz_slice.start, res.rlz_slice.stop
            for dic in res:
                self.gmdata += dic.pop('gmdata')
                self.save_losses(dic, start)
            logging.debug(
                'Saving results for source model #%d, realizations %d:%d',
                res.sm_id + 1, start, stop)
            if hasattr(res, 'ruptures_by_grp'):
                save_ruptures(self, res.ruptures_by_grp)
            elif hasattr(res, 'events_by_grp'):
                for grp_id in res.events_by_grp:
                    events = res.events_by_grp[grp_id]
                    self.datastore.extend('events/grp-%02d' % grp_id, events)
            num_events[res.sm_id] += res.num_events
        event_based.save_gmdata(self, num_rlzs)
        return num_events
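
Here self.T counts the tasks and all_loss_ratios/indices has one (A, T, 2)
entry per asset and task. A sketch of how such an index could be filled,
assuming each pair is a (start, stop) slice into a flat array of stored
loss ratios (an interpretation suggested by the self.start counter and the
"sid -> pairs" comments, not spelled out in these snippets):

import numpy

U32 = numpy.uint32
A, T = 4, 2  # hypothetical numbers of assets and tasks

indices = numpy.zeros((A, T, 2), U32)
start = 0
for taskno in range(T):
    for aid in range(A):
        n = (aid + taskno) % 3  # pretend this asset produced n loss records
        indices[aid, taskno] = (start, start + n)
        start += n

start, stop = indices[2, 1]
print('asset 2, task 1 -> rows %d:%d' % (start, stop))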
Example #4
    def post_execute(self, result):
        """
        Compute stats for the aggregated distributions and save
        the results on the datastore.
        """
        dstates = self.riskmodel.damage_states
        ltypes = self.riskmodel.loss_types
        L = len(ltypes)
        R = len(self.rlzs_assoc.realizations)
        D = len(dstates)
        N = len(self.assetcol)

        # damage distributions
        dt_list = []
        for ltype in ltypes:
            dt_list.append(
                (ltype, numpy.dtype([('mean', (F32, D)),
                                     ('stddev', (F32, D))])))
        multi_stat_dt = numpy.dtype(dt_list)
        d_asset = numpy.zeros((N, R, L, 2, D), F32)
        for (l, r, a, stat) in result['d_asset']:
            d_asset[a, r, l] = stat
        self.datastore['dmg_by_asset'] = dist_by_asset(
            d_asset, multi_stat_dt, self.assetcol.array['number'])

        # consequence distributions
        if result['c_asset']:
            stat_dt = numpy.dtype([('mean', F32), ('stddev', F32)])
            c_asset = numpy.zeros((N, R, L), stat_dt)
            for (l, r, a, stat) in result['c_asset']:
                c_asset[a, r, l] = stat
            multi_stat_dt = self.oqparam.loss_dt(stat_dt)
            self.datastore['losses_by_asset'] = c_asset

        # save gmdata
        self.gmdata = result['gmdata']
        for arr in self.gmdata.values():
            arr[-2] = self.oqparam.number_of_ground_motion_fields  # events
        event_based.save_gmdata(self, R)
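
The composite dtype built above has one field per loss type, each holding a
mean and a stddev vector over the D damage states. A self-contained sketch
with hypothetical sizes (the loss type names and numbers are illustrative):

import numpy

F32 = numpy.float32
D = 3                                # hypothetical number of damage states
ltypes = ['structural', 'contents']  # hypothetical loss types

dt_list = [(lt, numpy.dtype([('mean', (F32, D)), ('stddev', (F32, D))]))
           for lt in ltypes]
multi_stat_dt = numpy.dtype(dt_list)

N = 2  # hypothetical number of assets
dmg = numpy.zeros(N, multi_stat_dt)
dmg['structural']['mean'][0] = [0.7, 0.2, 0.1]
print(dmg['structural']['mean'][0])  # [0.7 0.2 0.1]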
Example #5
    def pre_execute(self):
        logging.warning('%s is still experimental', self.__class__.__name__)
        base.RiskCalculator.pre_execute(self)
        oq = self.oqparam
        self.L = len(self.riskmodel.lti)
        self.T = len(self.assetcol.tagcol)
        self.A = len(self.assetcol)
        self.I = oq.insured_losses + 1
        if oq.hazard_calculation_id:  # read the GMFs from a previous calc
            assert 'gmfs' not in oq.inputs, 'no gmfs_file when using --hc!'
            parent = self.read_previous(oq.hazard_calculation_id)
            oqp = parent['oqparam']
            if oqp.investigation_time != oq.investigation_time:
                raise ValueError(
                    'The parent calculation was using investigation_time=%s'
                    ' != %s' % (oqp.investigation_time, oq.investigation_time))
            if oqp.minimum_intensity != oq.minimum_intensity:
                raise ValueError(
                    'The parent calculation was using minimum_intensity=%s'
                    ' != %s' % (oqp.minimum_intensity, oq.minimum_intensity))
            self.eids = parent['events']['eid']
            self.datastore['csm_info'] = parent['csm_info']
            self.rlzs_assoc = parent['csm_info'].get_rlzs_assoc()
            self.R = len(self.rlzs_assoc.realizations)
        else:  # read the GMFs from a file
            if 'site_model' in oq.inputs:
                raise InvalidFile('it makes no sense to define a site model in'
                                  ' %(job_ini)s' % oq.inputs)
            with self.monitor('reading GMFs', measuremem=True):
                fname = oq.inputs['gmfs']
                sids = self.sitecol.complete.sids
                if fname.endswith('.xml'):  # old approach
                    self.eids, self.R = base.get_gmfs(self)
                else:  # import csv
                    self.eids, self.R, self.gmdata = base.import_gmfs(
                        self.datastore, fname, sids)
                    event_based.save_gmdata(self, self.R)
        self.E = len(self.eids)
        eps = riskinput.make_epsilon_getter(
            len(self.assetcol), self.E, oq.asset_correlation, oq.master_seed,
            oq.ignore_covs or not self.riskmodel.covs)()
        self.riskinputs = self.build_riskinputs('gmf', eps, self.eids)
        self.param['gmf_ebrisk'] = True
        self.param['insured_losses'] = oq.insured_losses
        self.param['avg_losses'] = oq.avg_losses
        self.param['ses_ratio'] = oq.ses_ratio
        self.param['asset_loss_table'] = oq.asset_loss_table
        self.param['elt_dt'] = numpy.dtype([('eid', U64), ('rlzi', U16),
                                            ('loss', (F32,
                                                      (self.L * self.I, )))])
        self.taskno = 0
        self.start = 0
        avg_losses = self.oqparam.avg_losses
        if avg_losses:
            self.dset = self.datastore.create_dset(
                'avg_losses-rlzs', F32, (self.A, self.R, self.L * self.I))
        self.agglosses = numpy.zeros((self.E, self.R, self.L * self.I), F32)
        self.vals = self.assetcol.values()
        self.num_losses = numpy.zeros((self.A, self.R), U32)
        if oq.asset_loss_table:
            # save all_loss_ratios
            self.alr_nbytes = 0
            self.indices = collections.defaultdict(list)  # sid -> pairs
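
The elt_dt records built above form an event loss table: one row per
(event, realization) pair with a vector of L * I losses. A minimal sketch
with hypothetical sizes and values:

import numpy

F32 = numpy.float32
U16 = numpy.uint16
U64 = numpy.uint64
L, I = 2, 1  # hypothetical loss types; I = insured_losses + 1

elt_dt = numpy.dtype([('eid', U64), ('rlzi', U16),
                      ('loss', (F32, (L * I,)))])
elt = numpy.zeros(3, elt_dt)
elt[0] = (42, 0, [1000.0, 250.0])  # event 42, realization 0
print(elt[0]['loss'])  # [1000.  250.]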
Example #6
    def pre_execute(self):
        oq = self.oqparam
        if 'gmfs' in oq.inputs:
            self.pre_calculator = None
        base.RiskCalculator.pre_execute(self)
        if not hasattr(self, 'assetcol'):
            self.assetcol = self.datastore['assetcol']
        self.L = len(self.riskmodel.lti)
        self.T = len(self.assetcol.tagcol)
        self.A = len(self.assetcol)
        self.I = oq.insured_losses + 1
        parent = self.datastore.parent
        self.precomputed_gmfs = 'gmf_data' in parent or 'gmfs' in oq.inputs
        if not self.precomputed_gmfs:
            return
        if 'gmf_data' in parent:
            # read the GMFs from a previous calc
            assert 'gmfs' not in oq.inputs, 'no gmfs_file when using --hc!'
            oqp = parent['oqparam']
            if oqp.investigation_time != oq.investigation_time:
                raise ValueError(
                    'The parent calculation was using investigation_time=%s'
                    ' != %s' % (oqp.investigation_time, oq.investigation_time))
            if oqp.minimum_intensity != oq.minimum_intensity:
                raise ValueError(
                    'The parent calculation was using minimum_intensity=%s'
                    ' != %s' % (oqp.minimum_intensity, oq.minimum_intensity))
            # sorting the eids is essential to get the epsilons in the right
            # order (i.e. consistent with the one used in ebr from ruptures)
            self.eids = sorted(parent['events']['eid'])
            self.datastore['csm_info'] = parent['csm_info']
            self.rlzs_assoc = parent['csm_info'].get_rlzs_assoc()
            self.R = len(self.rlzs_assoc.realizations)
        else:  # read the GMFs from a file
            with self.monitor('reading GMFs', measuremem=True):
                fname = oq.inputs['gmfs']
                sids = self.sitecol.complete.sids
                if fname.endswith('.xml'):  # old approach
                    self.eids, self.R = base.get_gmfs(self)
                else:  # import csv
                    self.eids, self.R, self.gmdata = base.import_gmfs(
                        self.datastore, fname, sids)
                    event_based.save_gmdata(self, self.R)
        self.E = len(self.eids)
        eps = self.epsilon_getter()()
        self.riskinputs = self.build_riskinputs('gmf', eps, self.E)
        self.param['gmf_ebrisk'] = True
        self.param['insured_losses'] = oq.insured_losses
        self.param['avg_losses'] = oq.avg_losses
        self.param['ses_ratio'] = oq.ses_ratio
        self.param['asset_loss_table'] = oq.asset_loss_table
        self.param['elt_dt'] = numpy.dtype([('eid', U64), ('rlzi', U16),
                                            ('loss', (F32,
                                                      (self.L * self.I, )))])
        self.taskno = 0
        self.start = 0
        avg_losses = self.oqparam.avg_losses
        if avg_losses:
            self.dset = self.datastore.create_dset(
                'avg_losses-rlzs', F32, (self.A, self.R, self.L * self.I))
        self.agglosses = numpy.zeros((self.E, self.R, self.L * self.I), F32)
        self.vals = self.assetcol.values()
        self.num_losses = numpy.zeros((self.A, self.R), U32)
        if oq.asset_loss_table:
            # save all_loss_ratios
            self.alr_nbytes = 0
            self.indices = collections.defaultdict(list)  # sid -> pairs
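
Examples #5 and #6 repeat the same parent/child parameter checks for
investigation_time and minimum_intensity. A hypothetical helper factoring
out that pattern (the function is illustrative, not part of the engine;
only the parameter names and error wording come from the snippets above):

def check_parent_params(parent_oq, oq,
                        names=('investigation_time', 'minimum_intensity')):
    # compare each named parameter on the parent and child calculations
    for name in names:
        parent_value = getattr(parent_oq, name)
        value = getattr(oq, name)
        if parent_value != value:
            raise ValueError(
                'The parent calculation was using %s=%s != %s' %
                (name, parent_value, value))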