Example #1
    def execute(self):
        if self.oqparam.hazard_calculation_id:

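            # with a parent hazard calculation the sources are already
            # stored there, so the saving step becomes a no-op pass-through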
            def saving_sources_by_task(allargs, dstore):
                return allargs
        else:
            from openquake.calculators.classical import saving_sources_by_task
        self.gmdata = {}
        self.offset = 0
        self.gmf_size = 0
        self.indices = collections.defaultdict(list)  # sid, idx -> indices
        acc = self.zerodict()
        with self.monitor('managing sources', autoflush=True):
            allargs = self.gen_args(self.monitor('classical'))
            iterargs = saving_sources_by_task(allargs, self.datastore)
            if isinstance(allargs, list):
                # there is a trick here: if the arguments are known
                # (a list, not an iterator), keep them as a list;
                # the Starmap will then recognize the case of a single
                # argument tuple and run the task in core (in-process)
                iterargs = list(iterargs)
            if self.oqparam.ground_motion_fields is False:
                logging.info('Generating ruptures only')
            ires = parallel.Starmap(self.core_task.__func__,
                                    iterargs).submit_all()
        acc = ires.reduce(self.agg_dicts, acc)
        if self.oqparam.hazard_calculation_id is None:
            with self.monitor('store source_info', autoflush=True):
                self.store_source_info(self.csm.infos, acc)
        self.check_overflow()  # check the number of events
        base.save_gmdata(self, self.R)
        if self.indices:
            N = len(self.sitecol.complete)
            logging.info('Saving gmf_data/indices')
            with self.monitor('saving gmf_data/indices',
                              measuremem=True,
                              autoflush=True):
                dset = self.datastore.create_dset('gmf_data/indices',
                                                  hdf5.vuint32,
                                                  shape=(N, 2),
                                                  fillvalue=None)
                for sid in self.sitecol.complete.sids:
                    dset[sid, 0] = self.indices[sid, 0]
                    dset[sid, 1] = self.indices[sid, 1]
        elif (self.oqparam.ground_motion_fields
              and 'ucerf' not in self.oqparam.calculation_mode):
            raise RuntimeError('No GMFs were generated, perhaps they were '
                               'all below the minimum_intensity threshold')
        return acc
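
The comment about the list-vs-iterator trick describes a general pattern: materializing the arguments when they are already known lets the task distributor detect the single-task case and run it in the current process, skipping the submission overhead. A minimal standalone sketch of that idea with concurrent.futures (the starmap name and the single-task shortcut are illustrative, not the OpenQuake Starmap):

from concurrent.futures import ProcessPoolExecutor

def starmap(func, iterargs):
    # if the arguments are already a concrete list holding a single
    # tuple, run the task in-process and skip the pool entirely
    if isinstance(iterargs, list) and len(iterargs) == 1:
        return [func(*iterargs[0])]
    with ProcessPoolExecutor() as pool:
        futures = [pool.submit(func, *args) for args in iterargs]
        return [fut.result() for fut in futures]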
Example #2
    def post_execute(self, result):
        """
        Compute stats for the aggregated distributions and save
        the results in the datastore.
        """
        dstates = self.riskmodel.damage_states
        ltypes = self.riskmodel.loss_types
        L = len(ltypes)
        R = len(self.rlzs_assoc.realizations)
        D = len(dstates)
        N = len(self.assetcol)
        E = self.oqparam.number_of_ground_motion_fields

        # damage distributions
        dt_list = []
        for ltype in ltypes:
            dt_list.append(
                (ltype, numpy.dtype([('mean', (F32, D)),
                                     ('stddev', (F32, D))])))
        multi_stat_dt = numpy.dtype(dt_list)
        d_asset = numpy.zeros((N, R, L, 2, D), F32)
        for (l, r, a, stat) in result['d_asset']:
            d_asset[a, r, l] = stat
        self.datastore['dmg_by_asset'] = dist_by_asset(
            d_asset, multi_stat_dt, self.assetcol.array['number'])
        dmg_dt = [(ds, F32) for ds in self.riskmodel.damage_states]
        d_event = numpy.zeros((E, R, L), dmg_dt)
        for d, ds in enumerate(self.riskmodel.damage_states):
            d_event[ds] = result['d_event'][:, :, :, d]
        self.datastore['dmg_by_event'] = d_event

        # consequence distributions
        if result['c_asset']:
            dtlist = [('eid', U64), ('rlzi', U16), ('loss', (F32, L))]
            stat_dt = numpy.dtype([('mean', F32), ('stddev', F32)])
            c_asset = numpy.zeros((N, R, L), stat_dt)
            for (l, r, a, stat) in result['c_asset']:
                c_asset[a, r, l] = stat
            multi_stat_dt = self.oqparam.loss_dt(stat_dt)
            self.datastore['losses_by_asset'] = c_asset
            self.datastore['losses_by_event'] = numpy.fromiter(
                ((eid, rlzi, F32(result['c_event'][eid, rlzi]))
                 for rlzi in range(R) for eid in range(E)), dtlist)

        # save gmdata
        self.gmdata = result['gmdata']
        for arr in self.gmdata.values():
            arr[-2] = self.oqparam.number_of_ground_motion_fields  # events
        base.save_gmdata(self, R)
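
Example #2 builds numpy structured dtypes at two levels: a per-loss-type dtype whose fields are (mean, stddev) pairs of D-vectors, and a flat per-event dtype with one float field per damage state, assignable by field name as in d_event[ds] above. A self-contained sketch of both constructions (the loss types and damage states are hypothetical):

import numpy

F32 = numpy.float32
D = 3  # number of damage states (illustrative)
ltypes = ['structural', 'nonstructural']  # hypothetical loss types
dstates = ['no_damage', 'moderate', 'complete']  # hypothetical damage states

# nested dtype: one field per loss type, each holding mean/stddev D-vectors
multi_stat_dt = numpy.dtype(
    [(lt, numpy.dtype([('mean', (F32, D)), ('stddev', (F32, D))]))
     for lt in ltypes])
d_asset = numpy.zeros(4, multi_stat_dt)  # 4 assets
d_asset['structural']['mean'][0] = [0.7, 0.2, 0.1]

# flat dtype: one F32 field per damage state
dmg_dt = [(ds, F32) for ds in dstates]
d_event = numpy.zeros(10, dmg_dt)  # 10 events
d_event['moderate'] = 0.25  # broadcast into a single field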
Example #3
    def save_results(self, allres, num_rlzs):
        """
        :param allres: an iterable of result iterators
        :param num_rlzs: the total number of realizations
        :returns: the total number of events
        """
        oq = self.oqparam
        self.A = len(self.assetcol)
        if oq.asset_loss_table:
            # save all_loss_ratios
            self.alr_nbytes = 0
            self.indices = collections.defaultdict(list)  # sid -> pairs

        if oq.avg_losses:
            self.dset = self.datastore.create_dset(
                'avg_losses-rlzs', F32, (self.A, num_rlzs, self.L * self.I))

        num_events = collections.Counter()
        self.gmdata = AccumDict(accum=numpy.zeros(len(oq.imtls) + 2, F32))
        self.taskno = 0
        self.start = 0
        self.num_losses = numpy.zeros((self.A, num_rlzs), U32)
        for res in allres:
            start, stop = res.rlz_slice.start, res.rlz_slice.stop
            for dic in res:
                for r, arr in dic.pop('gmdata').items():
                    self.gmdata[start + r] += arr
                self.save_losses(dic, start)
            logging.debug(
                'Saving results for source model #%d, realizations %d:%d',
                res.sm_id + 1, start, stop)
            if hasattr(res, 'eff_ruptures'):  # for UCERF
                self.eff_ruptures += res.eff_ruptures
            if hasattr(res, 'ruptures_by_grp'):  # for UCERF
                save_ruptures(self, res.ruptures_by_grp)
            elif hasattr(res, 'events_by_grp'):  # for UCERF
                for grp_id in res.events_by_grp:
                    events = res.events_by_grp[grp_id]
                    self.datastore.extend('events', events)
            num_events[res.sm_id] += res.num_events
        if 'all_loss_ratios' in self.datastore:
            self.datastore['all_loss_ratios/num_losses'] = self.num_losses
            self.datastore.set_attrs(
                'all_loss_ratios/num_losses', nbytes=self.num_losses.nbytes)
        del self.num_losses
        base.save_gmdata(self, num_rlzs)
        return num_events
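
The gmdata accumulation above follows a simple offsetting pattern: each result carries partial per-realization arrays keyed by a local index r, which is shifted by the realization slice start of its source model before being summed into the global accumulator. A plain-Python sketch of that pattern (a defaultdict stands in for OpenQuake's AccumDict; the data is made up):

import collections
import numpy

gmdata = collections.defaultdict(lambda: numpy.zeros(4, numpy.float32))

# each source model owns a contiguous slice of global realization indices
results = [
    {'start': 0, 'gmdata': {0: numpy.ones(4), 1: numpy.ones(4)}},
    {'start': 2, 'gmdata': {0: numpy.ones(4)}},  # local r=0 -> global 2
]
for res in results:
    for r, arr in res['gmdata'].items():
        gmdata[res['start'] + r] += arr  # shift local index to global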
Example #4
    def execute(self):
        oq = self.oqparam
        self.gmdata = {}
        self.offset = 0
        self.indices = collections.defaultdict(list)  # sid, idx -> indices
        self.min_iml = self.get_min_iml(oq)
        param = dict(oqparam=oq,
                     min_iml=self.min_iml,
                     save_ruptures=oq.save_ruptures,
                     gmf=oq.ground_motion_fields,
                     truncation_level=oq.truncation_level,
                     imtls=oq.imtls,
                     filter_distance=oq.filter_distance,
                     ses_per_logic_tree_path=oq.ses_per_logic_tree_path)
        if oq.hazard_calculation_id:  # from ruptures
            assert oq.ground_motion_fields, 'must be True!'
            self.datastore.parent = datastore.read(oq.hazard_calculation_id)
            iterargs = self.from_ruptures(param)
        else:  # from sources
            iterargs = self.from_sources(param)
            if oq.ground_motion_fields is False:
                for args in iterargs:  # store the ruptures/events
                    pass
                return {}
        acc = parallel.Starmap(self.core_task.__func__, iterargs,
                               self.monitor()).reduce(self.agg_dicts,
                                                      self.zerodict())
        self.check_overflow()  # check the number of events
        base.save_gmdata(self, self.R)
        if self.indices:
            N = len(self.sitecol.complete)
            logging.info('Saving gmf_data/indices')
            with self.monitor('saving gmf_data/indices',
                              measuremem=True,
                              autoflush=True):
                self.datastore['gmf_data/imts'] = ' '.join(oq.imtls)
                dset = self.datastore.create_dset('gmf_data/indices',
                                                  hdf5.vuint32,
                                                  shape=(N, 2),
                                                  fillvalue=None)
                for sid in self.sitecol.complete.sids:
                    dset[sid, 0] = self.indices[sid, 0]
                    dset[sid, 1] = self.indices[sid, 1]
        elif oq.ground_motion_fields and 'ucerf' not in oq.calculation_mode:
            raise RuntimeError('No GMFs were generated, perhaps they were '
                               'all below the minimum_intensity threshold')
        return acc
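
Examples #1 and #4 store, for every site, two variable-length arrays of row indices into gmf_data, which is what hdf5.vuint32 with shape (N, 2) provides. A minimal sketch of the same layout with plain h5py (the file path and index values are made up; hdf5.vuint32 is assumed to wrap an HDF5 variable-length uint32 dtype):

import h5py
import numpy

N = 3  # number of sites (illustrative)
vuint32 = h5py.vlen_dtype(numpy.uint32)  # variable-length uint32 cells
with h5py.File('/tmp/example.hdf5', 'w') as f:
    dset = f.create_dataset('gmf_data/indices', (N, 2), dtype=vuint32)
    for sid in range(N):
        # column 0: start rows, column 1: stop rows, one pair per task
        dset[sid, 0] = numpy.array([0, 10], numpy.uint32)
        dset[sid, 1] = numpy.array([10, 20], numpy.uint32)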
Example #5
    def execute(self):
        """
        Run `core_task(sources, sitecol, monitor)` in parallel,
        distributing the ruptures according to their weight and
        tectonic region type.
        """
        oq = self.oqparam
        calc.check_overflow(self)

        self.csm_info = self.datastore['csm_info']
        self.sm_id = {
            tuple(sm.path): sm.ordinal
            for sm in self.csm_info.source_models
        }
        L = len(oq.imtls.array)
        R = self.datastore['csm_info'].get_num_rlzs()
        self.gmdata = {}
        self.offset = 0
        self.indices = collections.defaultdict(list)  # sid -> indices
        ires = parallel.Starmap(self.core_task.__func__,
                                self.gen_args()).submit_all()
        if self.precalc and self.precalc.result:
            # remove the ruptures in memory to save memory
            self.precalc.result.clear()
        acc = ires.reduce(self.combine_pmaps_and_save_gmfs,
                          {r: ProbabilityMap(L)
                           for r in range(R)})
        base.save_gmdata(self, R)
        if self.indices:
            logging.info('Saving gmf_data/indices')
            with self.monitor('saving gmf_data/indices',
                              measuremem=True,
                              autoflush=True):
                self.datastore.save_vlen('gmf_data/indices', [
                    numpy.array(self.indices[sid], indices_dt)
                    for sid in self.sitecol.complete.sids
                ])
        else:
            raise RuntimeError('No GMFs were generated, perhaps they were '
                               'all below the minimum_intensity threshold')
        return acc
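
The submit_all/reduce idiom above is a map-reduce: tasks are submitted eagerly and each result is folded into an accumulator as it arrives. A sketch of the same flow with concurrent.futures (the agg function and the accumulator shape are illustrative):

from concurrent.futures import ProcessPoolExecutor, as_completed

def agg(acc, result):
    # fold one task result (a dict) into the accumulator
    for key, val in result.items():
        acc[key] = acc.get(key, 0) + val
    return acc

def reduce_results(func, iterargs, acc):
    with ProcessPoolExecutor() as pool:
        futures = [pool.submit(func, *args) for args in iterargs]
        for fut in as_completed(futures):  # fold results as they arrive
            acc = agg(acc, fut.result())
    return acc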
Example #6
    def save_results(self, allres, num_rlzs):
        """
        :param allres: an iterable of result iterators
        :param num_rlzs: the total number of realizations
        :returns: the total number of events
        """
        oq = self.oqparam
        self.A = len(self.assetcol)
        if oq.avg_losses:
            self.dset = self.datastore.create_dset(
                'avg_losses-rlzs', F32, (self.A, num_rlzs, self.L * self.I))

        num_events = collections.Counter()
        self.gmdata = AccumDict(accum=numpy.zeros(len(oq.imtls) + 1, F32))
        self.taskno = 0
        self.start = 0
        for res in allres:
            start, stop = res.rlz_slice.start, res.rlz_slice.stop
            for dic in res:
                for r, arr in dic.pop('gmdata').items():
                    self.gmdata[start + r] += arr
                self.save_losses(dic, start)
            logging.debug(
                'Saving results for source model #%d, realizations %d:%d',
                res.sm_id + 1, start, stop)
            if hasattr(res, 'eff_ruptures'):
                self.eff_ruptures += res.eff_ruptures
            if hasattr(res, 'ruptures_by_grp'):
                for ruptures in res.ruptures_by_grp.values():
                    save_ruptures(self, ruptures)
            elif hasattr(res, 'events_by_grp'):
                for grp_id in res.events_by_grp:
                    events = res.events_by_grp[grp_id]
                    self.datastore.extend('events', events)
            num_events[res.sm_id] += res.num_events
        base.save_gmdata(self, num_rlzs)
        return num_events
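
datastore.extend appends event records to a growable dataset. With plain h5py the same effect requires a dataset created with an unlimited maxshape along the growth axis, then resized before each append; a hedged sketch with a hypothetical extend helper:

import h5py
import numpy

def extend(dset, array):
    # grow the dataset along its first axis, then write `array` at the end
    n = len(dset)
    dset.resize(n + len(array), axis=0)
    dset[n:] = array

with h5py.File('/tmp/events.hdf5', 'w') as f:
    events = f.create_dataset('events', (0,), maxshape=(None,),
                              dtype=numpy.float32)
    extend(events, numpy.arange(5, dtype=numpy.float32))
    extend(events, numpy.arange(3, dtype=numpy.float32))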