Example #1
    def execute(self):
        """
        Run in parallel `core_task(sources, sitecol, monitor)`, by
        parallelizing on the ruptures according to their weight and
        tectonic region type.
        """
        oq = self.oqparam
        if not oq.hazard_curves_from_gmfs and not oq.ground_motion_fields:
            return
        if self.oqparam.ground_motion_fields:
            calc.check_overflow(self)

        self.csm_info = self.datastore['csm_info']
        self.sm_id = {tuple(sm.path): sm.ordinal
                      for sm in self.csm_info.source_models}
        L = len(oq.imtls.array)
        R = len(self.datastore['realizations'])
        self.gmdata = {}
        self.offset = 0
        self.indices = collections.defaultdict(list)  # sid -> indices
        acc = parallel.Starmap(
            self.core_task.__func__, self.gen_args()
        ).reduce(self.combine_pmaps_and_save_gmfs, {
            r: ProbabilityMap(L) for r in range(R)})
        save_gmdata(self, R)
        if self.indices:
            logging.info('Saving gmf_data/indices')
            with self.monitor('saving gmf_data/indices', measuremem=True,
                              autoflush=True):
                self.datastore.save_vlen(
                    'gmf_data/indices',
                    [numpy.array(self.indices[sid], indices_dt)
                     for sid in self.sitecol.complete.sids])
        return acc
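
All of these execute variants share one map-reduce shape: gen_args yields one argument tuple per task, parallel.Starmap (or the lowercase parallel.starmap seen in some snippets) fans the tuples out to workers, and the task results are folded into an accumulator keyed by realization ordinal. Below is a minimal self-contained sketch of that shape using only the standard library; core_task and combine are toy stand-ins, not OpenQuake APIs.

import functools
from concurrent.futures import ProcessPoolExecutor

def core_task(block, weight):
    # toy stand-in for core_task(sources, sitecol, monitor): each task
    # returns a dict {realization_ordinal: partial_result}
    return {0: sum(block) * weight}

def combine(acc, result):
    # toy stand-in for combine_pmaps_and_save_gmfs: merge one task
    # result into the accumulator, realization by realization
    for r, value in result.items():
        acc[r] = acc.get(r, 0) + value
    return acc

if __name__ == '__main__':
    allargs = [([1, 2, 3], 2), ([4, 5], 3)]  # one tuple per task
    with ProcessPoolExecutor() as pool:
        results = pool.map(core_task, *zip(*allargs))
        acc = functools.reduce(combine, results, {})
    print(acc)  # {0: 39}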
Example #2
    def execute(self):
        """
        Run in parallel `core_task(sources, sitecol, monitor)`, by
        parallelizing on the ruptures according to their weight and
        tectonic region type.
        """
        oq = self.oqparam
        if not oq.hazard_curves_from_gmfs and not oq.ground_motion_fields:
            return
        self.sesruptures = []
        if self.precalc:  # the ruptures are already in memory
            for grp_id, sesruptures in self.precalc.result.items():
                for sr in sesruptures:
                    self.sesruptures.append(sr)
        else:  # read the ruptures from the datastore
            for serial in self.datastore['sescollection']:
                sr = self.datastore['sescollection/' + serial]
                self.sesruptures.append(sr)
        self.sesruptures.sort(key=operator.attrgetter('serial'))
        if self.oqparam.ground_motion_fields:
            calc.check_overflow(self)

        L = len(oq.imtls.array)
        res = parallel.starmap(
            self.core_task.__func__, self.gen_args(self.sesruptures)
        ).submit_all()
        acc = functools.reduce(self.combine_pmaps_and_save_gmfs, res, {
            rlz.ordinal: ProbabilityMap(L, 1)
            for rlz in self.rlzs_assoc.realizations})
        self.save_data_transfer(res)
        return acc
Example #3
    def execute(self):
        """
        Run in parallel `core_task(sources, sitecol, monitor)`, by
        parallelizing on the ruptures according to their weight and
        tectonic region type.
        """
        oq = self.oqparam
        if not oq.hazard_curves_from_gmfs and not oq.ground_motion_fields:
            return
        if self.oqparam.ground_motion_fields:
            calc.check_overflow(self)

        with self.monitor('reading ruptures', autoflush=True):
            ruptures_by_grp = (self.precalc.result if self.precalc else
                               get_ruptures_by_grp(self.datastore.parent))

        self.csm_info = self.datastore['csm_info']
        self.sm_id = {
            tuple(sm.path): sm.ordinal
            for sm in self.csm_info.source_models
        }
        L = len(oq.imtls.array)
        rlzs = self.rlzs_assoc.realizations
        res = parallel.Starmap(self.core_task.__func__,
                               self.gen_args(ruptures_by_grp)).submit_all()
        self.gmdata = {}
        acc = res.reduce(self.combine_pmaps_and_save_gmfs,
                         {rlz.ordinal: ProbabilityMap(L, 1)
                          for rlz in rlzs})
        save_gmdata(self, len(rlzs))
        return acc
Example #4
    def pre_execute(self):
        """
        Read the precomputed ruptures (or compute them on the fly)
        """
        super(EventBasedRiskCalculator, self).pre_execute()
        calc.check_overflow(self)
        if not self.riskmodel:  # there is no riskmodel, exit early
            self.execute = lambda: None
            self.post_execute = lambda result: None
            return
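
The early exit above relies on a small Python idiom: assigning lambdas to self.execute and self.post_execute shadows the class methods on that one instance, so the calculator driver can keep calling them unconditionally. A stand-alone sketch of the idiom (the Calculator class here is illustrative, not OpenQuake's):

class Calculator:
    def __init__(self, riskmodel=None):
        self.riskmodel = riskmodel

    def pre_execute(self):
        if not self.riskmodel:  # no riskmodel: neutralize the run
            self.execute = lambda: None
            self.post_execute = lambda result: None

    def execute(self):
        return 'real result'

    def post_execute(self, result):
        print('storing', result)

calculator = Calculator()  # built without a riskmodel
calculator.pre_execute()
calculator.post_execute(calculator.execute())  # 'storing None', no work done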
Example #5
    def execute(self):
        if self.oqparam.hazard_calculation_id:

            def saving_sources_by_task(allargs, dstore):
                return allargs
        else:
            from openquake.calculators.classical import saving_sources_by_task
        self.gmdata = {}
        self.offset = 0
        self.gmf_size = 0
        self.indices = collections.defaultdict(list)  # (sid, idx) -> indices
        acc = self.zerodict()
        with self.monitor('managing sources', autoflush=True):
            allargs = self.gen_args(self.monitor('classical'))
            iterargs = saving_sources_by_task(allargs, self.datastore)
            if isinstance(allargs, list):
                # there is a trick here: if the arguments are known in
                # advance (a list, not an iterator), keep them as a
                # list; then the Starmap can recognize the case of a
                # single argument tuple and run the task in-core
                iterargs = list(iterargs)
            if self.oqparam.ground_motion_fields is False:
                logging.info('Generating ruptures only')
            acc = parallel.Starmap(self.core_task.__func__,
                                   iterargs).reduce(self.agg_dicts, acc)
        if self.oqparam.hazard_calculation_id is None:
            with self.monitor('store source_info', autoflush=True):
                self.store_source_info(self.csm.infos, acc)
        calc.check_overflow(self)
        base.save_gmdata(self, self.R)
        if self.indices:
            N = len(self.sitecol.complete)
            logging.info('Saving gmf_data/indices')
            with self.monitor('saving gmf_data/indices',
                              measuremem=True,
                              autoflush=True):
                dset = self.datastore.create_dset('gmf_data/indices',
                                                  hdf5.vuint32,
                                                  shape=(N, 2),
                                                  fillvalue=None)
                for sid in self.sitecol.complete.sids:
                    dset[sid, 0] = self.indices[sid, 0]
                    dset[sid, 1] = self.indices[sid, 1]
        elif (self.oqparam.ground_motion_fields
              and 'ucerf' not in self.oqparam.calculation_mode):
            raise RuntimeError('No GMFs were generated, perhaps they were '
                               'all below the minimum_intensity threshold')
        return acc
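
The gmf_data/indices dataset built above is an (N, 2) array of variable-length uint32 sequences, one row per site. Here is a sketch of the same layout in plain h5py, assuming hdf5.vuint32 denotes an h5py vlen uint32 dtype; the file name and sample data are made up.

import numpy
import h5py

indices = {0: ([0, 5], [5, 9]), 1: ([2], [4])}  # sid -> (starts, stops)
N = 2
vuint32 = h5py.vlen_dtype(numpy.uint32)  # variable-length uint32 cells
with h5py.File('/tmp/gmf.hdf5', 'w') as f:
    dset = f.create_dataset('gmf_data/indices', (N, 2), dtype=vuint32)
    for sid, (starts, stops) in indices.items():
        dset[sid, 0] = numpy.array(starts, numpy.uint32)
        dset[sid, 1] = numpy.array(stops, numpy.uint32)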
Example #6
    def pre_execute(self):
        """
        Read the precomputed ruptures (or compute them on the fly) and
        prepare some empty files in the export directory to store the gmfs
        (if any). If there were pre-existing files, they will be erased.
        """
        super(EventBasedCalculator, self).pre_execute()
        rlzs_by_grp_id = self.rlzs_assoc.get_rlzs_by_grp_id()
        num_rlzs = {t: len(rlzs) for t, rlzs in rlzs_by_grp_id.items()}
        self.sesruptures = []
        for serial in self.datastore['sescollection']:
            sr = self.datastore['sescollection/' + serial]
            sr.set_weight(num_rlzs, {})
            self.sesruptures.append(sr)
        self.sesruptures.sort(key=operator.attrgetter('serial'))
        if self.oqparam.ground_motion_fields:
            calc.check_overflow(self)
            for rlz in self.rlzs_assoc.realizations:
                self.datastore.create_dset(
                    'gmf_data/%04d' % rlz.ordinal, calc.gmv_dt)
Example #7
    def pre_execute(self):
        """
        Read the precomputed ruptures (or compute them on the fly) and
        prepare some empty files in the export directory to store the gmfs
        (if any). If there were pre-existing files, they will be erased.
        """
        super(EventBasedCalculator, self).pre_execute()
        rlzs_by_tr_id = self.rlzs_assoc.get_rlzs_by_trt_id()
        num_rlzs = {t: len(rlzs) for t, rlzs in rlzs_by_tr_id.items()}
        self.sesruptures = []
        for serial in self.datastore['sescollection']:
            sr = self.datastore['sescollection/' + serial]
            sr.set_weight(num_rlzs, {})
            self.sesruptures.append(sr)
        self.sesruptures.sort(key=operator.attrgetter('serial'))
        if self.oqparam.ground_motion_fields:
            calc.check_overflow(self)
            for rlz in self.rlzs_assoc.realizations:
                self.datastore.create_dset('gmf_data/%04d' % rlz.ordinal,
                                           calc.gmv_dt)
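
Both pre_execute variants sort the ruptures by serial number for a deterministic order and build one gmf_data/NNNN dataset name per realization. The two idioms in isolation (Rupture here is a made-up stand-in):

import operator
from collections import namedtuple

Rupture = namedtuple('Rupture', 'serial')
ruptures = [Rupture(3), Rupture(1), Rupture(2)]
ruptures.sort(key=operator.attrgetter('serial'))  # deterministic order
print([r.serial for r in ruptures])          # [1, 2, 3]
print(['gmf_data/%04d' % rlz for rlz in range(2)])
# ['gmf_data/0000', 'gmf_data/0001']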
Example #8
    def execute(self):
        """
        Run in parallel `core_task(sources, sitecol, monitor)`, by
        parallelizing on the ruptures according to their weight and
        tectonic region type.
        """
        oq = self.oqparam
        calc.check_overflow(self)

        self.csm_info = self.datastore['csm_info']
        self.sm_id = {
            tuple(sm.path): sm.ordinal
            for sm in self.csm_info.source_models
        }
        L = len(oq.imtls.array)
        R = self.datastore['csm_info'].get_num_rlzs()
        self.gmdata = {}
        self.offset = 0
        self.indices = collections.defaultdict(list)  # sid -> indices
        ires = parallel.Starmap(self.core_task.__func__,
                                self.gen_args()).submit_all()
        if self.precalc and self.precalc.result:
            # remove the ruptures in memory to save memory
            self.precalc.result.clear()
        acc = ires.reduce(self.combine_pmaps_and_save_gmfs,
                          {r: ProbabilityMap(L)
                           for r in range(R)})
        base.save_gmdata(self, R)
        if self.indices:
            logging.info('Saving gmf_data/indices')
            with self.monitor('saving gmf_data/indices',
                              measuremem=True,
                              autoflush=True):
                self.datastore.save_vlen('gmf_data/indices', [
                    numpy.array(self.indices[sid], indices_dt)
                    for sid in self.sitecol.complete.sids
                ])
        else:
            raise RuntimeError('No GMFs were generated, perhaps they were '
                               'all below the minimum_intensity threshold')
        return acc
Example #9
    def execute(self):
        """
        Run in parallel `core_task(sources, sitecol, monitor)`, by
        parallelizing on the ruptures according to their weight and
        tectonic region type.
        """
        oq = self.oqparam
        if not oq.hazard_curves_from_gmfs and not oq.ground_motion_fields:
            return
        ruptures_by_grp = (self.precalc.result if self.precalc
                           else get_ruptures_by_grp(self.datastore.parent))
        if self.oqparam.ground_motion_fields:
            calc.check_overflow(self)
        self.sm_id = {sm.path: sm.ordinal
                      for sm in self.csm.info.source_models}
        L = len(oq.imtls.array)
        res = parallel.starmap(
            self.core_task.__func__, self.gen_args(ruptures_by_grp)
        ).submit_all()
        acc = functools.reduce(self.combine_pmaps_and_save_gmfs, res, {
            rlz.ordinal: ProbabilityMap(L, 1)
            for rlz in self.rlzs_assoc.realizations})
        self.save_data_transfer(res)
        return acc
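
Examples #2 and #9 fold the task results with functools.reduce, while #1, #3, #5 and #8 call the .reduce method of the object returned by submit_all or Starmap; both are the same left fold. A toy demonstration (FakeResults is a made-up stand-in for that result object, not an OpenQuake class):

import functools

class FakeResults:
    def __init__(self, results):
        self.results = results

    def __iter__(self):
        return iter(self.results)

    def reduce(self, agg, acc):
        # same left fold as functools.reduce over self
        return functools.reduce(agg, self, acc)

def agg(acc, dic):
    # merge one task result into the accumulator
    out = dict(acc)
    for k, v in dic.items():
        out[k] = out.get(k, 0) + v
    return out

res = FakeResults([{0: 1}, {0: 2, 1: 5}])
assert res.reduce(agg, {}) == functools.reduce(agg, res, {}) == {0: 3, 1: 5}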