Example #1
0
 def execute(self):
     """
     Run in parallel `core_func(sources, sitecol, monitor)`, by
     parallelizing on the sources according to their weight and
     tectonic region type.

     :returns: the accumulator produced by reducing the task results
         with `self.agg_dicts`, a dictionary keyed like `self.rlzs_assoc`
     """
     # child monitor named after the core function, so per-task timings
     # are reported under that name
     monitor = self.monitor.new(self.core_func.__name__)
     monitor.oqparam = self.oqparam  # expose the job parameters to the tasks
     sources = self.csm.get_sources()
     # seed value for the reduction: one array of zero curves per key
     zc = zero_curves(len(self.sitecol.complete), self.oqparam.imtls)
     zerodict = AccumDict((key, zc) for key in self.rlzs_assoc)
     zerodict.calc_times = []  # presumably filled during aggregation — confirm in agg_dicts
     # bounding boxes are only needed when disaggregation is enabled;
     # keyed by (source model ordinal, site id)
     zerodict.bb_dict = {
         (smodel.ordinal, site.id): BoundingBox(smodel.ordinal, site.id)
         for site in self.sitecol
         for smodel in self.csm.source_models
     } if self.oqparam.poes_disagg else {}
     # distribute the sources weighted by .weight and grouped by
     # .trt_model_id, reducing the partial results with agg_dicts
     curves_by_trt_gsim = parallel.apply_reduce(
         self.core_func.__func__,
         (sources, self.sitecol, 0, self.rlzs_assoc, monitor),
         agg=self.agg_dicts, acc=zerodict,
         concurrent_tasks=self.oqparam.concurrent_tasks,
         weight=operator.attrgetter('weight'),
         key=operator.attrgetter('trt_model_id'))
     store_source_chunks(self.datastore)
     return curves_by_trt_gsim
    def execute(self):
        """
        Run in parallel `core_task(sources, sitecol, monitor)`, by
        parallelizing on the sources according to their weight and
        tectonic region type.

        :returns: a dictionary grp_id -> ProbabilityMap, accumulated
            over all source models (plus `.calc_times`, `.eff_ruptures`
            and `.bb_dict` attributes)
        """
        monitor = self.monitor(self.core_task.__name__)
        monitor.oqparam = oq = self.oqparam  # expose job parameters to tasks
        self.src_filter = SourceFilter(self.sitecol, oq.maximum_distance)
        self.nsites = []
        # seed accumulator: one empty ProbabilityMap per source group
        acc = AccumDict({
            grp_id: ProbabilityMap(len(oq.imtls.array), len(gsims))
            for grp_id, gsims in self.gsims_by_grp.items()
        })
        acc.calc_times = {}
        acc.eff_ruptures = AccumDict()  # grp_id -> eff_ruptures
        acc.bb_dict = {}  # just for API compatibility
        param = dict(imtls=oq.imtls, truncation_level=oq.truncation_level)
        for sm in self.csm.source_models:  # one branch at the time
            grp_id = sm.ordinal
            gsims = self.gsims_by_grp[grp_id]
            # each source model is expected to hold exactly one source
            # group containing exactly one UCERF source (double unpack)
            [[ucerf_source]] = sm.src_groups
            ucerf_source.nsites = len(self.sitecol)
            self.csm.infos[ucerf_source.source_id] = source.SourceInfo(
                ucerf_source)
            logging.info('Getting the background point sources')
            bckgnd_sources = ucerf_source.get_background_sources(
                self.src_filter)

            # since there are two kinds of tasks (background and rupture_set)
            # we divide the concurrent_tasks parameter by 2;
            # notice the "or 1" below, to avoid issues when
            # self.oqparam.concurrent_tasks is 0 or 1
            ct2 = (self.oqparam.concurrent_tasks // 2) or 1

            # parallelize on the background sources, small tasks
            args = (bckgnd_sources, self.src_filter, gsims, param, monitor)
            bg_res = parallel.Starmap.apply(classical,
                                            args,
                                            name='background_sources_%d' %
                                            grp_id,
                                            concurrent_tasks=ct2)

            # parallelize by rupture subsets
            rup_sets = numpy.arange(ucerf_source.num_ruptures)
            taskname = 'ucerf_classical_%d' % grp_id
            acc = parallel.Starmap.apply(
                ucerf_classical,
                (rup_sets, ucerf_source, self.src_filter, gsims, monitor),
                concurrent_tasks=ct2,
                name=taskname).reduce(self.agg_dicts, acc)

            # compose probabilities from background sources
            for pmap in bg_res:
                acc[grp_id] |= pmap[grp_id]

        with self.monitor('store source_info', autoflush=True):
            self.store_source_info(self.csm.infos, acc)
        return acc  # {grp_id: pmap}
Example #3
0
 def zerodict(self):
     """
     Build the initial accumulator, a dictionary mapping each
     (trt_id, gsim) key to an array of zero hazard curves.
     """
     oq = self.oqparam
     curves = zero_curves(len(self.sitecol.complete), oq.imtls)
     acc = AccumDict((key, curves) for key in self.rlzs_assoc)
     acc.calc_times = []
     acc.eff_ruptures = AccumDict()  # trt_id -> eff_ruptures
     # bounding boxes are only needed for disaggregation
     acc.bb_dict = {}
     if oq.poes_disagg:
         for site in self.sitecol:
             for smodel in self.csm.source_models:
                 acc.bb_dict[smodel.ordinal, site.id] = BoundingBox(
                     smodel.ordinal, site.id)
     return acc
Example #4
0
 def zerodict(self):
     """
     Return the initial accumulator: a dictionary with one array of
     zero curves per (trt_id, gsim) key in the realization association.
     """
     nsites = len(self.sitecol.complete)
     zeros = zero_curves(nsites, self.oqparam.imtls)
     acc = AccumDict((k, zeros) for k in self.rlzs_assoc)
     acc.calc_times = []
     acc.eff_ruptures = AccumDict()  # trt_id -> eff_ruptures
     if self.oqparam.poes_disagg:
         # (source model ordinal, site id) -> BoundingBox
         acc.bb_dict = {
             (sm.ordinal, site.id): BoundingBox(sm.ordinal, site.id)
             for site in self.sitecol
             for sm in self.csm.source_models}
     else:
         acc.bb_dict = {}
     return acc
Example #5
0
 def zerodict(self):
     """
     Return the initial accumulator, a dict grp_id -> ProbabilityMap(L, G)
     """
     oq = self.oqparam
     nlevels = len(oq.imtls.array)
     gsims_by_grp = self.rlzs_assoc.gsims_by_grp_id
     acc = AccumDict(
         (grp.id, ProbabilityMap(nlevels, len(gsims_by_grp[grp.id])))
         for grp in self.csm.src_groups)
     acc.calc_times = []
     acc.eff_ruptures = AccumDict()  # grp_id -> eff_ruptures
     acc.bb_dict = BBdict()
     # bounding boxes are only needed when some disaggregation is requested
     if oq.poes_disagg or oq.iml_disagg:
         acc.bb_dict.update({
             (sm.ordinal, sid): BoundingBox(sm.ordinal, sid)
             for sid in self.sitecol.sids
             for sm in self.csm.source_models})
     return acc
Example #6
0
 def zerodict(self):
     """
     Initial accumulator: a dict mapping grp_id -> ProbabilityMap(L, G)
     """
     num_levels = len(self.oqparam.imtls.array)
     acc = AccumDict()
     for group in self.csm.src_groups:
         gsims = self.rlzs_assoc.gsims_by_grp_id[group.id]
         acc[group.id] = ProbabilityMap(num_levels, len(gsims))
     acc.calc_times = []
     acc.eff_ruptures = AccumDict()  # grp_id -> eff_ruptures
     acc.bb_dict = BBdict()
     # bounding boxes are only needed for disaggregation
     if self.oqparam.poes_disagg:
         for sid in self.sitecol.sids:
             for source_model in self.csm.source_models:
                 key = source_model.ordinal, sid
                 acc.bb_dict[key] = BoundingBox(*key)
     return acc
Example #7
0
    def execute(self):
        """
        Run in parallel `core_task(sources, sitecol, monitor)`, by
        parallelizing on the sources according to their weight and
        tectonic region type.

        :returns: a dictionary grp_id -> ProbabilityMap, obtained by
            reducing the task results with `self.agg_dicts`
        """
        monitor = self.monitor.new(self.core_task.__name__)
        monitor.oqparam = oq = self.oqparam  # expose job parameters to tasks
        # a single UCERF source is expected in the source group
        ucerf_source = self.src_group.sources[0]
        max_dist = oq.maximum_distance[DEFAULT_TRT]
        # seed accumulator: one empty ProbabilityMap per source group
        acc = AccumDict({
            grp_id: ProbabilityMap(len(oq.imtls.array), len(gsims))
            for grp_id, gsims in self.rlzs_assoc.gsims_by_grp_id.items()})
        acc.calc_times = []
        acc.eff_ruptures = AccumDict()  # grp_id -> eff_ruptures
        acc.bb_dict = {}  # left empty here; presumably for API compatibility — confirm

        if len(self.csm) > 1:
            # when multiple branches, parallelise by branch
            branches = [br.value for br in self.smlt.branches.values()]
            rup_res = parallel.starmap(
                ucerf_classical_hazard_by_branch,
                self.gen_args(branches, ucerf_source, monitor)).submit_all()
        else:
            # single branch
            gsims = self.rlzs_assoc.gsims_by_grp_id[0]
            # exactly one branch is expected in the source model logic tree
            [(branch_id, branch)] = self.smlt.branches.items()
            branchname = branch.value
            ucerf_source.src_group_id = 0
            ucerf_source.weight = 1
            ucerf_source.nsites = len(self.sitecol)
            self.infos[0, ucerf_source.source_id] = source.SourceInfo(
                ucerf_source)
            logging.info('Getting the background point sources')
            with self.monitor('getting background sources', autoflush=True):
                ucerf_source.build_idx_set()
                background_sids = ucerf_source.get_background_sids(
                    self.sitecol, max_dist)
                bckgnd_sources = ucerf_source.get_background_sources(
                    background_sids)

            # parallelize on the background sources, small tasks
            args = (bckgnd_sources, self.sitecol, oq.imtls,
                    gsims, self.oqparam.truncation_level,
                    'SourceSitesFilter', max_dist, (), monitor)
            bg_res = parallel.apply(
                pmap_from_grp, args,
                concurrent_tasks=self.oqparam.concurrent_tasks).submit_all()

            # parallelize by rupture subsets
            tasks = self.oqparam.concurrent_tasks * 2  # they are big tasks
            rup_sets = ucerf_source.get_rupture_indices(branchname)
            rup_res = parallel.apply(
                ucerf_classical_hazard_by_rupture_set,
                (rup_sets, branchname, ucerf_source, self.src_group.id,
                 self.sitecol, gsims, monitor),
                concurrent_tasks=tasks).submit_all()

            # compose probabilities from background sources
            for pmap in bg_res:
                acc[0] |= pmap
            self.save_data_transfer(bg_res)

        # reduce the rupture results (from either code path) into the seed
        pmap_by_grp_id = functools.reduce(self.agg_dicts, rup_res, acc)
        with self.monitor('store source_info', autoflush=True):
            self.store_source_info(self.infos)
            self.save_data_transfer(rup_res)
        self.datastore['csm_info'] = self.csm.info
        self.rlzs_assoc = self.csm.info.get_rlzs_assoc(
            functools.partial(self.count_eff_ruptures, pmap_by_grp_id))
        # NOTE(review): 'csm_info' is stored a second time here; this is only
        # meaningful if get_rlzs_assoc mutates csm.info — otherwise the first
        # store above is redundant; confirm before removing either
        self.datastore['csm_info'] = self.csm.info
        return pmap_by_grp_id