Code example #1
    def execute(self):
        """
        Run in parallel `core_task(sources, sitecol, monitor)`, by
        parallelizing on the sources according to their weight and
        tectonic region type.
        """
        monitor = self.monitor(self.core_task.__name__)
        oq = self.oqparam
        acc = self.zerodict()
        self.nsites = []  # used in agg_dicts
        param = dict(imtls=oq.imtls, truncation_level=oq.truncation_level,
                     filter_distance=oq.filter_distance)
        for sm in self.csm.source_models:  # one branch at a time
            [grp] = sm.src_groups
            gsims = self.csm.info.get_gsims(sm.ordinal)
            acc = parallel.Starmap.apply(
                classical, (grp, self.src_filter, gsims, param, monitor),
                weight=operator.attrgetter('weight'),
                concurrent_tasks=oq.concurrent_tasks,
            ).reduce(self.agg_dicts, acc)
            # second pass: the background sources attached to the UCERF source
            ucerf = grp.sources[0].orig
            logging.info('Getting background sources from %s', ucerf.source_id)
            srcs = ucerf.get_background_sources(self.src_filter)
            for src in srcs:
                self.csm.infos[src.source_id] = source.SourceInfo(src)
            acc = parallel.Starmap.apply(
                classical, (srcs, self.src_filter, gsims, param, monitor),
                weight=operator.attrgetter('weight'),
                concurrent_tasks=oq.concurrent_tasks,
            ).reduce(self.agg_dicts, acc)

        with self.monitor('store source_info', autoflush=True):
            self.store_source_info(self.csm.infos, acc)
        return acc  # {grp_id: pmap}
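
Both `parallel.Starmap.apply` calls above follow the same pattern: split the sources into blocks of roughly equal total weight (here `weight=operator.attrgetter('weight')`), run `classical` on each block concurrently, and fold the partial results into the accumulator via `.reduce(self.agg_dicts, acc)`. A minimal standard-library sketch of that split-and-reduce pattern; the helper names `split_by_weight` and `map_reduce` are illustrative, not part of the engine:

    import multiprocessing

    def split_by_weight(items, weight, num_chunks):
        # greedy balancing: heaviest items first, each into the lightest chunk
        chunks = [[] for _ in range(num_chunks)]
        totals = [0.0] * num_chunks
        for item in sorted(items, key=weight, reverse=True):
            i = totals.index(min(totals))
            chunks[i].append(item)
            totals[i] += weight(item)
        return [chunk for chunk in chunks if chunk]

    def map_reduce(task, chunks, agg, acc):
        # one task per chunk; partial results are folded into the accumulator
        with multiprocessing.Pool() as pool:
            for partial in pool.imap_unordered(task, chunks):
                acc = agg(acc, partial)
        return acc
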
Code example #2
    def execute(self):
        """
        Run in parallel `core_task(sources, sitecol, monitor)`, by
        parallelizing on the sources according to their weight and
        tectonic region type.
        """
        monitor = self.monitor(self.core_task.__name__)
        monitor.oqparam = oq = self.oqparam
        self.src_filter = SourceFilter(self.sitecol, oq.maximum_distance)
        self.nsites = []
        acc = AccumDict({
            grp_id: ProbabilityMap(len(oq.imtls.array), len(gsims))
            for grp_id, gsims in self.gsims_by_grp.items()
        })
        acc.calc_times = {}
        acc.eff_ruptures = AccumDict()  # grp_id -> eff_ruptures
        acc.bb_dict = {}  # just for API compatibility
        param = dict(imtls=oq.imtls, truncation_level=oq.truncation_level)
        for sm in self.csm.source_models:  # one branch at a time
            grp_id = sm.ordinal
            gsims = self.gsims_by_grp[grp_id]
            [[ucerf_source]] = sm.src_groups
            ucerf_source.nsites = len(self.sitecol)
            self.csm.infos[ucerf_source.source_id] = source.SourceInfo(
                ucerf_source)
            logging.info('Getting the background point sources')
            bckgnd_sources = ucerf_source.get_background_sources(
                self.src_filter)

            # since there are two kinds of tasks (background and rupture_set)
            # we divide the concurrent_tasks parameter by 2;
            # notice the "or 1" below, to avoid issues when
            # self.oqparam.concurrent_tasks is 0 or 1
            ct2 = (self.oqparam.concurrent_tasks // 2) or 1

            # parallelize on the background sources, small tasks
            args = (bckgnd_sources, self.src_filter, gsims, param, monitor)
            bg_res = parallel.Starmap.apply(
                classical, args,
                name='background_sources_%d' % grp_id,
                concurrent_tasks=ct2)

            # parallelize by rupture subsets
            rup_sets = numpy.arange(ucerf_source.num_ruptures)
            taskname = 'ucerf_classical_%d' % grp_id
            acc = parallel.Starmap.apply(
                ucerf_classical,
                (rup_sets, ucerf_source, self.src_filter, gsims, monitor),
                concurrent_tasks=ct2,
                name=taskname).reduce(self.agg_dicts, acc)

            # compose probabilities from background sources
            for pmap in bg_res:
                acc[grp_id] |= pmap[grp_id]

        with self.monitor('store source_info', autoflush=True):
            self.store_source_info(self.csm.infos, acc)
        return acc  # {grp_id: pmap}
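
The last loop merges the background results into the accumulator with `acc[grp_id] |= pmap[grp_id]`. Assuming `|` on a ProbabilityMap composes probabilities of exceedance from independent source sets, the underlying rule is `1 - (1 - a) * (1 - b)`; a small numpy sketch of that assumption:

    import numpy

    def compose_poes(a, b):
        # probability that at least one of two independent source sets
        # produces an exceedance: 1 - P(neither does)
        return 1. - (1. - a) * (1. - b)

    background = numpy.array([0.10, 0.02])
    rupture_sets = numpy.array([0.30, 0.05])
    print(compose_poes(background, rupture_sets))  # [0.37  0.069]
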
Code example #3
    def pre_execute(self):
        super().pre_execute()
        self.src_filter = UcerfFilter(
            self.sitecol, self.oqparam.maximum_distance)
        for sm in self.csm.source_models:  # one branch at a time
            [grp] = sm.src_groups
            for src in grp:
                self.csm.infos[src.source_id] = source.SourceInfo(src)
                grp.tot_ruptures += src.num_ruptures
Code example #4
    def pre_execute(self):
        super().pre_execute()
        self.src_filter = UcerfFilter(self.sitecol,
                                      self.oqparam.maximum_distance)
        for sm in self.csm.source_models:  # one branch at a time
            [grp] = sm.src_groups
            ucerf = grp.sources[0].orig
            logging.info('Getting background sources from %s', ucerf.source_id)
            grp.sources.extend(ucerf.get_background_sources(self.src_filter))
            for src in grp:
                self.csm.infos[src.source_id] = source.SourceInfo(src)
                grp.tot_ruptures += src.num_ruptures
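
Both versions of `pre_execute` build a `UcerfFilter` from the site collection and the maximum integration distance. Conceptually, such a filter keeps, for each source, only the sites within `maximum_distance`; a rough sketch of the idea, where the `DistanceFilter` class is illustrative and not the engine's implementation:

    import numpy

    class DistanceFilter:
        def __init__(self, site_coords, max_dist_km):
            self.site_coords = site_coords  # (N, 2) array of lon, lat pairs
            self.max_dist_km = max_dist_km

        def close_site_ids(self, src_lon, src_lat):
            # crude equirectangular distance, good enough for a sketch
            dlon = (self.site_coords[:, 0] - src_lon) * numpy.cos(
                numpy.radians(src_lat))
            dlat = self.site_coords[:, 1] - src_lat
            dist_km = numpy.hypot(dlon, dlat) * 111.32  # degrees -> km
            return numpy.where(dist_km <= self.max_dist_km)[0]
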
Code example #5
    def agg(self, acc, val):
        """
        Aggregate the ruptures and the calculation times
        """
        for trt_id in val:
            ltbrid, dt = val.calc_times[trt_id]
            info = source.SourceInfo(
                trt_id,
                ltbrid,
                source_class=UCERFSESControl.__name__,  # not .__class__.__name__, which would be 'type'
                weight=1,
                sources=1,
                filter_time=0,
                split_time=0,
                calc_time=dt)
            self.infos.append(info)
        return acc + val
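
`agg` is the reduction callback of a map-reduce: each parallel task returns a `val` carrying partial results plus per-source `calc_times`, and successive values are folded pairwise into the accumulator. A minimal sketch of how such a callback is driven; the plain dictionaries stand in for the engine's result objects:

    import functools

    def agg(acc, val):
        # record how long each source took, then merge the payload
        for trt_id, (ltbrid, dt) in val['calc_times'].items():
            acc.setdefault('times', []).append((trt_id, ltbrid, dt))
        acc['num_ruptures'] = acc.get('num_ruptures', 0) + val['num_ruptures']
        return acc

    results = [
        {'calc_times': {0: ('b1', 1.5)}, 'num_ruptures': 10},
        {'calc_times': {1: ('b2', 0.7)}, 'num_ruptures': 4},
    ]
    print(functools.reduce(agg, results, {}))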