Example 1
 def execute(self):
     """
     Run in parallel `core_task(sources, sitecol, monitor)`, by
     parallelizing on the sources according to their weight and
     tectonic region type.
     """
     oq = self.oqparam
     monitor = self.monitor.new(
         self.core_task.__name__,
         truncation_level=oq.truncation_level,
         imtls=oq.imtls,
         maximum_distance=oq.maximum_distance,
         disagg=oq.poes_disagg or oq.iml_disagg,
         ses_per_logic_tree_path=oq.ses_per_logic_tree_path,
         seed=oq.random_seed)
     with self.monitor('managing sources', autoflush=True):
         src_groups = list(self.csm.src_groups)
         iterargs = saving_sources_by_task(
             self.gen_args(src_groups, oq, monitor), self.datastore)
         res = parallel.starmap(
             self.core_task.__func__, iterargs).submit_all()
     acc = reduce(self.agg_dicts, res, self.zerodict())
     self.save_data_transfer(res)
     with self.monitor('store source_info', autoflush=True):
         self.store_source_info(self.infos)
     self.rlzs_assoc = self.csm.info.get_rlzs_assoc(
         partial(self.count_eff_ruptures, acc))
     self.datastore['csm_info'] = self.csm.info
     return acc
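
A note on the recurring pattern: each example builds one argument tuple per task, submits them all through `parallel.starmap(...).submit_all()`, and then folds the partial results into an accumulator (reduce plus an aggregation callback). Below is a minimal stand-alone sketch of that scatter/gather idea using only the standard library; `heavy_task` and `agg` are invented names for illustration, not the openquake.baselib.parallel API:

    import functools
    from multiprocessing import Pool

    def heavy_task(block_id, values, factor):
        # hypothetical worker: return a partial result keyed by block id
        return {block_id: sum(v * factor for v in values)}

    def agg(acc, partial):
        # fold one partial result into the accumulator
        acc.update(partial)
        return acc

    if __name__ == '__main__':
        iterargs = [('blk%d' % i, list(range(i + 1)), 2) for i in range(4)]
        with Pool(2) as pool:
            partials = pool.starmap(heavy_task, iterargs)  # scatter
        acc = functools.reduce(agg, partials, {})          # gather
        print(acc)  # {'blk0': 0, 'blk1': 2, 'blk2': 6, 'blk3': 12}

In the engine code the accumulator is typically an AccumDict or a dictionary of ProbabilityMap objects rather than a plain dict, but the control flow is the same.
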
Example 2
 def execute(self):
     num_rlzs = len(self.rlzs_assoc.realizations)
     self.grp_trt = self.csm.info.grp_trt()
     allres = parallel.starmap(compute_losses, self.gen_args()).submit_all()
     num_events = self.save_results(allres, num_rlzs)
     self.save_data_transfer(allres)
     return num_events
Example 3
    def build_starmap(self, sm_id, ruptures_by_grp, sitecol,
                      assetcol, riskmodel, imts, trunc_level, correl_model,
                      min_iml, monitor):
        """
        :param sm_id: source model ordinal
        :param ruptures_by_grp: dictionary of ruptures by src_group_id
        :param sitecol: a SiteCollection instance
        :param assetcol: an AssetCollection instance
        :param riskmodel: a RiskModel instance
        :param imts: a list of Intensity Measure Types
        :param trunc_level: truncation level
        :param correl_model: correlation model
        :param min_iml: vector of minimum intensities, one per IMT
        :param monitor: a Monitor instance
        :returns: a pair (starmap, dictionary of attributes)
        """
        csm_info = self.csm_info.get_info(sm_id)
        grp_ids = sorted(csm_info.get_sm_by_grp())
        rlzs_assoc = csm_info.get_rlzs_assoc(
            count_ruptures=lambda grp: len(ruptures_by_grp.get(grp.id, [])))
        num_events = sum(ebr.multiplicity for grp in ruptures_by_grp
                         for ebr in ruptures_by_grp[grp])
        seeds = self.oqparam.random_seed + numpy.arange(num_events)

        allargs = []
        # prepare the risk inputs
        ruptures_per_block = self.oqparam.ruptures_per_block
        start = 0
        grp_trt = csm_info.grp_trt()
        ignore_covs = self.oqparam.ignore_covs
        for grp_id in grp_ids:
            for rupts in block_splitter(
                    ruptures_by_grp.get(grp_id, []), ruptures_per_block):
                if ignore_covs or not self.riskmodel.covs:
                    eps = None
                elif self.oqparam.asset_correlation:
                    eps = EpsilonMatrix1(num_events, self.oqparam.master_seed)
                else:
                    n_events = sum(ebr.multiplicity for ebr in rupts)
                    eps = EpsilonMatrix0(
                        len(self.assetcol), seeds[start: start + n_events])
                    start += n_events
                ri = riskinput.RiskInputFromRuptures(
                    grp_trt[grp_id], rlzs_assoc, imts, sitecol,
                    rupts, trunc_level, correl_model, min_iml, eps)
                allargs.append((ri, riskmodel, assetcol, monitor))

        self.vals = self.assetcol.values()
        taskname = '%s#%d' % (event_based_risk.__name__, sm_id + 1)
        smap = starmap(event_based_risk, allargs, name=taskname)
        attrs = dict(num_ruptures={
            sg_id: len(rupts) for sg_id, rupts in ruptures_by_grp.items()},
                     num_events=num_events,
                     num_rlzs=len(rlzs_assoc.realizations),
                     sm_id=sm_id)
        return smap, attrs
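
Example 3 relies on `block_splitter` to cut the ruptures of each group into chunks of at most `ruptures_per_block` items, so that every task receives a bounded amount of work. A simplified stand-in for that kind of splitter (the real openquake.baselib helper is more general, e.g. it can weight the items):

    def split_in_fixed_blocks(items, block_size):
        """Yield consecutive chunks of `items`, each with at most
        `block_size` elements."""
        block = []
        for item in items:
            block.append(item)
            if len(block) == block_size:
                yield block
                block = []
        if block:  # emit the last, possibly shorter, chunk
            yield block

    # list(split_in_fixed_blocks(range(7), 3)) -> [[0, 1, 2], [3, 4, 5], [6]]
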
Example 4
 def test_spawn(self):
     all_data = [
         ('a', list(range(10))), ('b', list(range(20))),
         ('c', list(range(15)))]
     res = {key: parallel.starmap(get_length, [(data,)])
            for key, data in all_data}
     for key, val in res.items():
         res[key] = val.reduce()
     parallel.TaskManager.restart()
     self.assertEqual(res, {'a': {'n': 10}, 'c': {'n': 15}, 'b': {'n': 20}})
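
The expected result implies that `get_length` simply returns a dictionary of the form `{'n': len(data)}`, so the parallel map/reduce in the test is equivalent to the following sequential computation (a sketch under that assumption):

    def get_length(data):  # presumed behaviour, inferred from the assertions
        return {'n': len(data)}

    all_data = [
        ('a', list(range(10))), ('b', list(range(20))),
        ('c', list(range(15)))]
    res = {key: get_length(data) for key, data in all_data}
    assert res == {'a': {'n': 10}, 'b': {'n': 20}, 'c': {'n': 15}}
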
Example 5
 def execute(self):
     """
     Run the ucerf calculation
     """
     res = parallel.starmap(compute_events, self.gen_args()).submit_all()
     acc = self.zerodict()
     num_ruptures = {}
     for ruptures_by_grp in res:
         [(grp_id, ruptures)] = ruptures_by_grp.items()
         num_ruptures[grp_id] = len(ruptures)
         acc.calc_times.extend(ruptures_by_grp.calc_times[grp_id])
         self.save_events(ruptures_by_grp)
     self.save_data_transfer(res)
     with self.monitor('store source_info', autoflush=True):
         self.store_source_info(self.infos)
     self.datastore['csm_info'] = self.csm.info
     return num_ruptures
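
Note the idiom `[(grp_id, ruptures)] = ruptures_by_grp.items()`: it unpacks a dictionary that is expected to hold exactly one key and raises ValueError otherwise. In isolation:

    ruptures_by_grp = {3: ['rup-a', 'rup-b']}
    [(grp_id, ruptures)] = ruptures_by_grp.items()
    assert grp_id == 3 and len(ruptures) == 2
    # with zero keys, or more than one, the same unpacking raises ValueError
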
Example 6
    def execute(self):
        """
        Builds hcurves and stats from the stored PoEs
        """
        if 'poes' not in self.datastore:  # for short report
            return
        oq = self.oqparam
        rlzs = self.rlzs_assoc.realizations

        # initialize datasets
        N = len(self.sitecol)
        L = len(oq.imtls.array)
        attrs = dict(
            __pyclass__='openquake.hazardlib.probability_map.ProbabilityMap',
            sids=numpy.arange(N, dtype=numpy.uint32))
        if oq.individual_curves:
            for rlz in rlzs:
                self.datastore.create_dset(
                    'hcurves/rlz-%03d' % rlz.ordinal, F32,
                    (N, L, 1),  attrs=attrs)
        if oq.mean_hazard_curves:
            self.datastore.create_dset(
                'hcurves/mean', F32, (N, L, 1), attrs=attrs)
        for q in oq.quantile_hazard_curves:
            self.datastore.create_dset(
                'hcurves/quantile-%s' % q, F32, (N, L, 1), attrs=attrs)
        self.datastore.flush()

        logging.info('Building hazard curves')
        with self.monitor('submitting poes', autoflush=True):
            pmap_by_grp = {
                int(group_id): self.datastore['poes/' + group_id]
                for group_id in self.datastore['poes']}
            res = parallel.starmap(
                build_hcurves_and_stats,
                list(self.gen_args(pmap_by_grp))).submit_all()
        nbytes = reduce(self.save_hcurves, res, AccumDict())
        self.save_data_transfer(res)
        return nbytes
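
Example 6 first creates one empty dataset per output (individual curves, mean, quantiles), each with shape (N, L, 1) and a common set of attributes, and only later fills them from the task results. With plain h5py the preparation step would look roughly like this (a generic sketch, not the datastore API used above):

    import numpy
    import h5py

    N, L = 100, 40  # number of sites, number of intensity levels
    attrs = dict(sids=numpy.arange(N, dtype=numpy.uint32))
    names = ['hcurves/mean', 'hcurves/quantile-0.15', 'hcurves/quantile-0.85']

    with h5py.File('curves.hdf5', 'w') as f:
        for name in names:
            dset = f.create_dataset(name, (N, L, 1), dtype=numpy.float32)
            for key, value in attrs.items():
                dset.attrs[key] = value
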
Example 7
 def execute(self):
     """
     Run in parallel `core_task(sources, sitecol, monitor)`, by
     parallelizing on the ruptures according to their weight and
     tectonic region type.
     """
     oq = self.oqparam
     if not oq.hazard_curves_from_gmfs and not oq.ground_motion_fields:
         return
     ruptures_by_grp = (self.precalc.result if self.precalc
                        else get_ruptures_by_grp(self.datastore.parent))
     if self.oqparam.ground_motion_fields:
         calc.check_overflow(self)
     self.sm_id = {sm.path: sm.ordinal
                   for sm in self.csm.info.source_models}
     L = len(oq.imtls.array)
     res = parallel.starmap(
         self.core_task.__func__, self.gen_args(ruptures_by_grp)
     ).submit_all()
     acc = functools.reduce(self.combine_pmaps_and_save_gmfs, res, {
         rlz.ordinal: ProbabilityMap(L, 1)
         for rlz in self.rlzs_assoc.realizations})
     self.save_data_transfer(res)
     return acc
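
Here the accumulator passed to functools.reduce is pre-populated with one empty ProbabilityMap per realization, so realizations that receive no contribution from any task still appear in the final result. A simplified numeric stand-in for that reduction pattern:

    import functools

    def combine(acc, partial):
        # merge one per-task dictionary {rlz_id: value} into the accumulator
        for rlz_id, value in partial.items():
            acc[rlz_id] += value
        return acc

    rlz_ids = [0, 1, 2]
    task_results = [{0: 1.5}, {2: 0.5}, {0: 2.0}]
    acc = functools.reduce(combine, task_results,
                           {rlz_id: 0.0 for rlz_id in rlz_ids})
    print(acc)  # {0: 3.5, 1: 0.0, 2: 0.5}
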
Example 8
    def execute(self):
        """
        Run in parallel `core_task(sources, sitecol, monitor)`, by
        parallelizing on the sources according to their weight and
        tectonic region type.
        """
        monitor = self.monitor.new(self.core_task.__name__)
        monitor.oqparam = oq = self.oqparam
        ucerf_source = self.src_group.sources[0]
        self.src_filter = SourceFilter(self.sitecol, oq.maximum_distance)
        max_dist = oq.maximum_distance[DEFAULT_TRT]
        acc = AccumDict({
            grp_id: ProbabilityMap(len(oq.imtls.array), len(gsims))
            for grp_id, gsims in self.rlzs_assoc.gsims_by_grp_id.items()})
        acc.calc_times = []
        acc.eff_ruptures = AccumDict()  # grp_id -> eff_ruptures
        acc.bb_dict = {}

        if len(self.csm) > 1:
            # when multiple branches, parallelise by branch
            branches = [br.value for br in self.smlt.branches.values()]
            rup_res = parallel.starmap(
                ucerf_classical_hazard_by_branch,
                self.gen_args(branches, ucerf_source, monitor)).submit_all()
        else:
            # single branch
            gsims = self.rlzs_assoc.gsims_by_grp_id[0]
            [(branch_id, branch)] = self.smlt.branches.items()
            branchname = branch.value
            ucerf_source.src_group_id = 0
            ucerf_source.weight = 1
            ucerf_source.nsites = len(self.sitecol)
            self.infos[0, ucerf_source.source_id] = source.SourceInfo(
                ucerf_source)
            logging.info('Getting the background point sources')
            with self.monitor('getting background sources', autoflush=True):
                ucerf_source.build_idx_set()
                background_sids = ucerf_source.get_background_sids(
                    self.sitecol, max_dist)
                bckgnd_sources = ucerf_source.get_background_sources(
                    background_sids)

            # parallelize on the background sources, small tasks
            args = (bckgnd_sources, self.src_filter, oq.imtls,
                    gsims, self.oqparam.truncation_level, (), monitor)
            bg_res = parallel.apply(
                pmap_from_grp, args,
                concurrent_tasks=self.oqparam.concurrent_tasks).submit_all()

            # parallelize by rupture subsets
            tasks = self.oqparam.concurrent_tasks * 2  # they are big tasks
            rup_sets = ucerf_source.get_rupture_indices(branchname)
            rup_res = parallel.apply(
                ucerf_classical_hazard_by_rupture_set,
                (rup_sets, branchname, ucerf_source, self.src_group.id,
                 self.src_filter, gsims, monitor),
                concurrent_tasks=tasks).submit_all()

            # compose probabilities from background sources
            for pmap in bg_res:
                acc[0] |= pmap
            self.save_data_transfer(bg_res)

        pmap_by_grp_id = functools.reduce(self.agg_dicts, rup_res, acc)
        with self.monitor('store source_info', autoflush=True):
            self.store_source_info(self.infos)
            self.save_data_transfer(rup_res)
        self.rlzs_assoc = self.csm.info.get_rlzs_assoc(
            functools.partial(self.count_eff_ruptures, pmap_by_grp_id))
        self.datastore['csm_info'] = self.csm.info
        return pmap_by_grp_id
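
Unlike `starmap`, which receives a pre-built list of argument tuples, `parallel.apply` is given a single argument tuple and splits its first element (the background sources, or the rupture sets) into roughly `concurrent_tasks` blocks, submitting one task per block. A rough stand-in for that behaviour built on concurrent.futures (hypothetical helper, naive equal-size splitting instead of weight-aware splitting):

    import math
    from concurrent.futures import ProcessPoolExecutor

    def apply_in_chunks(func, first_arg, other_args, concurrent_tasks=4):
        """Split `first_arg` into about `concurrent_tasks` chunks and
        run `func` on each chunk in a separate process."""
        items = list(first_arg)
        chunk_size = math.ceil(len(items) / concurrent_tasks) or 1
        chunks = [items[i:i + chunk_size]
                  for i in range(0, len(items), chunk_size)]
        with ProcessPoolExecutor(max_workers=concurrent_tasks) as pool:
            futures = [pool.submit(func, chunk, *other_args)
                       for chunk in chunks]
            return [fut.result() for fut in futures]
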
Example 9
    def full_disaggregation(self):
        """
        Run the disaggregation phase after hazard curve finalization.
        """
        oq = self.oqparam
        tl = self.oqparam.truncation_level
        bb_dict = self.datastore["bb_dict"]
        sitecol = self.sitecol
        mag_bin_width = self.oqparam.mag_bin_width
        eps_edges = numpy.linspace(-tl, tl, self.oqparam.num_epsilon_bins + 1)
        logging.info("%d epsilon bins from %s to %s", len(eps_edges) - 1, min(eps_edges), max(eps_edges))

        self.bin_edges = {}
        curves_dict = {sid: self.get_curves(sid) for sid in sitecol.sids}
        all_args = []
        num_trts = sum(len(sm.src_groups) for sm in self.csm.source_models)
        nblocks = math.ceil(oq.concurrent_tasks / num_trts)
        for smodel in self.csm.source_models:
            sm_id = smodel.ordinal
            trt_names = tuple(mod.trt for mod in smodel.src_groups)
            max_mag = max(mod.max_mag for mod in smodel.src_groups)
            min_mag = min(mod.min_mag for mod in smodel.src_groups)
            mag_edges = mag_bin_width * numpy.arange(
                int(numpy.floor(min_mag / mag_bin_width)),
                int(numpy.ceil(max_mag / mag_bin_width) + 1))
            logging.info("%d mag bins from %s to %s", len(mag_edges) - 1,
                         min_mag, max_mag)
            for src_group in smodel.src_groups:
                if src_group.id not in self.rlzs_assoc.gsims_by_grp_id:
                    continue  # the group has been filtered away
                for sid, site in zip(sitecol.sids, sitecol):
                    curves = curves_dict[sid]
                    if not curves:
                        continue  # skip zero-valued hazard curves
                    bb = bb_dict[sm_id, sid]
                    if not bb:
                        logging.info("location %s was too far, skipping disaggregation", site.location)
                        continue

                    dist_edges, lon_edges, lat_edges = bb.bins_edges(
                        oq.distance_bin_width, oq.coordinate_bin_width)
                    logging.info("%d dist bins from %s to %s",
                                 len(dist_edges) - 1, min(dist_edges),
                                 max(dist_edges))
                    logging.info("%d lon bins from %s to %s",
                                 len(lon_edges) - 1, bb.west, bb.east)
                    logging.info("%d lat bins from %s to %s",
                                 len(lat_edges) - 1, bb.south, bb.north)

                    self.bin_edges[sm_id, sid] = (
                        mag_edges, dist_edges, lon_edges, lat_edges, eps_edges)

                bin_edges = {}
                for sid, site in zip(sitecol.sids, sitecol):
                    if (sm_id, sid) in self.bin_edges:
                        bin_edges[sid] = self.bin_edges[sm_id, sid]

                src_filter = SourceFilter(sitecol, oq.maximum_distance)
                split_sources = []
                for src in src_group:
                    for split, _sites in src_filter(sourceconverter.split_source(src), sitecol):
                        split_sources.append(split)
                for srcs in split_in_blocks(split_sources, nblocks):
                    all_args.append(
                        (src_filter, srcs, src_group.id, self.rlzs_assoc,
                         trt_names, curves_dict, bin_edges, oq, self.monitor))

        results = parallel.starmap(
            compute_disagg, all_args).reduce(self.agg_result)
        self.save_disagg_results(results)
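
The bin edges computed in full_disaggregation are plain numpy constructions: epsilon bins come from linspace over ±truncation_level and magnitude bins from an arange aligned to mag_bin_width. In isolation, with made-up input values:

    import numpy

    truncation_level, num_epsilon_bins = 3.0, 6
    eps_edges = numpy.linspace(-truncation_level, truncation_level,
                               num_epsilon_bins + 1)
    # [-3. -2. -1.  0.  1.  2.  3.]

    mag_bin_width, min_mag, max_mag = 0.5, 5.3, 7.1
    mag_edges = mag_bin_width * numpy.arange(
        int(numpy.floor(min_mag / mag_bin_width)),
        int(numpy.ceil(max_mag / mag_bin_width) + 1))
    # [5.  5.5 6.  6.5 7.  7.5]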