Example #1
 def poe_map(self, src, s_sites, imtls, trunclevel, rup_indep=True):
     """
     :param src: a source object
     :param s_sites: a filtered SiteCollection of sites around the source
     :param imtls: intensity measure and levels
     :param trunclevel: truncation level
     :param rup_indep: True if the ruptures are independent
     :returns: a ProbabilityMap instance
     """
     pmap = ProbabilityMap.build(len(imtls.array),
                                 len(self.gsims),
                                 s_sites.sids,
                                 initvalue=rup_indep)
     eff_ruptures = 0
     for rup, sctx, dctx in self.gen_rup_contexts(src, s_sites):
         eff_ruptures += 1
         with self.poe_mon:
             pnes = self._make_pnes(rup, sctx, dctx, imtls, trunclevel)
             for sid, pne in zip(sctx.sids, pnes):
                 if rup_indep:
                     pmap[sid].array *= pne
                 else:
                     pmap[sid].array += (1. - pne) * rup.weight
     if rup_indep:
         pmap = ~pmap
     pmap.eff_ruptures = eff_ruptures
     return pmap
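
The `pmap[sid].array *= pne` loop plus the final `~pmap` implement the usual independence assumption: the probability of exceedance is one minus the product of the per-rupture probabilities of no exceedance. A minimal numpy sketch of that aggregation (the function and variable names below are illustrative, not part of the OpenQuake API):

import numpy

def combine_independent(pnes):
    """
    :param pnes: array of shape (R, L), one row of PNEs per rupture
    :returns: L probabilities of exceedance
    """
    # start from 1 (the ``initvalue=rup_indep`` above) and multiply the
    # no-exceedance probabilities rupture by rupture
    pne_total = numpy.prod(pnes, axis=0)
    # the final ``~pmap`` corresponds to taking the complement 1 - PNE
    return 1. - pne_total

# three ruptures, two intensity levels
pnes = numpy.array([[0.99, 0.999], [0.95, 0.99], [0.90, 0.98]])
print(combine_independent(pnes))  # -> approximately [0.154, 0.031]
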
Example #2
 def init(self, pmaps, grp_id):
     """
     Initialize the pmaps dictionary with zeros, if needed
     """
     if grp_id not in pmaps:
         L, G = self.imtls.size, len(self.rlzs_by_gsim_list[grp_id])
         pmaps[grp_id] = ProbabilityMap.build(L, G, self.sids)
Example #3
 def make_pmap(self, ruptures, imtls, trunclevel, rup_indep):
     """
     :param ruptures: a list of "dressed" ruptures
     :param imtls: intensity measure and levels
     :param trunclevel: truncation level
     :param rup_indep: True if the ruptures are independent
     :returns: a ProbabilityMap instance
     """
     sids = set()
     for rup in ruptures:
         sids.update(rup.sctx.sids)
     pmap = ProbabilityMap.build(len(imtls.array),
                                 len(self.gsims),
                                 sids,
                                 initvalue=rup_indep)
     for rup in ruptures:
         pnes = self._make_pnes(rup, imtls, trunclevel)
         for sid, pne in zip(rup.sctx.sids, pnes):
             if rup_indep:
                 pmap[sid].array *= pne
             else:
                 pmap[sid].array += pne * rup.weight
     tildemap = ~pmap
     tildemap.eff_ruptures = len(ruptures)
     return tildemap
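
When `rup_indep` is False the ruptures are treated as mutually exclusive: the map starts from zero (`initvalue=rup_indep`), the weighted PNEs are summed, and the final `~pmap` yields the weighted probability of exceedance, because the rupture weights sum to one. This is also why other versions above accumulate `(1. - pne) * rup.weight` directly and skip the inversion. A scalar sketch of that identity (illustrative only, not OpenQuake code):

import numpy

# mutually exclusive ruptures (rup_indep=False), weights summing to one:
# 1 - sum(w_r * pne_r) == sum(w_r * (1 - pne_r))
pnes = numpy.array([0.99, 0.95, 0.90])
weights = numpy.array([0.5, 0.3, 0.2])
poe1 = 1. - numpy.sum(weights * pnes)    # what the ``~pmap`` step computes
poe2 = numpy.sum(weights * (1. - pnes))  # weighted exceedance probabilities
assert abs(poe1 - poe2) < 1e-12
print(poe1)  # ~0.04
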
Example #4
 def poe_map(self, src, s_sites, imtls, trunclevel, rup_indep=True):
     """
     :param src: a source object
     :param s_sites: a filtered SiteCollection of sites around the source
     :param imtls: intensity measure and levels
     :param trunclevel: truncation level
     :param rup_indep: True if the ruptures are independent
     :returns: a ProbabilityMap instance
     """
     pmap = ProbabilityMap.build(
         len(imtls.array), len(self.gsims), s_sites.sids,
         initvalue=rup_indep)
     eff_ruptures = 0
     for rup, sctx, dctx in self.gen_rup_contexts(src, s_sites):
         eff_ruptures += 1
         with self.poe_mon:
             pnes = self._make_pnes(rup, sctx, dctx, imtls, trunclevel)
             for sid, pne in zip(sctx.sids, pnes):
                 if rup_indep:
                     pmap[sid].array *= pne
                 else:
                     pmap[sid].array += (1.-pne) * rup.weight
     if rup_indep:
         pmap = ~pmap
     pmap.eff_ruptures = eff_ruptures
     return pmap
Example #5
    def test(self):
        pmap1 = ProbabilityMap.build(3, 1, sids=[0, 1, 2])
        pmap1[0].array[0] = .4

        pmap2 = ProbabilityMap.build(3, 1, sids=[0, 1, 2])
        pmap2[0].array[0] = .5

        # test probability composition
        pmap = pmap1 | pmap2
        numpy.testing.assert_equal(pmap[0].array, [[.7], [0], [0]])

        # test probability multiplication
        pmap = pmap1 * pmap2
        numpy.testing.assert_equal(pmap[0].array, [[.2], [0], [0]])

        # test pmap power
        pmap = pmap1**2
        numpy.testing.assert_almost_equal(pmap[0].array, [[.16], [0], [0]])
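
The operators exercised here follow the standard probability identities: `|` composes two probabilities as 1 - (1 - a)(1 - b), while `*` and `**` act elementwise. A scalar sketch of those identities (illustrative, not the ProbabilityMap implementation):

a, b = .4, .5
assert abs((1 - (1 - a) * (1 - b)) - .7) < 1e-12  # pmap1 | pmap2
assert abs(a * b - .2) < 1e-12                    # pmap1 * pmap2
assert abs(a ** 2 - .16) < 1e-12                  # pmap1 ** 2
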
Example #6
    def get_args(self, acc0):
        """
        :returns: the arguments to pass to the Starmap, ordered by weight
        """
        oq = self.oqparam
        L = len(oq.imtls.array)
        sids = self.sitecol.complete.sids
        allargs = []
        src_groups = self.csm.src_groups
        tot_weight = 0
        et_ids = self.datastore['et_ids'][:]
        rlzs_by_gsim_list = self.full_lt.get_rlzs_by_gsim_list(et_ids)
        grp_id = 0
        for rlzs_by_gsim, sg in zip(rlzs_by_gsim_list, src_groups):
            acc0[grp_id] = ProbabilityMap.build(L, len(rlzs_by_gsim), sids)
            grp_id += 1
            for src in sg:
                src.ngsims = len(rlzs_by_gsim)
                tot_weight += src.weight
                if src.code == b'C' and src.num_ruptures > 20_000:
                    msg = ('{} is suspiciously large, containing {:_d} '
                           'ruptures with complex_fault_mesh_spacing={} km')
                    spc = oq.complex_fault_mesh_spacing
                    logging.info(msg.format(src, src.num_ruptures, spc))
        assert tot_weight
        C = oq.concurrent_tasks or 1
        max_weight = max(tot_weight / (2.5 * C), oq.min_weight)
        self.params['max_weight'] = max_weight
        logging.info('tot_weight={:_d}, max_weight={:_d}'.format(
            int(tot_weight), int(max_weight)))
        for rlzs_by_gsim, sg in zip(rlzs_by_gsim_list, src_groups):
            nb = 0
            if sg.atomic:
                # do not split atomic groups
                nb += 1
                allargs.append((sg, rlzs_by_gsim, self.params))
            else:  # regroup the sources in blocks
                blks = (groupby(sg, get_source_id).values()
                        if oq.disagg_by_src else block_splitter(
                            sg, max_weight, get_weight, sort=True))
                blocks = list(blks)
                nb += len(blocks)
                for block in blocks:
                    logging.debug('Sending %d source(s) with weight %d',
                                  len(block), sum(src.weight for src in block))
                    allargs.append((block, rlzs_by_gsim, self.params))

            w = sum(src.weight for src in sg)
            it = sorted(oq.maximum_distance.ddic[sg.trt].items())
            md = '%s->%d ... %s->%d' % (it[0] + it[-1])
            logging.info(
                'max_dist={}, gsims={}, weight={:_d}, blocks={}'.format(
                    md, len(rlzs_by_gsim), int(w), nb))
        allargs.sort(key=lambda args: sum(src.weight for src in args[0]),
                     reverse=True)
        return allargs
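
The `block_splitter(sg, max_weight, get_weight, sort=True)` call groups the sources into blocks whose total weight stays near `max_weight`, so that each task receives a comparable amount of work. A minimal greedy sketch of the idea (illustrative only; not the actual OpenQuake `block_splitter`):

from collections import namedtuple

def split_blocks(sources, max_weight, get_weight=lambda src: src.weight):
    # greedy grouping: heaviest sources first, close a block when adding
    # the next source would exceed max_weight
    block, weight = [], 0.
    for src in sorted(sources, key=get_weight, reverse=True):
        if block and weight + get_weight(src) > max_weight:
            yield block
            block, weight = [], 0.
        block.append(src)
        weight += get_weight(src)
    if block:
        yield block

Src = namedtuple('Src', 'source_id weight')
blocks = list(split_blocks([Src('a', 9), Src('b', 5), Src('c', 4)], max_weight=10))
# -> [[a], [b, c]]: two blocks, each with total weight <= 10
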
Example #7
 def poe_map(self, src, sites, imtls, trunclevel, rup_indep=True):
     """
     :param src: a source object
     :param sites: a filtered SiteCollection
     :param imtls: intensity measure and levels
     :param trunclevel: truncation level
     :param rup_indep: True if the ruptures are independent
     :returns: a ProbabilityMap instance
     """
     pmap = ProbabilityMap.build(len(imtls.array),
                                 len(self.gsims),
                                 sites.sids,
                                 initvalue=rup_indep)
     eff_ruptures = 0
     with self.ir_mon:
         if hasattr(src, 'location'):
             dist = src.location.distance_to_mesh(sites).min()
             if (self.hypo_dist_collapsing_distance is not None
                     and dist > self.hypo_dist_collapsing_distance):
                 # disable floating
                 src.hypocenter_distribution.reduce()
             if (self.nodal_dist_collapsing_distance is not None
                     and dist > self.nodal_dist_collapsing_distance):
                 # disable spinning
                 src.nodal_plane_distribution.reduce()
         rups = list(src.iter_ruptures())
     # normally len(rups) == src.num_ruptures, but in UCERF .iter_ruptures
     # discards far away ruptures: len(rups) < src.num_ruptures can happen
     if len(rups) > src.num_ruptures:
         raise ValueError('Expected at max %d ruptures, got %d' %
                          (src.num_ruptures, len(rups)))
     weight = 1. / len(rups)
     for rup in rups:
         rup.weight = weight
         try:
             with self.ctx_mon:
                 sctx, dctx = self.make_contexts(sites, rup)
         except FarAwayRupture:
             continue
         eff_ruptures += 1
         with self.poe_mon:
             pnes = self._make_pnes(rup, sctx, dctx, imtls, trunclevel)
             for sid, pne in zip(sctx.sids, pnes):
                 if rup_indep:
                     pmap[sid].array *= pne
                 else:
                     pmap[sid].array += pne * rup.weight
     pmap = ~pmap
     pmap.eff_ruptures = eff_ruptures
     return pmap
Example #8
 def execute(self):
     """
     Run in parallel `core_task(sources, sitecol, monitor)`, by
     parallelizing on the sources according to their weight and
     tectonic region type.
     """
     oq = self.oqparam
     if oq.hazard_calculation_id:
         parent = self.datastore.parent
         if '_poes' in parent:
             self.post_classical()  # repeat post-processing
             return {}
         else:  # after preclassical, like in case_36
             self.csm = parent['_csm']
             self.full_lt = parent['full_lt']
             self.datastore['source_info'] = parent['source_info'][:]
             max_weight = self.csm.get_max_weight(oq)
     else:
         max_weight = self.max_weight
     self.create_dsets()  # create the rup/ datasets BEFORE swmr_on()
     srcidx = {
         rec[0]: i
         for i, rec in enumerate(self.csm.source_info.values())
     }
     self.haz = Hazard(self.datastore, self.full_lt, srcidx)
     # only groups generating more than 1 task preallocate memory
     num_gs = [len(cm.gsims) for grp, cm in enumerate(self.haz.cmakers)]
     L = oq.imtls.size
     tiles = self.sitecol.split_max(oq.max_sites_per_tile)
     if len(tiles) > 1:
         sizes = [len(tile) for tile in tiles]
         logging.info('There are %d tiles of sizes %s', len(tiles), sizes)
         for size in sizes:
             assert size > oq.max_sites_disagg, (size, oq.max_sites_disagg)
     self.source_data = AccumDict(accum=[])
     self.n_outs = AccumDict(accum=0)
     acc = {}
     for t, tile in enumerate(tiles, 1):
         self.check_memory(len(tile), L, num_gs)
         sids = tile.sids if len(tiles) > 1 else None
         smap = self.submit(sids, self.haz.cmakers, max_weight)
         for cm in self.haz.cmakers:
             acc[cm.grp_id] = ProbabilityMap.build(L, len(cm.gsims))
         smap.reduce(self.agg_dicts, acc)
         logging.debug("busy time: %s", smap.busytime)
         logging.info('Finished tile %d of %d', t, len(tiles))
     self.store_info()
     self.haz.store_disagg(acc)
     return True
Example #9
 def get_args_pmaps(self, grp_ids, hazard):
     """
     :returns: (Starmap arguments, Pmap dictionary)
     """
     oq = self.oqparam
     L = oq.imtls.size
     sids = self.sitecol.complete.sids
     allargs = []
     src_groups = self.csm.src_groups
     tot_weight = 0
     pmapdic = {}
     for grp_id in grp_ids:
         rlzs_by_gsim = hazard.rlzs_by_gsim_list[grp_id]
         sg = src_groups[grp_id]
         pmapdic[grp_id] = ProbabilityMap.build(L, len(rlzs_by_gsim), sids)
         for src in sg:
             src.ngsims = len(rlzs_by_gsim)
             tot_weight += src.weight
             if src.code == b'C' and src.num_ruptures > 20_000:
                 msg = ('{} is suspiciously large, containing {:_d} '
                        'ruptures with complex_fault_mesh_spacing={} km')
                 spc = oq.complex_fault_mesh_spacing
                 logging.info(msg.format(src, src.num_ruptures, spc))
     assert tot_weight
     max_weight = max(tot_weight / self.ct, oq.min_weight)
     self.params['max_weight'] = max_weight
     logging.info('tot_weight={:_d}, max_weight={:_d}'.format(
         int(tot_weight), int(max_weight)))
     for grp_id in grp_ids:
         rlzs_by_gsim = hazard.rlzs_by_gsim_list[grp_id]
         sg = src_groups[grp_id]
         nb = 0
         if sg.atomic:
             # do not split atomic groups
             nb += 1
             allargs.append((sg, rlzs_by_gsim, self.params))
         else:  # regroup the sources in blocks
             blks = (groupby(sg, get_source_id).values()
                     if oq.disagg_by_src else block_splitter(
                         sg, max_weight, get_weight, sort=True))
             blocks = list(blks)
             nb += len(blocks)
             for block in blocks:
                 logging.debug('Sending %d source(s) with weight %d',
                               len(block), sum(src.weight for src in block))
                 allargs.append((block, rlzs_by_gsim, self.params))
     allargs.sort(key=lambda args: sum(src.weight for src in args[0]),
                  reverse=True)
     return allargs, pmapdic
Example #10
def poe_map(src, s_sites, imtls, cmaker, trunclevel, bbs, rup_indep, ctx_mon,
            pne_mon, disagg_mon):
    """
    Compute the ProbabilityMap generated by the given source. Also,
    store some information in the monitors and optionally in the
    bounding boxes.
    """
    pmap = ProbabilityMap.build(len(imtls.array),
                                len(cmaker.gsims),
                                s_sites.sids,
                                initvalue=rup_indep)
    try:
        for rup, weight in rupture_weight_pairs(src):
            with ctx_mon:  # compute distances
                try:
                    sctx, rctx, dctx = cmaker.make_contexts(s_sites, rup)
                except FarAwayRupture:
                    continue
            with pne_mon:  # compute probabilities and update the pmap
                pnes = get_probability_no_exceedance(rup, sctx, rctx, dctx,
                                                     imtls, cmaker.gsims,
                                                     trunclevel)
                for sid, pne in zip(sctx.sites.sids, pnes):
                    if rup_indep:
                        pmap[sid].array *= pne
                    else:
                        pmap[sid].array += pne * weight
            # add optional disaggregation information (bounding boxes)
            if bbs:
                with disagg_mon:
                    sids = set(sctx.sites.sids)
                    jb_dists = dctx.rjb
                    closest_points = rup.surface.get_closest_points(
                        sctx.sites.mesh)
                    bs = [bb for bb in bbs if bb.site_id in sids]
                    # NB: the assert below is always true; we are
                    # protecting against possible refactoring errors
                    assert len(bs) == len(jb_dists) == len(closest_points)
                    for bb, dist, p in zip(bs, jb_dists, closest_points):
                        bb.update([dist], [p.longitude], [p.latitude])
    except Exception as err:
        etype, err, tb = sys.exc_info()
        msg = 'An error occurred with source id=%s. Error: %s'
        msg %= (src.source_id, str(err))
        raise_(etype, msg, tb)
    return ~pmap
Example #11
def poe_map(src, s_sites, imtls, cmaker, trunclevel, bbs, rup_indep,
            ctx_mon, pne_mon, disagg_mon):
    """
    Compute the ProbabilityMap generated by the given source. Also,
    store some information in the monitors and optionally in the
    bounding boxes.
    """
    pmap = ProbabilityMap.build(
        len(imtls.array), len(cmaker.gsims), s_sites.sids, initvalue=rup_indep)
    try:
        for rup, weight in rupture_weight_pairs(src):
            with ctx_mon:  # compute distances
                try:
                    sctx, rctx, dctx = cmaker.make_contexts(s_sites, rup)
                except FarAwayRupture:
                    continue
            with pne_mon:  # compute probabilities and update the pmap
                pnes = get_probability_no_exceedance(
                    rup, sctx, rctx, dctx, imtls, cmaker.gsims, trunclevel)
                for sid, pne in zip(sctx.sites.sids, pnes):
                    if rup_indep:
                        pmap[sid].array *= pne
                    else:
                        pmap[sid].array += pne * weight
            # add optional disaggregation information (bounding boxes)
            if bbs:
                with disagg_mon:
                    sids = set(sctx.sites.sids)
                    jb_dists = dctx.rjb
                    closest_points = rup.surface.get_closest_points(
                        sctx.sites.mesh)
                    bs = [bb for bb in bbs if bb.site_id in sids]
                    # NB: the assert below is always true; we are
                    # protecting against possible refactoring errors
                    assert len(bs) == len(jb_dists) == len(closest_points)
                    for bb, dist, p in zip(bs, jb_dists, closest_points):
                        bb.update([dist], [p.longitude], [p.latitude])
    except Exception as err:
        etype, err, tb = sys.exc_info()
        msg = 'An error occurred with source id=%s. Error: %s'
        msg %= (src.source_id, str(err))
        raise_(etype, msg, tb)
    return ~pmap
Example #12
def poe_map(src,
            s_sites,
            imtls,
            cmaker,
            trunclevel,
            ctx_mon,
            pne_mons,
            rup_indep=True):
    """
    Compute the ProbabilityMap generated by the given source. Also,
    store some information in the monitors.
    """
    pmap = ProbabilityMap.build(len(imtls.array),
                                len(cmaker.gsims),
                                s_sites.sids,
                                initvalue=rup_indep)
    eff_ruptures = 0
    try:
        for rup, weight in rupture_weight_pairs(src):
            with ctx_mon:  # compute distances
                try:
                    sctx, rctx, dctx = cmaker.make_contexts(s_sites, rup)
                except FarAwayRupture:
                    continue
            eff_ruptures += 1
            # compute probabilities and update the pmap
            pnes = get_probability_no_exceedance(rup, sctx, rctx, dctx, imtls,
                                                 cmaker.gsims, trunclevel,
                                                 pne_mons)
            for sid, pne in zip(sctx.sites.sids, pnes):
                if rup_indep:
                    pmap[sid].array *= pne
                else:
                    pmap[sid].array += pne * weight
    except Exception as err:
        etype, err, tb = sys.exc_info()
        msg = '%s (source id=%s)' % (str(err), src.source_id)
        raise_(etype, msg, tb)
    tildemap = ~pmap
    tildemap.eff_ruptures = eff_ruptures
    return tildemap
Example #13
def make_hmap(pmap, imtls, poes):
    """
    Compute the hazard maps associated to the passed probability map.

    :param pmap: hazard curves in the form of a ProbabilityMap
    :param imtls: I intensity measure types and levels
    :param poes: P PoEs where to compute the maps
    :returns: a ProbabilityMap with size (N, I * P, 1)
    """
    I, P = len(imtls), len(poes)
    hmap = ProbabilityMap.build(I * P, 1, pmap)
    for i, imt in enumerate(imtls):
        curves = numpy.array([pmap[sid].array[imtls.slicedic[imt], 0]
                              for sid in pmap.sids])
        data = compute_hazard_maps(curves, imtls[imt], poes)  # array N x P
        for sid, value in zip(pmap.sids, data):
            array = hmap[sid].array
            for j, val in enumerate(value):
                array[i * P + j] = val
    return hmap
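
`compute_hazard_maps` inverts each hazard curve: given the PoEs computed at fixed intensity levels, it finds the intensity corresponding to each target PoE by interpolation. A simplified sketch of that inversion (illustrative; the actual routine is more careful about log spacing and out-of-range PoEs):

import numpy

def hazard_map(curve, imls, poes):
    """
    :param curve: N PoEs, decreasing as the intensity level increases
    :param imls: N intensity measure levels
    :param poes: target PoEs
    :returns: one interpolated intensity level per target PoE
    """
    # numpy.interp requires increasing x, so reverse the decreasing curve;
    # interpolating in log-log space is the usual choice for hazard curves
    logiml = numpy.interp(numpy.log(poes),
                          numpy.log(curve[::-1]),
                          numpy.log(imls[::-1]))
    return numpy.exp(logiml)

imls = numpy.array([0.01, 0.1, 0.5, 1.0])    # e.g. PGA in g
curve = numpy.array([0.9, 0.3, 0.05, 0.01])  # PoEs at those levels
print(hazard_map(curve, imls, poes=[0.1, 0.02]))  # intensities at 10% and 2% PoE
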
Example #14
 def poe_map(self, src, s_sites, imtls, trunclevel, rup_indep=True):
     """
     :param src: a source object
     :param s_sites: a filtered SiteCollection of sites around the source
     :param imtls: intensity measure and levels
     :param trunclevel: truncation level
     :param rup_indep: True if the ruptures are independent
     :returns: a ProbabilityMap instance
     """
     pmap = ProbabilityMap.build(len(imtls.array),
                                 len(self.gsims),
                                 s_sites.sids,
                                 initvalue=rup_indep)
     eff_ruptures = 0
     for rups, sites in self.get_ruptures_sites(src, s_sites):
         if len(rups) > src.num_ruptures:
             raise ValueError('Expected at max %d ruptures, got %d' %
                              (src.num_ruptures, len(rups)))
         weight = 1. / len(rups)
         for rup in rups:
             rup.weight = weight
             try:
                 with self.ctx_mon:
                     sctx, dctx = self.make_contexts(sites, rup)
             except FarAwayRupture:
                 continue
             eff_ruptures += 1
             with self.poe_mon:
                 pnes = self._make_pnes(rup, sctx, dctx, imtls, trunclevel)
                 for sid, pne in zip(sctx.sids, pnes):
                     if rup_indep:
                         pmap[sid].array *= pne
                     else:
                         pmap[sid].array += pne * rup.weight
     pmap = ~pmap
     pmap.eff_ruptures = eff_ruptures
     return pmap
Example #15
def ucerf_poe_map(hdf5, ucerf_source, rupset_idx, s_sites, imtls, cmaker,
                  trunclevel, bbs, ctx_mon, pne_mon, disagg_mon):
    """
    Compute a ProbabilityMap generated by the given set of indices.

    :param hdf5:
        UCERF file as instance of open h5py.File object
    :param ucerf_source:
        UCERFControl object
    :param list rupset_idx:
        List of rupture indices
    """
    pmap = ProbabilityMap.build(len(imtls.array), len(cmaker.gsims),
                                s_sites.sids, initvalue=1.)
    try:
        for ridx in rupset_idx:
            # Get the ucerf rupture
            if not hdf5[ucerf_source.idx_set["rate_idx"]][ridx]:
                # Ruptures seem to have a zero probability from time to time
                # If this happens, skip it
                continue

            rup, ridx_string = get_ucerf_rupture(
                hdf5, ridx,
                ucerf_source.idx_set,
                ucerf_source.tom, s_sites,
                ucerf_source.integration_distance,
                ucerf_source.mesh_spacing,
                ucerf_source.tectonic_region_type)
            if not rup:
                # rupture outside of integration distance
                continue
            with ctx_mon:  # compute distances
                try:
                    sctx, rctx, dctx = cmaker.make_contexts(s_sites, rup)
                except FarAwayRupture:
                    continue
            with pne_mon:  # compute probabilities and update the pmap
                pnes = get_probability_no_exceedance(
                    rup, sctx, rctx, dctx, imtls, cmaker.gsims, trunclevel)
                for sid, pne in zip(sctx.sites.sids, pnes):
                    pmap[sid].array *= pne

            # add optional disaggregation information (bounding boxes)
            if bbs:
                with disagg_mon:
                    sids = set(sctx.sites.sids)
                    jb_dists = dctx.rjb
                    closest_points = rup.surface.get_closest_points(
                        sctx.sites.mesh)
                    bs = [bb for bb in bbs if bb.site_id in sids]
                    # NB: the assert below is always true; we are
                    # protecting against possible refactoring errors
                    assert len(bs) == len(jb_dists) == len(closest_points)
                    for bb, dist, p in zip(bs, jb_dists, closest_points):
                        bb.update([dist], [p.longitude], [p.latitude])
    except Exception as err:
        etype, err, tb = sys.exc_info()
        msg = 'An error occurred with rupture=%s. Error: %s'
        msg %= (ridx, str(err))
        raise_(etype, msg, tb)
    return ~pmap
Example #16
 def init(self, pmaps, grp_id):
     """
     Initialize the pmaps dictionary with zeros, if needed
     """
     L, G = self.imtls.size, len(self.cmakers[grp_id].gsims)
     pmaps[grp_id] = ProbabilityMap.build(L, G, self.sids)