Example #1
    def test_case_7(self):
        # test with 7+2 ruptures of two source models, 1 GSIM, 1 site
        self.run_calc(case_7.__file__, 'job.ini')
        ctxs0 = read_ctxs(self.calc.datastore, 'mag_7.70', gidx=0)[0]
        ctxs1 = read_ctxs(self.calc.datastore, 'mag_7.70', gidx=1)[0]
        self.assertEqual(len(ctxs0), 7)  # rlz-0, the closest to the mean
        self.assertEqual(len(ctxs1), 2)  # rlz-1, the one to discard
        # checking that the wrong realization is indeed discarded
        pd = self.calc.datastore['performance_data'][:]
        pd = pd[pd['operation'] == b'disaggregate']
        self.assertEqual(pd['counts'], 1)  # because g_by_z is empty

        haz = self.calc.datastore['hmap4'][0, 0, :, 0]  # shape NMPZ
        self.assertEqual(haz[0], 0)  # shortest return period => 0 hazard
        self.assertEqual(haz[1], 0.18757115242025785)
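The performance_data check above filters a NumPy structured array by its operation field and asserts that the disaggregate operation ran exactly once. A minimal self-contained sketch of that filtering pattern, with made-up values (only the field names mirror the datastore layout used in the test):

import numpy

# hypothetical performance records: one 'disaggregate' call, three context reads
pd = numpy.array([(b'disaggregate', 1), (b'read ctxs', 3)],
                 dtype=[('operation', 'S32'), ('counts', numpy.int64)])
pd = pd[pd['operation'] == b'disaggregate']
assert pd['counts'] == 1  # a single disaggregate call, as asserted in the test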
Example #2
    def test_case_7(self):
        # test with 7+2 ruptures of two source models, 1 GSIM, 1 site
        self.run_calc(case_7.__file__, 'job.ini')
        ctxs, _ = read_ctxs(self.calc.datastore)
        ctxs0 = [ctx for ctx in ctxs if ctx.grp_id == 0]
        ctxs1 = [ctx for ctx in ctxs if ctx.grp_id == 1]
        self.assertEqual(len(ctxs0), 7)  # rlz-0, the closest to the mean
        self.assertEqual(len(ctxs1), 2)  # rlz-1, the one to discard
        # checking that the wrong realization is indeed discarded
        pd = self.calc.datastore['performance_data'][:]
        pd = pd[pd['operation'] == b'disaggregate']
        self.assertEqual(pd['counts'], 1)  # because g_by_z is empty

        haz = self.calc.datastore['hmap4'][0, 0, :, 0]  # shape NMPZ
        self.assertEqual(haz[0], 0)  # shortest return period => 0 hazard
        self.assertEqual(haz[1], 0.18757115242025785)

        # test normal disaggregation
        [fname] = export(('disagg', 'csv'), self.calc.datastore)
        self.assertEqualFiles('expected/rlz-0-PGA-sid-0-poe-1_TRT.csv', fname)

        # test conditional disaggregation
        [fname] = export(('disagg_traditional', 'csv'), self.calc.datastore)
        self.assertEqualFiles('expected/rlz-0-PGA-sid-0-poe-1-cond_TRT.csv',
                              fname)
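In this later variant read_ctxs returns all contexts at once and the split per source group is done in plain Python via the grp_id attribute. A self-contained sketch of that filtering with stand-in objects (SimpleNamespace is used here only for illustration):

from types import SimpleNamespace

# 7 + 2 ruptures spread over two source groups, as in the test above
ctxs = [SimpleNamespace(grp_id=0)] * 7 + [SimpleNamespace(grp_id=1)] * 2
ctxs0 = [ctx for ctx in ctxs if ctx.grp_id == 0]
ctxs1 = [ctx for ctx in ctxs if ctx.grp_id == 1]
assert (len(ctxs0), len(ctxs1)) == (7, 2)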
Example #3
def conditional_spectrum(dstore, slc, cmaker, monitor):
    """
    :param dstore:
        a DataStore instance
    :param slc:
        a slice of ruptures
    :param cmaker:
        a :class:`openquake.hazardlib.gsim.base.ContextMaker` instance
    :param monitor:
        monitor of the currently running job
    :returns:
        dictionary grp_id -> poes of shape (N, L, G)
    """
    RuptureContext.temporal_occurrence_model = PoissonTOM(
        cmaker.investigation_time)
    with monitor('reading contexts', measuremem=True):
        dstore.open('r')
        allctxs, _close = read_ctxs(dstore, slc)
    N, L, G = len(_close), cmaker.imtls.size, len(cmaker.gsims)
    acc = numpy.ones((N, L, G))
    for ctx, poes in cmaker.gen_ctx_poes(allctxs):
        acc *= ctx.get_probability_no_exceedance(poes)
    return {cmaker.grp_id: 1 - acc}
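The loop above combines ruptures multiplicatively: each rupture contributes its probability of no exceedance, and the complement of the product is the probability that at least one rupture exceeds the intensity levels. A small numeric sketch of that aggregation (the per-rupture poes values are made up, and the no-exceedance probability is simplified to 1 - poes, ignoring the temporal occurrence model actually used by get_probability_no_exceedance):

import numpy

# probabilities of exceedance for 2 ruptures at 3 intensity levels
poes_by_rupture = numpy.array([[0.10, 0.05, 0.01],
                               [0.20, 0.08, 0.02]])
acc = numpy.ones(3)
for poes in poes_by_rupture:
    acc *= 1.0 - poes  # probability that this rupture does not exceed
print(1.0 - acc)       # ~[0.28, 0.126, 0.0298]: at least one rupture exceeds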
Example #4
def compute_disagg(dstore, rctx, cmaker, hmap4, trti, bin_edges, oq, monitor):
    # see https://bugs.launchpad.net/oq-engine/+bug/1279247 for an explanation
    # of the algorithm used
    """
    :param dstore:
        a DataStore instance
    :param rctx:
        an array of rupture parameters
    :param cmaker:
        a :class:`openquake.hazardlib.gsim.base.ContextMaker` instance
    :param hmap4:
        an ArrayWrapper of shape (N, M, P, Z)
    :param trti:
        tectonic region type index
    :param bin_edges:
        a quintet (mag_edges, dist_edges, lon_edges, lat_edges, eps_edges)
    :param oq:
        the job parameters (providing investigation_time, imtls and
        num_epsilon_bins)
    :param monitor:
        monitor of the currently running job
    :returns:
        a dictionary sid, imti -> 6D-array
    """
    RuptureContext.temporal_occurrence_model = PoissonTOM(
        oq.investigation_time)
    with monitor('reading contexts', measuremem=True):
        dstore.open('r')
        ctxs, close_ctxs = read_ctxs(
            dstore, rctx, req_site_params=cmaker.REQUIRES_SITES_PARAMETERS)

    magi = numpy.searchsorted(bin_edges[0], rctx[0]['mag']) - 1
    if magi == -1:  # when the magnitude is on the edge
        magi = 0
    dis_mon = monitor('disaggregate', measuremem=False)
    ms_mon = monitor('disagg mean_std', measuremem=True)
    N, M, P, Z = hmap4.shape
    g_by_z = AccumDict(accum={})  # dict s -> z -> g
    for g, rlzs in enumerate(cmaker.gsims.values()):
        for (s, z), r in numpy.ndenumerate(hmap4.rlzs):
            if r in rlzs:
                g_by_z[s][z] = g
    eps3 = disagg._eps3(cmaker.trunclevel, oq.num_epsilon_bins)
    res = {'trti': trti, 'magi': magi}
    imts = [from_string(im) for im in oq.imtls]
    with ms_mon:
        # compute mean and std for a single IMT to save memory
        # the size is N * U * G * 16 bytes
        disagg.set_mean_std(ctxs, imts, cmaker.gsims)

    # disaggregate by site, IMT
    for s, iml3 in enumerate(hmap4):
        if not g_by_z[s] or not close_ctxs[s]:
            # g_by_z[s] is empty in test case_7
            continue
        # dist_bins, lon_bins, lat_bins, eps_bins
        bins = (bin_edges[1], bin_edges[2][s], bin_edges[3][s], bin_edges[4])
        iml2 = dict(zip(imts, iml3))
        with dis_mon:
            # 7D-matrix #distbins, #lonbins, #latbins, #epsbins, M, P, Z
            matrix = disagg.disaggregate(close_ctxs[s], g_by_z[s], iml2, eps3,
                                         s, bins)  # 7D-matrix
            for m in range(M):
                mat6 = matrix[..., m, :, :]
                if mat6.any():
                    res[s, m] = output(mat6)
    return res
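The magnitude bin index above comes from numpy.searchsorted on the first element of bin_edges, with magnitudes falling exactly on the lowest edge clamped into bin 0. A self-contained sketch of that binning (the edges and magnitudes are made up):

import numpy

mag_edges = numpy.array([5.0, 6.0, 7.0, 8.0])
for mag in (5.0, 6.5, 7.7):
    magi = numpy.searchsorted(mag_edges, mag) - 1
    if magi == -1:  # the magnitude sits exactly on the lowest edge
        magi = 0
    print(mag, '->', magi)  # 5.0 -> 0, 6.5 -> 1, 7.7 -> 2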
Example #5
def compute_disagg(dstore, slc, cmaker, hmap4, magi, bin_edges, monitor):
    # see https://bugs.launchpad.net/oq-engine/+bug/1279247 for an explanation
    # of the algorithm used
    """
    :param dstore:
        a DataStore instance
    :param slc:
        a slice of ruptures
    :param cmaker:
        a :class:`openquake.hazardlib.gsim.base.ContextMaker` instance
    :param hmap4:
        an ArrayWrapper of shape (N, M, P, Z)
    :param magi:
        magnitude bin indices
    :param bin_edges:
        a quintet (mag_edges, dist_edges, lon_edges, lat_edges, eps_edges)
    :param monitor:
        monitor of the currently running job
    :returns:
        a dictionary sid, imti -> 6D-array
    """
    RuptureContext.temporal_occurrence_model = PoissonTOM(
        cmaker.investigation_time)
    with monitor('reading contexts', measuremem=True):
        dstore.open('r')
        allctxs, ctxs_around_site = read_ctxs(dstore, slc)
        for magidx, ctx in zip(magi, allctxs):
            ctx.magi = magidx
    dis_mon = monitor('disaggregate', measuremem=False)
    ms_mon = monitor('disagg mean_std', measuremem=True)
    N, M, P, Z = hmap4.shape
    g_by_z = AccumDict(accum={})  # dict s -> z -> g
    for g, rlzs in enumerate(cmaker.gsims.values()):
        for (s, z), r in numpy.ndenumerate(hmap4.rlzs):
            if r in rlzs:
                g_by_z[s][z] = g
    eps3 = disagg._eps3(cmaker.trunclevel, cmaker.num_epsilon_bins)
    imts = [from_string(im) for im in cmaker.imtls]
    for magi, ctxs in groupby(allctxs, operator.attrgetter('magi')).items():
        res = {'trti': cmaker.trti, 'magi': magi}
        with ms_mon:
            # compute mean and std (N * U * M * G * 16 bytes)
            disagg.set_mean_std(ctxs, imts, cmaker.gsims)

        # disaggregate by site, IMT
        for s, iml3 in enumerate(hmap4):
            close = [ctx for ctx in ctxs_around_site[s] if ctx.magi == magi]
            if not g_by_z[s] or not close:
                # g_by_z[s] is empty in test case_7
                continue
            # dist_bins, lon_bins, lat_bins, eps_bins
            bins = (bin_edges[1], bin_edges[2][s], bin_edges[3][s],
                    bin_edges[4])
            iml2 = dict(zip(imts, iml3))
            with dis_mon:
                # 7D-matrix #distbins, #lonbins, #latbins, #epsbins, M, P, Z
                matrix = disagg.disaggregate(close, g_by_z[s], iml2, eps3, s,
                                             bins)  # 7D-matrix
                for m in range(M):
                    mat6 = matrix[..., m, :, :]
                    if mat6.any():
                        res[s, m] = output(mat6)
        yield res
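Example #5 moves the magnitude binning out of the task and tags each context with a magi attribute, then disaggregates one magnitude bin at a time. The groupby used above is imported in the original module; the grouping itself can be sketched with a plain dictionary and stand-in objects:

from types import SimpleNamespace

allctxs = [SimpleNamespace(magi=m) for m in (0, 1, 1, 2, 0)]
by_magi = {}
for ctx in allctxs:
    by_magi.setdefault(ctx.magi, []).append(ctx)
for magi, ctxs in sorted(by_magi.items()):
    print(magi, len(ctxs))  # one group of contexts per magnitude bin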