def _disaggregate(cmaker, sitecol, ctxs, iml2, eps3,
                  pne_mon=performance.Monitor(),
                  gmf_mon=performance.Monitor()):
    # disaggregate (separate) PoE in different contributions
    # returns AccumDict with keys (poe, imt) and mags, dists, lons, lats
    acc = dict(pnes=[], mags=[], dists=[], lons=[], lats=[])
    try:
        gsim = cmaker.gsim_by_rlzi[iml2.rlzi]
    except KeyError:
        return pack(acc, 'mags dists lons lats pnes'.split())
    for rctx, dctx in ctxs:
        [dist] = dctx.rrup
        if gsim.minimum_distance and dist < gsim.minimum_distance:
            dist = gsim.minimum_distance
        acc['mags'].append(rctx.mag)
        acc['lons'].append(dctx.lon)
        acc['lats'].append(dctx.lat)
        acc['dists'].append(dist)
        with gmf_mon:
            mean_std = get_mean_std(
                sitecol, rctx, dctx, iml2.imts, [gsim])[..., 0]  # (2, N, M)
        with pne_mon:
            iml = numpy.array([
                to_distribution_values(lvl, imt)
                for imt, lvl in zip(iml2.imts, iml2)])  # shape (M, P)
            pne = _disaggregate_pne(rctx, mean_std, iml, *eps3)
            acc['pnes'].append(pne)
    return pack(acc, 'mags dists lons lats pnes'.split())
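# A minimal sketch (an assumption, not taken from the source) of how the
# `eps3` triplet consumed above could be built: a truncated normal
# distribution, the epsilon bin edges and the probability mass of each
# epsilon band. The names `truncation_level` and `n_epsilons` are
# illustrative only.
import numpy
from scipy import stats

truncation_level, n_epsilons = 3.0, 4
truncnorm = stats.truncnorm(-truncation_level, truncation_level)
epsilons = numpy.linspace(-truncation_level, truncation_level, n_epsilons + 1)
eps_bands = truncnorm.cdf(epsilons[1:]) - truncnorm.cdf(epsilons[:-1])
eps3 = truncnorm, epsilons, eps_bands  # as expected by the functions above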
def disaggregate(mean_std, rups, imt, imls, eps3,
                 pne_mon=performance.Monitor()):
    # disaggregate (separate) PoE in different contributions
    U, P, E = len(rups), len(imls), len(eps3[2])
    bdata = BinData(mags=numpy.zeros(U), dists=numpy.zeros(U),
                    lons=numpy.zeros(U), lats=numpy.zeros(U),
                    pnes=numpy.zeros((U, P, E)))
    with pne_mon:
        truncnorm, epsilons, eps_bands = eps3
        cum_bands = numpy.array([eps_bands[e:].sum() for e in range(E)] + [0])
        imls = to_distribution_values(imls, imt)  # shape P
        for u, rup in enumerate(rups):
            bdata.mags[u] = rup.mag
            bdata.lons[u] = rup.lon
            bdata.lats[u] = rup.lat
            bdata.dists[u] = rup.rrup[0]
        for p, iml in enumerate(imls):
            lvls = (iml - mean_std[0]) / mean_std[1]
            survival = truncnorm.sf(lvls)
            bins = numpy.searchsorted(epsilons, lvls)
            for e, eps_band in enumerate(eps_bands):
                poes = _disagg_eps(survival, bins, e, eps_band, cum_bands)
                for u, rup in enumerate(rups):
                    bdata.pnes[u, p, e] = rup.get_probability_no_exceedance(
                        poes[u])
    return bdata
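# A toy numeric check of the epsilon-band bookkeeping assumed above (the
# intended semantics of _disagg_eps as we read it here, not its actual
# implementation): the survival probability sf(lvl) of a normalized level is
# split into one contribution per epsilon band, and the pieces sum back to it.
import numpy
from scipy import stats

truncation_level, E = 3.0, 6
truncnorm = stats.truncnorm(-truncation_level, truncation_level)
epsilons = numpy.linspace(-truncation_level, truncation_level, E + 1)
eps_bands = truncnorm.cdf(epsilons[1:]) - truncnorm.cdf(epsilons[:-1])
cum_bands = numpy.array([eps_bands[e:].sum() for e in range(E)] + [0])

lvl = 0.7  # normalized level (iml - mean) / std for one rupture
survival = truncnorm.sf(lvl)
# contribution of band e = P(epsilon >= lvl and epsilon falls in band e)
contrib = numpy.clip(
    numpy.minimum(survival, cum_bands[:-1]) - cum_bands[1:], 0, None)
assert numpy.isclose(contrib.sum(), survival)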
def disaggregate(ctxs, g_by_z, iml2dict, eps3, sid=0, bin_edges=()):
    """
    :param ctxs: a list of U fat RuptureContexts
    :param g_by_z: an array of gsim indices, one per realization index z
    :param iml2dict: a dictionary of arrays imt -> (P, Z)
    :param eps3: a triplet (truncnorm, epsilons, eps_bands)
    :param sid: the site ID being disaggregated
    :param bin_edges: disaggregation bin edges (if empty, return raw BinData)
    """
    # disaggregate (separate) PoE in different contributions
    U, E, M = len(ctxs), len(eps3[2]), len(iml2dict)
    iml2 = next(iter(iml2dict.values()))
    P, Z = iml2.shape
    dists = numpy.zeros(U)
    lons = numpy.zeros(U)
    lats = numpy.zeros(U)

    # switch to logarithmic intensities
    iml3 = numpy.zeros((M, P, Z))
    for m, (imt, iml2) in enumerate(iml2dict.items()):
        # 0 values are converted into -inf
        iml3[m] = to_distribution_values(iml2, imt)

    truncnorm, epsilons, eps_bands = eps3
    cum_bands = numpy.array([eps_bands[e:].sum() for e in range(E)] + [0])
    G = len(ctxs[0].mean_std)
    mean_std = numpy.zeros((2, U, M, G), numpy.float32)
    for u, ctx in enumerate(ctxs):
        if not hasattr(ctx, 'idx'):  # assume single site
            idx = 0
        else:
            idx = ctx.idx[sid]
        dists[u] = ctx.rrup[idx]  # distance to the site
        lons[u] = ctx.clon[idx]  # closest point of the rupture lon
        lats[u] = ctx.clat[idx]  # closest point of the rupture lat
        for g in range(G):
            mean_std[:, u, :, g] = ctx.mean_std[g][:, idx]  # (2, M)
    poes = numpy.zeros((U, E, M, P, Z))
    pnes = numpy.ones((U, E, M, P, Z))
    for (m, p, z), iml in numpy.ndenumerate(iml3):
        if iml == -numpy.inf:  # zero hazard
            continue
        # discard the z contributions coming from wrong realizations: see
        # the test disagg/case_2
        try:
            g = g_by_z[z]
        except KeyError:
            continue
        lvls = (iml - mean_std[0, :, m, g]) / mean_std[1, :, m, g]
        idxs = numpy.searchsorted(epsilons, lvls)
        poes[:, :, m, p, z] = _disagg_eps(
            truncnorm.sf(lvls), idxs, eps_bands, cum_bands)
    for u, ctx in enumerate(ctxs):
        pnes[u] *= ctx.get_probability_no_exceedance(poes[u])  # this is slow
    bindata = BinData(dists, lons, lats, pnes)
    DEBUG[idx].append(pnes.mean())
    if not bin_edges:
        return bindata
    return _build_disagg_matrix(bindata, bin_edges)
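# A minimal sketch (an assumption about the context API, not the hazardlib
# code) of what ctx.get_probability_no_exceedance amounts to for a Poissonian
# rupture: with occurrence rate `rate` over an investigation time `time_span`,
# the probability of not exceeding a level with conditional PoE `poes` is
# exp(-rate * time_span * poes).
import numpy

def poissonian_pne(rate, time_span, poes):
    return numpy.exp(-rate * time_span * numpy.asarray(poes))

# e.g. rate 0.01/yr, a 50 yr window and a 30% conditional PoE give pne ~ 0.86
print(poissonian_pne(0.01, 50., 0.3))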
def disaggregate(ctxs, mean_std, zs_by_g, iml2dict, eps3, sid=0, bin_edges=(),
                 pne_mon=performance.Monitor(),
                 mat_mon=performance.Monitor()):
    """
    :param ctxs: a list of U fat RuptureContexts
    :param mean_std: an array of shape (2, U, N, M, G) of means and stddevs
    :param zs_by_g: a dictionary g -> Z indices
    :param iml2dict: a dictionary of arrays imt -> (P, Z)
    :param eps3: a triplet (truncnorm, epsilons, eps_bands)
    :param sid: the site ID being disaggregated
    :param bin_edges: disaggregation bin edges (if empty, return raw BinData)
    :param pne_mon: monitor for the probabilities of no exceedance
    :param mat_mon: monitor for building the disaggregation matrix
    """
    # disaggregate (separate) PoE in different contributions
    U, E, M = len(ctxs), len(eps3[2]), len(iml2dict)
    iml2 = next(iter(iml2dict.values()))
    P, Z = iml2.shape
    dists = numpy.zeros(U)
    lons = numpy.zeros(U)
    lats = numpy.zeros(U)

    # switch to logarithmic intensities
    iml3 = numpy.zeros((M, P, Z))
    for m, (imt, iml2) in enumerate(iml2dict.items()):
        iml3[m] = to_distribution_values(iml2, imt)

    truncnorm, epsilons, eps_bands = eps3
    cum_bands = numpy.array([eps_bands[e:].sum() for e in range(E)] + [0])
    for u, ctx in enumerate(ctxs):
        dists[u] = ctx.rrup[sid]  # distance to the site
        lons[u] = ctx.clon[sid]  # closest point of the rupture lon
        lats[u] = ctx.clat[sid]  # closest point of the rupture lat
    with pne_mon:
        poes = numpy.zeros((U, E, M, P, Z))
        pnes = numpy.ones((U, E, M, P, Z))
        for g, zs in zs_by_g.items():
            for (m, p, z), iml in numpy.ndenumerate(iml3):
                if z in zs:
                    lvls = (iml - mean_std[0, :, sid, m, g]) / (
                        mean_std[1, :, sid, m, g])
                    idxs = numpy.searchsorted(epsilons, lvls)
                    poes[:, :, m, p, z] = _disagg_eps(
                        truncnorm.sf(lvls), idxs, eps_bands, cum_bands)
        for u, ctx in enumerate(ctxs):
            pnes[u] *= ctx.get_probability_no_exceedance(poes[u])
    bindata = BinData(dists, lons, lats, pnes)
    if not bin_edges:
        return bindata
    with mat_mon:
        return _build_disagg_matrix(bindata, bin_edges)
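# A hypothetical sketch of what _build_disagg_matrix is assumed to do with the
# BinData returned above, reduced to a single distance axis for brevity:
# digitize each rupture into its bin, multiply the probabilities of no
# exceedance within the bin, then convert back to probabilities of exceedance.
# Names and shapes are illustrative, not the library's implementation.
import numpy

def build_dist_matrix_sketch(dists, pnes, dist_edges):
    # pnes has shape (U, ...); the result has shape (D, ...) with D bins
    D = len(dist_edges) - 1
    mat = numpy.ones((D,) + pnes.shape[1:])
    bins = numpy.digitize(dists, dist_edges) - 1
    for u, b in enumerate(bins):
        mat[b] *= pnes[u]   # accumulate no-exceedance in the distance bin
    return 1. - mat         # probability of exceedance per bin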
def _disaggregate(cmaker, sitecol, rupdata, indices, iml2, eps3,
                  pne_mon=performance.Monitor(),
                  gmf_mon=performance.Monitor()):
    # disaggregate (separate) PoE in different contributions
    # returns AccumDict with keys (poe, imt) and mags, dists, lons, lats
    [sid] = sitecol.sids
    acc = dict(pnes=[], mags=[], dists=[], lons=[], lats=[])
    try:
        gsim = cmaker.gsim_by_rlzi[iml2.rlzi]
    except KeyError:
        return pack(acc, 'mags dists lons lats pnes'.split())
    maxdist = cmaker.maximum_distance(cmaker.trt)
    fildist = rupdata[cmaker.filter_distance + '_']
    for ridx, sidx in enumerate(indices):
        if sidx == -1:  # no contribution for this site
            continue
        dist = fildist[ridx][sidx]
        if dist >= maxdist:
            continue
        elif gsim.minimum_distance and dist < gsim.minimum_distance:
            dist = gsim.minimum_distance
        rctx = contexts.RuptureContext(
            (par, val[ridx]) for par, val in rupdata.items())
        dctx = contexts.DistancesContext(
            (param, getattr(rctx, param + '_')[[sidx]])
            for param in cmaker.REQUIRES_DISTANCES)
        acc['mags'].append(rctx.mag)
        acc['lons'].append(rctx.lon_[sidx])
        acc['lats'].append(rctx.lat_[sidx])
        acc['dists'].append(dist)
        with gmf_mon:
            mean_std = get_mean_std(
                sitecol, rctx, dctx, iml2.imts, [gsim])[..., 0]  # (2, N, M)
        with pne_mon:
            iml = numpy.array([
                to_distribution_values(lvl, imt)
                for imt, lvl in zip(iml2.imts, iml2)])  # shape (M, P)
            pne = _disaggregate_pne(rctx, mean_std, iml, *eps3)
            acc['pnes'].append(pne)
    return pack(acc, 'mags dists lons lats pnes'.split())
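# A minimal sketch of the assumed role of to_distribution_values in the
# functions above: intensity levels are mapped into the (natural) log domain
# of the ground-motion distribution, so that (iml - mean) / std is an epsilon
# value and a zero intensity maps to -inf ("zero hazard"). This mirrors the
# usage in this module, not the actual hazardlib implementation.
import numpy

def to_log_levels(levels):
    with numpy.errstate(divide='ignore'):
        return numpy.log(numpy.asarray(levels, float))  # log(0) -> -inf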