def _update_pmap(self, ctxs, pmap=None):
    """
    Compute the PoEs for the given contexts and fold them into a
    probability map (``self.pmap`` when no map is passed explicitly,
    i.e. the src_indep case).
    """
    if pmap is None:  # for src_indep
        pmap = self.pmap
    levels = self.loglevels
    for rupture, sites, distances in ctxs:
        # NB: kept fast on purpose, this runs inside an inner loop
        with self.gmf_mon:
            # mean_std has shape (2, N, M, G)
            mean_std = base.get_mean_std(
                sites, rupture, distances, self.imts, self.gsims)
        with self.poe_mon:
            poes = base.get_poes(
                mean_std, levels, self.trunclevel, self.gsims)
            for gi, gsim in enumerate(self.gsims):
                if not hasattr(gsim, 'weight'):
                    continue
                for imt in levels:
                    if gsim.weight[imt] == 0:
                        # the weight is set by the engine when parsing the
                        # gsim logictree; when 0 the gsim is ignored for
                        # this IMT (see _build_trts_branches)
                        poes[:, levels(imt), gi] = 0
        with self.pne_mon:
            # pnes and poes both have shape (N, L, G)
            pnes = rupture.get_probability_no_exceedance(poes)
            for grp_id in rupture.grp_ids:
                probmap = pmap[grp_id]
                for sid, pne in zip(sites.sids, pnes):
                    if self.rup_indep:
                        probmap.setdefault(sid, 1.).array *= pne
                    else:  # rup_mutex
                        probmap.setdefault(sid, 0.).array += (
                            1. - pne) * rupture.weight
def max_intensity(self, onesite, mags, dists):
    """
    :param onesite: a SiteCollection instance with a single site
    :param mags: a sequence of magnitudes
    :param dists: a sequence of distances
    :returns: an array of GMVs of shape (#mags, #dists)
    """
    assert len(onesite) == 1, onesite
    gmv = numpy.zeros((len(mags), len(dists)))
    for (m, mag), (d, dist) in itertools.product(
            enumerate(mags), enumerate(dists)):
        rctx = RuptureContext()
        for param in self.REQUIRES_RUPTURE_PARAMETERS:
            setattr(rctx, param, 0)
        rctx.mag = mag
        rctx.width = .01  # 10 meters to avoid warnings in abrahamson_2014
        dctx = DistancesContext(
            (dst, numpy.array([dist])) for dst in self.REQUIRES_DISTANCES)
        maxima = []
        for gsim in self.gsims:
            try:
                # get_mean_std returns shape (2, N, M, G); extract the M
                # mean values for this single site and gsim
                means = base.get_mean_std(
                    onesite, rctx, dctx, self.imts, [gsim])[0, 0, :, 0]
            except ValueError:  # magnitude outside of supported range
                continue
            maxima.append(means.max())
        if maxima:
            gmv[m, d] = numpy.exp(max(maxima))
    return gmv
def _disaggregate(cmaker, sitecol, ctxs, iml2, eps3,
                  pne_mon=None, gmf_mon=None):
    """
    Disaggregate (separate) the PoE into different contributions.

    :param cmaker: a ContextMaker with a ``gsim_by_rlzi`` dictionary
    :param sitecol: a SiteCollection
    :param ctxs: a sequence of (rupture context, distances context) pairs
    :param iml2: an object with attributes ``.rlzi``, ``.imts`` and levels
    :param eps3: a triple of epsilon parameters passed to _disaggregate_pne
    :param pne_mon: optional performance monitor for the PNE computation
    :param gmf_mon: optional performance monitor for the mean/std computation
    :returns: packed AccumDict with keys mags, dists, lons, lats, pnes
    """
    # NB: create fresh monitors per call; the previous Monitor() default
    # arguments were instantiated once at import time and shared (with their
    # accumulated timing state) across every call of this function
    if pne_mon is None:
        pne_mon = performance.Monitor()
    if gmf_mon is None:
        gmf_mon = performance.Monitor()
    acc = dict(pnes=[], mags=[], dists=[], lons=[], lats=[])
    try:
        gsim = cmaker.gsim_by_rlzi[iml2.rlzi]
    except KeyError:  # no gsim for this realization: nothing to contribute
        return pack(acc, 'mags dists lons lats pnes'.split())
    for rctx, dctx in ctxs:
        [dist] = dctx.rrup  # single-site distance
        if gsim.minimum_distance and dist < gsim.minimum_distance:
            # NOTE(review): only the *recorded* distance is clamped here;
            # dctx passed to get_mean_std keeps the original rrup — confirm
            # this is intentional (cf. get_mean_stdv, which clamps ctx.rrup)
            dist = gsim.minimum_distance
        acc['mags'].append(rctx.mag)
        acc['lons'].append(dctx.lon)
        acc['lats'].append(dctx.lat)
        acc['dists'].append(dist)
        with gmf_mon:
            mean_std = get_mean_std(
                sitecol, rctx, dctx, iml2.imts, [gsim])[..., 0]  # (2, N, M)
        with pne_mon:
            # convert the intensity levels into distribution values,
            # producing an array of shape (M, P)
            iml = numpy.array([
                to_distribution_values(lvl, imt)
                for imt, lvl in zip(iml2.imts, iml2)])
            pne = _disaggregate_pne(rctx, mean_std, iml, *eps3)
            acc['pnes'].append(pne)
    return pack(acc, 'mags dists lons lats pnes'.split())
def get_mean_stdv(site1, ctxs, imt, gsim):
    """
    :param site1: site collection with a single site
    :param ctxs: a list of RuptureContexts with distances
    :param imt: Intensity Measure Type
    :param gsim: GMPE instance
    :returns: a float32 array of shape (2, U), one column per context
    """
    out = numpy.zeros((2, len(ctxs)), numpy.float32)
    mindist = gsim.minimum_distance
    for idx, ctx in enumerate(ctxs):
        if mindist and ctx.rrup[0] < mindist:
            # clamp too-short distances to the GMPE minimum distance
            ctx.rrup = numpy.float32([mindist])
        out[:, idx] = get_mean_std(site1, ctx, ctx, [imt], [gsim]).reshape(2)
    return out
def _sids_poes(self, rup, r_sites, dctx, srcid):
    """
    :returns: a pair (site IDs, PoEs of shape (N, L, G))

    NB: this must stay fast, it is called inside an inner loop.
    """
    with self.gmf_mon:
        # mean_std has shape (2, N, M, G)
        mean_std = base.get_mean_std(
            r_sites, rup, dctx, self.imts, self.gsims)
    with self.poe_mon:
        levels = self.loglevels
        poes = base.get_poes(mean_std, levels, self.trunclevel, self.gsims)
        for gi, gsim in enumerate(self.gsims):
            if not hasattr(gsim, 'weight'):
                continue
            for imt in levels:
                if gsim.weight[imt] == 0:
                    # the weight is set by the engine when parsing the gsim
                    # logictree; when 0 the gsim is ignored for this IMT
                    # (see _build_trts_branches)
                    poes[:, levels(imt), gi] = 0
    return r_sites.sids, poes
def _disaggregate(cmaker, sitecol, rupdata, indices, iml2, eps3,
                  pne_mon=None, gmf_mon=None):
    """
    Disaggregate (separate) the PoE into different contributions.

    :param cmaker: a ContextMaker instance
    :param sitecol: a SiteCollection with a single site
    :param rupdata: a dict-like of per-rupture parameter arrays; distance
        arrays are stored under keys with a trailing underscore
    :param indices: one site index per rupture; -1 means no contribution
    :param iml2: an object with attributes ``.rlzi``, ``.imts`` and levels
    :param eps3: a triple of epsilon parameters passed to _disaggregate_pne
    :param pne_mon: optional performance monitor for the PNE computation
    :param gmf_mon: optional performance monitor for the mean/std computation
    :returns: packed AccumDict with keys mags, dists, lons, lats, pnes
    """
    # NB: create fresh monitors per call; the previous Monitor() default
    # arguments were instantiated once at import time and shared (with their
    # accumulated timing state) across every call of this function
    if pne_mon is None:
        pne_mon = performance.Monitor()
    if gmf_mon is None:
        gmf_mon = performance.Monitor()
    [sid] = sitecol.sids  # unpacking doubles as a single-site assertion
    acc = dict(pnes=[], mags=[], dists=[], lons=[], lats=[])
    try:
        gsim = cmaker.gsim_by_rlzi[iml2.rlzi]
    except KeyError:  # no gsim for this realization: nothing to contribute
        return pack(acc, 'mags dists lons lats pnes'.split())
    maxdist = cmaker.maximum_distance(cmaker.trt)
    fildist = rupdata[cmaker.filter_distance + '_']
    for ridx, sidx in enumerate(indices):
        if sidx == -1:  # no contribution for this site
            continue
        dist = fildist[ridx][sidx]
        if dist >= maxdist:  # rupture too far away: skip it
            continue
        elif gsim.minimum_distance and dist < gsim.minimum_distance:
            # clamp the recorded distance to the GMPE minimum distance
            dist = gsim.minimum_distance
        # rebuild the rupture context from the stored arrays
        rctx = contexts.RuptureContext(
            (par, val[ridx]) for par, val in rupdata.items())
        # extract the single-site distances for this rupture
        dctx = contexts.DistancesContext(
            (param, getattr(rctx, param + '_')[[sidx]])
            for param in cmaker.REQUIRES_DISTANCES)
        acc['mags'].append(rctx.mag)
        acc['lons'].append(rctx.lon_[sidx])
        acc['lats'].append(rctx.lat_[sidx])
        acc['dists'].append(dist)
        with gmf_mon:
            mean_std = get_mean_std(
                sitecol, rctx, dctx, iml2.imts, [gsim])[..., 0]  # (2, N, M)
        with pne_mon:
            # convert the intensity levels into distribution values,
            # producing an array of shape (M, P)
            iml = numpy.array([
                to_distribution_values(lvl, imt)
                for imt, lvl in zip(iml2.imts, iml2)])
            pne = _disaggregate_pne(rctx, mean_std, iml, *eps3)
            acc['pnes'].append(pne)
    return pack(acc, 'mags dists lons lats pnes'.split())
def get_pmap(self, src, s_sites, rup_indep=True):
    """
    :param src: a hazardlib source
    :param s_sites: the sites affected by it
    :param rup_indep: if True the ruptures are treated as independent,
        otherwise as mutually exclusive (each weighted by ``rup.weight``)
    :returns: the probability map generated by the source
    """
    imts = self.imts
    # with few sites the per-rupture data is stored for disaggregation
    fewsites = len(s_sites.complete) <= self.max_sites_disagg
    rupdata = RupData(self)
    nrups, nsites = 0, 0  # counters over the processed ruptures/sites
    L, G = len(self.imtls.array), len(self.gsims)
    poemap = ProbabilityMap(L, G)
    dists = []  # maximum distances, averaged at the end
    for rup, sites, maxdist in self._gen_rup_sites(src, s_sites):
        if maxdist is not None:
            dists.append(maxdist)
        try:
            with self.ctx_mon:
                r_sites, dctx = self.make_contexts(sites, rup, maxdist)
        except FarAwayRupture:
            # rupture beyond the maximum distance: skip it entirely
            continue
        with self.gmf_mon:
            mean_std = base.get_mean_std(  # shape (2, N, M, G)
                r_sites, rup, dctx, imts, self.gsims)
        with self.poe_mon:
            pairs = zip(r_sites.sids, self._make_pnes(rup, mean_std))
        with self.pne_mon:
            # NB: the boolean rup_indep doubles as the initial probability
            # in setdefault: 1 for independent ruptures (multiplicative
            # accumulation), 0 for mutually exclusive ones (additive)
            if rup_indep:
                for sid, pne in pairs:
                    poemap.setdefault(sid, rup_indep).array *= pne
            else:
                for sid, pne in pairs:
                    poemap.setdefault(sid, rup_indep).array += (
                        1.-pne) * rup.weight
        nrups += 1
        nsites += len(r_sites)
        if fewsites:  # store rupdata
            rupdata.add(rup, src.id, r_sites, dctx)
    poemap.nrups = nrups
    poemap.nsites = nsites
    # mean of the collected maximum distances, or None when none were seen
    poemap.maxdist = numpy.mean(dists) if dists else None
    poemap.data = rupdata.data
    return poemap
def make_gmv(self, onesite, mags, dists):
    """
    :param onesite: a SiteCollection instance with a single site
    :param mags: a sequence of magnitudes
    :param dists: a sequence of distances
    :returns: an array of GMVs of shape (#mags, #dists)
    """
    assert len(onesite) == 1, onesite
    gmv = numpy.zeros((len(mags), len(dists)))
    imt = self.imts[-1]  # only the last IMT is used
    for (m, mag), (d, dist) in itertools.product(
            enumerate(mags), enumerate(dists)):
        rctx = RuptureContext()
        for param in self.REQUIRES_RUPTURE_PARAMETERS:
            setattr(rctx, param, 0)
        rctx.mag = mag
        dctx = DistancesContext(
            (dst, numpy.array([dist])) for dst in self.REQUIRES_DISTANCES)
        # get_mean_std returns shape (2, N, M, G); take the G mean values
        mean = base.get_mean_std(
            onesite, rctx, dctx, [imt], self.gsims)[0, 0, 0]
        gmv[m, d] = numpy.exp(mean.max())
    return gmv