def test_calculation_addition_args(self):
    avg_periods = [0.05, 0.15, 1.0, 2.0, 4.0]
    gmm = GenericGmpeAvgSA(gmpe_name="KothaEtAl2020ESHM20",
                           avg_periods=avg_periods,
                           corr_func="akkar",
                           sigma_mu_epsilon=1.0)
    rctx = RuptureContext()
    rctx.mag = 6.
    rctx.hypo_depth = 15.
    dctx = DistancesContext()
    dctx.rjb = np.array([1., 10., 30., 70.])
    sctx = SitesContext()
    sctx.vs30 = 500.0 * np.ones(4)
    sctx.vs30measured = np.ones(4, dtype="bool")
    sctx.region = np.zeros(4, dtype=int)
    stdt = [const.StdDev.TOTAL]
    expected_mean = np.array(
        [-1.72305707, -2.2178751, -3.20100306, -4.19948242])
    expected_stddev = np.array(
        [0.5532021, 0.5532021, 0.5532021, 0.5532021])
    imtype = imt.AvgSA()
    mean, [stddev] = gmm.get_mean_and_stddevs(sctx, rctx, dctx, imtype, stdt)
    np.testing.assert_almost_equal(mean, expected_mean)
    np.testing.assert_almost_equal(stddev, expected_stddev)
def test_param(self):
    ctxs = [RuptureContext([('occurrence_rate', .001)]),
            RuptureContext([('occurrence_rate', .002)])]
    for poe in (.1, .5, .9):
        c1, pnes1 = compose(ctxs, poe)
        c2, pnes2 = compose(_collapse(ctxs), poe)
        aac(c1, c2)  # the same
def test_calculation_addition_args(self):
    avg_periods = [0.05, 0.15, 1.0, 2.0, 4.0]
    gmm = GenericGmpeAvgSA(gmpe_name="KothaEtAl2019SERA",
                           avg_periods=avg_periods,
                           corr_func="akkar",
                           sigma_mu_epsilon=1.0)
    rctx = RuptureContext()
    rctx.mag = 6.
    rctx.hypo_depth = 15.
    dctx = DistancesContext()
    dctx.rjb = np.array([1., 10., 30., 70.])
    sctx = SitesContext()
    sctx.vs30 = 500.0 * np.ones(4)
    sctx.vs30measured = np.ones(4, dtype="bool")
    stdt = [const.StdDev.TOTAL]
    expected_mean = np.array(
        [-1.45586338, -1.94419233, -2.91884965, -3.91919928])
    expected_stddev = np.array(
        [0.58317566, 0.58317566, 0.58317566, 0.58317566])
    imtype = imt.AvgSA()
    mean, [stddev] = gmm.get_mean_and_stddevs(sctx, rctx, dctx, imtype, stdt)
    np.testing.assert_almost_equal(mean, expected_mean)
    np.testing.assert_almost_equal(stddev, expected_stddev)
def ctx(self, nsites, vs30):
    sites = Dummy.get_site_collection(nsites, vs30=vs30)
    rup = Dummy.get_rupture(mag=6.0)
    ctx = RuptureContext()
    ctx.sid = np.arange(nsites)
    vars(ctx).update(vars(rup))
    for name in sites.array.dtype.names:
        setattr(ctx, name, sites[name])
    return ctx
def test_mixed(self):
    ctxs = [RuptureContext([('occurrence_rate', .001)]),
            RuptureContext([('occurrence_rate', .002)]),
            RuptureContext([('occurrence_rate', numpy.nan),
                            ('probs_occur', [.999, .001])]),
            RuptureContext([('occurrence_rate', numpy.nan),
                            ('probs_occur', [.998, .002])])]
    for poe in (.1, .5, .9):
        c1, pnes1 = compose(ctxs, poe)
        c2, pnes2 = compose(_collapse(ctxs), poe)
        aac(c1, c2)  # the same
def _prepare_ctxs(rupdata, cmaker, sitecol):
    ctxs = []
    for u in range(len(rupdata['mag'])):
        ctx = RuptureContext()
        for par in rupdata:
            if not par.endswith('_'):
                setattr(ctx, par, rupdata[par][u])
            else:  # site-dependent parameter
                setattr(ctx, par[:-1], rupdata[par][u])
        for par in cmaker.REQUIRES_SITES_PARAMETERS:
            setattr(ctx, par, sitecol[par])
        ctx.sids = sitecol.sids
        ctxs.append(ctx)
    return ctxs
def test_calculation_Baker_Jayaram(self):
    DATA_FILE = data / 'GENERIC_GMPE_AVGSA_MEAN_STD_TOTAL_BAKER_JAYARAM.csv'

    # Initialise meta-GMPE
    mgmpe = gsim.mgmpe.generic_gmpe_avgsa.GenericGmpeAvgSA(
        gmpe_name='BooreAtkinson2008',
        avg_periods=[0.05, 0.15, 1.0, 2.0, 4.0],
        corr_func='baker_jayaram')
    ctx = RuptureContext()
    ctx.sids = [0]
    P = imt.AvgSA
    S = [const.StdDev.TOTAL]
    with open(DATA_FILE, 'r') as f:
        # Skip header
        for i in [1, 2, 3]:
            f.readline()
        for line in f:
            arr = np.float_(line.strip().split(','))
            # Setting ground motion attributes
            ctx.mag = arr[0]
            ctx.rjb = np.array([arr[1]])
            ctx.rake = arr[2]
            ctx.hypo_depth = arr[3]
            ctx.vs30 = np.array([arr[4]])
            # Compute ground motion
            mean, stdv = mgmpe.get_mean_and_stddevs(ctx, ctx, ctx, P, S)
            np.testing.assert_almost_equal(mean, arr[6])
            np.testing.assert_almost_equal(stdv, arr[7])
def create_context(self, evt_id, imts=None):
    """Create a new Context `dict`. Objects of this type will be yielded by
    `get_context`.

    :param evt_id: the earthquake id (e.g. int, or str)
    :param imts: a list of strings denoting the IMTs to be included in the
        context. If missing or None, the returned dict **will NOT** have the
        keys "Observations" and "Num. Sites"

    :return: the dict with keys:
        ```
        {
        'EventID': evt_id,
        'Ctx': a new :class:`openquake.hazardlib.contexts.RuptureContext`,
        'Observations': dict[str, list]  # (each imt in imts mapped to `[]`)
        'Num. Sites': 0
        }
        ```
        NOTE: Remember 'Observations' and 'Num. Sites' are missing if `imts`
        is missing, None or an empty sequence.
    """
    dic = {'EventID': evt_id, 'Ctx': RuptureContext()}
    if imts is not None and len(imts):
        dic["Observations"] = OrderedDict([(imt, []) for imt in imts])
        dic["Num. Sites"] = 0
    return dic
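# --- Illustrative usage sketch (not part of the original module) ---
# A minimal, hand-built dict with the same shape as the one documented in
# `create_context` above. The event id and IMT strings are made-up values,
# and the RuptureContext import path is assumed to be the modern one
# (older hazardlib versions expose it from openquake.hazardlib.gsim.base).
from collections import OrderedDict
from openquake.hazardlib.contexts import RuptureContext

example_imts = ['PGA', 'SA(1.0)']  # hypothetical IMT list
example_dic = {'EventID': 'EQ_0001', 'Ctx': RuptureContext()}
example_dic['Observations'] = OrderedDict((im, []) for im in example_imts)
example_dic['Num. Sites'] = 0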
def stuff_context(sites, rup, dists):
    """
    Function to fill a rupture context with the contents of all of the
    other contexts.

    Args:
        sites (SiteCollection): A SiteCollection object.
        rup (RuptureContext): A RuptureContext object.
        dists (DistancesContext): A DistancesContext object.

    Returns:
        RuptureContext: A new RuptureContext whose attributes are all of
        the elements of the three inputs.
    """
    ctx = RuptureContext()
    for name in [name for name in vars(sites) if not name.startswith("__")]:
        setattr(ctx, name, getattr(sites, name))
    for name in [name for name in vars(rup) if not name.startswith("__")]:
        setattr(ctx, name, getattr(rup, name))
    for name in [name for name in vars(dists) if not name.startswith("__")]:
        setattr(ctx, name, getattr(dists, name))
    return ctx
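# --- Illustrative usage sketch (not part of the original module) ---
# Merge three contexts carrying a couple of attributes each; the attribute
# values are made up and `stuff_context` is assumed to be in scope. The
# merged context simply exposes the union of the inputs' attributes.
import numpy as np
from openquake.hazardlib.gsim.base import (
    SitesContext, RuptureContext, DistancesContext)

example_sctx = SitesContext()
example_sctx.vs30 = np.array([760.])
example_rctx = RuptureContext()
example_rctx.mag = 6.5
example_dctx = DistancesContext()
example_dctx.rjb = np.array([20.])

merged = stuff_context(example_sctx, example_rctx, example_dctx)
# merged.vs30, merged.mag and merged.rjb are all available on one object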
def setUp(self):
    fname = gettemp(ampl_func)
    df = read_csv(fname, {'ampcode': ampcode_dt, None: numpy.float64},
                  index='ampcode')
    self.df = AmplFunction(df)

    # Set GMMs
    gmmA = BooreAtkinson2008()
    gmmB = BooreEtAl2014()

    # Set parameters
    dsts = [10., 15., 20., 30., 40.]
    dsts = [10.]
    imts = [PGA(), SA(1.0)]
    sites = Dummy.get_site_collection(len(dsts), vs30=760.0)
    self.mag = 5.5
    rup = Dummy.get_rupture(mag=self.mag)
    ctx = RuptureContext.full(rup, sites)
    ctx.rjb = numpy.array(dsts)
    ctx.rrup = numpy.array(dsts)
    self.rrup = ctx.rrup

    # Compute GM on rock
    self.meastd = gmmA.get_mean_std([ctx], imts)  # shape (2, N=1, M=2)
def setUp(self):
    self.ctx = ctx = RuptureContext()
    ctx.mag = 6.
    ctx.rake = 0.
    ctx.hypo_depth = 10.
    sites = Dummy.get_site_collection(4, vs30=760.)
    for name in sites.array.dtype.names:
        setattr(ctx, name, sites[name])
    ctx.rrup = np.array([1., 10., 30., 70.])
    ctx.rjb = np.array([1., 10., 30., 70.])
    self.imt = PGA()
def create_rupture_context(self, evt_id):
    '''Creates, initializes and returns a rupture context by setting the
    default values of the attributes defined in
    `self.rupture_context_attrs`. The returned context is intended to be
    used in `self.get_contexts`.

    :return: a :class:`openquake.hazardlib.contexts.RuptureContext`
    '''
    ctx = RuptureContext()
    for _ in self.rupture_context_attrs:
        setattr(ctx, _, np.nan)
    return ctx
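# --- Illustrative sketch (not part of the original class) ---
# The same initialisation pattern written standalone: every attribute named
# in a hypothetical rupture_context_attrs sequence starts out as NaN and is
# meant to be overwritten later with real values.
import numpy as np
from openquake.hazardlib.contexts import RuptureContext

example_rupture_context_attrs = ('mag', 'rake', 'hypo_depth')  # illustrative
example_ctx = RuptureContext()
for attr in example_rupture_context_attrs:
    setattr(example_ctx, attr, np.nan)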
def setUp(self):
    self.ctx = ctx = RuptureContext()
    ctx.mag = 6.0
    ctx.hypo_depth = 10.
    sites = Dummy.get_site_collection(
        4, amplfactor=[-1.0, 1.5, 0.00, -1.99])
    for name in sites.array.dtype.names:
        setattr(ctx, name, sites[name])
    ctx.rhypo = np.array([1., 10., 30., 70.])
    ctx.repi = np.array([1., 10., 30., 70.])
    self.imt = MMI()
def test_make_pmap(self):
    trunclevel = 3
    imtls = DictArray({'PGA': [0.01]})
    gsims = [valid.gsim('AkkarBommer2010')]
    ctxs = []
    for occ_rate in (.001, .002):
        ctx = RuptureContext()
        ctx.mag = 5.5
        ctx.rake = 90
        ctx.occurrence_rate = occ_rate
        ctx.sids = numpy.array([0.])
        ctx.vs30 = numpy.array([760.])
        ctx.rrup = numpy.array([100.])
        ctx.rjb = numpy.array([99.])
        ctxs.append(ctx)
    pmap = make_pmap(ctxs, gsims, imtls, trunclevel, 50.)
    numpy.testing.assert_almost_equal(pmap[0].array, 0.066381)
def pt_src_are(self, pt_src, gsim, weight, lnSA, monitor):
    """
    Returns the vector-valued Annual Rate of Exceedance for one single
    point-source.

    :param pt_src: single instance of class
        "openquake.hazardlib.source.point.PointSource"
    :param gsim: tuple containing (only one?) instance of an OpenQuake
        GSIM class
    :param weight: weight to be multiplied by the ARE estimate
    :param lnSA: list, natural logarithm of acceleration values for each
        spectral period. Note: values should be ordered in the same order
        as self.periods
    """
    annual_rate = 0
    # Loop over ruptures: one rupture for each combination of
    # (mag, nodal plane, hypocentral depth):
    for r in pt_src.iter_ruptures():
        # NOTE: IF ACCOUNTING FOR "pointsource_distance" IN THE INI FILE,
        # ONE SHOULD USE THE "point_ruptures()" METHOD BELOW, i.e. loop over
        # one rupture per magnitude and neglect floating and the combination
        # of nodal plane and hypocentral depth:
        # for r in pt_src.point_ruptures():
        #
        # Note: the seismicity rate is evenly distributed over all point
        # sources; it also accounts for the FMD (i.e. it decreases with
        # increasing magnitude).
        #
        # Filter the site collection with respect to the rupture and prepare
        # the context objects:
        context_maker = ContextMaker(r.tectonic_region_type, gsim)
        site_ctx, dist_ctx = context_maker.make_contexts(self.sites, r)
        rup_ctx = RuptureContext()
        rup_ctx.mag = r.mag
        rup_ctx.rake = r.rake
        assert len(gsim) == 1
        annual_rate += r.occurrence_rate * weight * self.gm_poe(
            gsim[0], dist_ctx, rup_ctx, site_ctx, lnSA)
    return annual_rate
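# --- Worked sketch of the accumulation above (illustrative numbers only) ---
# Each rupture contributes occurrence_rate * weight * P(exceedance), so the
# annual rate of exceedance is a weighted sum over ruptures:
example_rates = [0.010, 0.005]   # hypothetical rupture occurrence rates (1/yr)
example_poes = [0.20, 0.05]      # hypothetical exceedance probabilities
example_weight = 0.5             # hypothetical logic-tree weight
example_are = sum(r * example_weight * p
                  for r, p in zip(example_rates, example_poes))
# 0.010*0.5*0.20 + 0.005*0.5*0.05 = 0.001125 exceedances per year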
def test_get_pmap(self):
    truncation_level = 3
    imtls = DictArray({'PGA': [0.01]})
    gsims = [valid.gsim('AkkarBommer2010')]
    ctxs = []
    for occ_rate in (.001, .002):
        ctx = RuptureContext()
        ctx.mag = 5.5
        ctx.rake = 90
        ctx.occurrence_rate = occ_rate
        ctx.sids = numpy.array([0.])
        ctx.vs30 = numpy.array([760.])
        ctx.rrup = numpy.array([100.])
        ctx.rjb = numpy.array([99.])
        ctxs.append(ctx)
    cmaker = ContextMaker(
        'TRT', gsims, dict(imtls=imtls, truncation_level=truncation_level))
    cmaker.tom = PoissonTOM(time_span=50)
    pmap = cmaker.get_pmap(ctxs)
    numpy.testing.assert_almost_equal(pmap[0].array, 0.066381)
def _get_stds(self, within_absolute=None, between_absolute=None):
    if within_absolute is not None:
        gmm = SplitSigmaGMPE(gmpe_name='Campbell2003',
                             within_absolute=within_absolute)
    elif between_absolute is not None:
        gmm = SplitSigmaGMPE(gmpe_name='Campbell2003',
                             between_absolute=between_absolute)
    else:
        raise ValueError('Unknown option')

    # Set parameters
    ctx = RuptureContext()
    ctx.mag = 6.0
    ctx.sids = [0, 1, 2, 3]
    ctx.vs30 = [760.] * 4
    ctx.rrup = np.array([1., 10., 30., 70.])
    ctx.occurrence_rate = .0001
    imt = PGA()
    stds_types = [const.StdDev.TOTAL,
                  const.StdDev.INTER_EVENT,
                  const.StdDev.INTRA_EVENT]

    # Compute results
    mean, stds = gmm.get_mean_and_stddevs(ctx, ctx, ctx, imt, stds_types)
    return stds, stds_types
def _parse_csv_line(headers, values, req_site_params):
    """
    Parse a single line from data file.

    :param headers:
        A list of header names, the strings from the first line of csv file.
    :param values:
        A list of values of a single row to parse.
    :param req_site_params:
        The site parameters required by the GSIM, used as slots of the
        returned ``SitesContext``.
    :returns:
        A tuple of the following values (in specified order):

        sctx
            An instance of :class:`openquake.hazardlib.gsim.base.SitesContext`
            with attributes populated by the information from the row in the
            form of single-element numpy arrays.
        rctx
            An instance of
            :class:`openquake.hazardlib.gsim.base.RuptureContext`.
        dctx
            An instance of
            :class:`openquake.hazardlib.gsim.base.DistancesContext`.
        stddev_types
            An empty list, if the ``result_type`` column says "MEAN" for that
            row, otherwise it is a list with one item -- a requested standard
            deviation type.
        expected_results
            A dictionary mapping IMT-objects to one-element arrays of expected
            result values. Those results represent either standard deviation
            or mean value of corresponding IMT depending on ``result_type``.
        result_type
            A string literal, one of ``'STDDEV'`` or ``'MEAN'``. Value is
            taken from column ``result_type``.
    """
    rctx = RuptureContext()
    sctx = SitesContext(slots=req_site_params)
    dctx = DistancesContext()
    expected_results = {}
    stddev_types = result_type = damping = None

    for param, value in zip(headers, values):
        if param == 'result_type':
            value = value.upper()
            if value.endswith('_STDDEV'):
                # the row defines expected stddev results
                result_type = 'STDDEV'
                stddev_types = [getattr(const.StdDev,
                                        value[:-len('_STDDEV')])]
            else:
                # the row defines expected exponents of mean values
                assert value == 'MEAN'
                stddev_types = []
                result_type = 'MEAN'
        elif param == 'damping':
            damping = float(value)
        elif param.startswith('site_'):
            # value is sites context object attribute
            if param == 'site_vs30measured' or param == 'site_backarc':
                value = float(value) != 0
            elif param in ('site_siteclass', 'site_ec8', 'site_ec8_p18',
                           'site_geology'):
                value = numpy.string_(value)
            else:
                value = float(value)
            # site_lons, site_lats, site_depths -> lon, lat, depth
            if param.endswith(('lons', 'lats', 'depths')):
                attr = param[len('site_'):-1]
            else:  # vs30s etc
                attr = param[len('site_'):]
            setattr(sctx, attr, numpy.array([value]))
        elif param.startswith('dist_'):
            # value is a distance measure
            value = float(value)
            setattr(dctx, param[len('dist_'):], numpy.array([value]))
        elif param.startswith('rup_'):
            # value is a rupture context attribute
            try:
                value = float(value)
            except ValueError:
                if value != 'undefined':
                    raise
            setattr(rctx, param[len('rup_'):], value)
        elif param == 'component_type':
            pass
        else:
            # value is the expected result (of result_type type)
            value = float(value)
            if param == 'arias':  # ugly legacy corner case
                param = 'ia'
            if param == 'avgsa':
                imt = from_string('AvgSA')
            else:
                try:
                    # The title of the column should be IMT(args)
                    imt = from_string(param.upper())
                except KeyError:
                    # Then it is just a period for SA
                    imt = registry['SA'](float(param), damping)
            expected_results[imt] = numpy.array([value])

    assert result_type is not None
    return sctx, rctx, dctx, stddev_types, expected_results, result_type
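# --- Illustrative call (not part of the original test utility) ---
# A hypothetical CSV row exercising the main branches above: rupture, distance
# and site columns plus a MEAN expectation for PGA. Header names and values
# are made up for illustration.
example_headers = ['rup_mag', 'dist_rjb', 'site_vs30', 'result_type', 'pga']
example_values = ['6.5', '20.0', '760.0', 'MEAN', '0.123']
sctx, rctx, dctx, stddev_types, expected, result_type = _parse_csv_line(
    example_headers, example_values, req_site_params={'vs30'})
# rctx.mag == 6.5, dctx.rjb == array([20.]), sctx.vs30 == array([760.]),
# stddev_types == [] and expected maps PGA() to array([0.123])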
def compute_disagg(dstore, idxs, cmaker, iml3, trti, bin_edges, oq, monitor):
    # see https://bugs.launchpad.net/oq-engine/+bug/1279247 for an explanation
    # of the algorithm used
    """
    :param dstore: a DataStore instance
    :param idxs: an array of indices to ruptures
    :param cmaker: a :class:`openquake.hazardlib.gsim.base.ContextMaker`
        instance
    :param iml3: an ArrayWrapper of shape (N, P, Z) with an attribute imt
    :param trti: tectonic region type index
    :param bin_edges: a quintet (mag_edges, dist_edges, lon_edges, lat_edges,
        eps_edges)
    :param oq: an OqParam instance with the job parameters
    :param monitor: monitor of the currently running job
    :returns: a dictionary sid -> 8D-array
    """
    with monitor('reading rupdata', measuremem=True):
        dstore.open('r')
        sitecol = dstore['sitecol']
        rupdata = {k: dstore['rup/' + k][idxs] for k in dstore['rup']}
    RuptureContext.temporal_occurrence_model = PoissonTOM(
        oq.investigation_time)
    pne_mon = monitor('disaggregate_pne', measuremem=False)
    mat_mon = monitor('build_disagg_matrix', measuremem=True)
    gmf_mon = monitor('disagg mean_std', measuremem=False)
    for sid, iml2 in zip(sitecol.sids, iml3):
        singlesite = sitecol.filtered([sid])
        bins = disagg.get_bins(bin_edges, sid)
        gsim_by_z = {}
        for z in range(iml3.shape[-1]):
            try:
                gsim = cmaker.gsim_by_rlzi[iml3.rlzs[sid, z]]
            except KeyError:
                pass
            else:
                gsim_by_z[z] = gsim
        ctxs = []
        ok, = numpy.where(
            rupdata['rrup_'][:, sid] <= cmaker.maximum_distance(cmaker.trt))
        for ridx in ok:  # consider only the ruptures close to the site
            ctx = RuptureContext((par, rupdata[par][ridx])
                                 for par in rupdata if not par.endswith('_'))
            for par in rupdata:
                if par.endswith('_'):
                    setattr(ctx, par[:-1], rupdata[par][ridx, [sid]])
            ctxs.append(ctx)
        if not ctxs:
            continue
        eps3 = disagg._eps3(cmaker.trunclevel, oq.num_epsilon_bins)
        matrix = numpy.zeros([len(b) - 1 for b in bins] + list(iml2.shape))
        for z, gsim in gsim_by_z.items():
            with gmf_mon:
                ms = disagg.get_mean_stdv(singlesite, ctxs, iml3.imt, gsim)
            bdata = disagg.disaggregate(
                ms, ctxs, iml3.imt, iml2[:, z], eps3, pne_mon)
            if bdata.pnes.sum():
                with mat_mon:
                    matrix[..., z] = disagg.build_disagg_matrix(bdata, bins)
        if matrix.any():
            yield {'trti': trti, 'imti': iml3.imti, sid: matrix}
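# --- Illustrative sketch of the per-rupture context construction above ---
# (not part of the original function). A tiny hand-made rupdata-like dict:
# keys ending with '_' hold site-dependent arrays and lose the underscore
# when copied onto the context; all values are made up.
import numpy
from openquake.hazardlib.contexts import RuptureContext

example_rupdata = {'mag': numpy.array([5.5]),
                   'rake': numpy.array([90.]),
                   'rrup_': numpy.array([[10., 20.]])}
example_ridx, example_sid = 0, 1
example_ctx = RuptureContext(
    (par, example_rupdata[par][example_ridx])
    for par in example_rupdata if not par.endswith('_'))
for par in example_rupdata:
    if par.endswith('_'):
        setattr(example_ctx, par[:-1],
                example_rupdata[par][example_ridx, [example_sid]])
# example_ctx.mag == 5.5, example_ctx.rake == 90.0,
# example_ctx.rrup == array([20.])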
def _parse_csv_line(headers, values):
    """
    Parse a single line from data file.

    :param headers:
        A list of header names, the strings from the first line of csv file.
    :param values:
        A list of values of a single row to parse.
    :returns:
        A tuple of the following values (in specified order):

        sctx
            An instance of :class:`openquake.hazardlib.gsim.base.SitesContext`
            with attributes populated by the information from the row in the
            form of single-element numpy arrays.
        rctx
            An instance of
            :class:`openquake.hazardlib.gsim.base.RuptureContext`.
        dctx
            An instance of
            :class:`openquake.hazardlib.gsim.base.DistancesContext`.
        stddev_types
            An empty list, if the ``result_type`` column says "MEAN" for that
            row, otherwise it is a list with one item -- a requested standard
            deviation type.
        expected_results
            A dictionary mapping IMT-objects to one-element arrays of expected
            result values. Those results represent either standard deviation
            or mean value of corresponding IMT depending on ``result_type``.
        result_type
            A string literal, one of ``'STDDEV'`` or ``'MEAN'``. Value is
            taken from column ``result_type``.
    """
    rctx = RuptureContext()
    sctx = SitesContext()
    dctx = DistancesContext()
    expected_results = {}
    stddev_types = result_type = damping = None

    for param, value in zip(headers, values):
        if param == 'result_type':
            value = value.upper()
            if value.endswith('_STDDEV'):
                # the row defines expected stddev results
                result_type = 'STDDEV'
                stddev_types = [getattr(const.StdDev,
                                        value[:-len('_STDDEV')])]
            else:
                # the row defines expected exponents of mean values
                assert value == 'MEAN'
                stddev_types = []
                result_type = 'MEAN'
        elif param == 'damping':
            damping = float(value)
        elif param.startswith('site_'):
            # value is sites context object attribute
            if (param == 'site_vs30measured') or (param == 'site_backarc'):
                value = float(value) != 0
            else:
                value = float(value)
            setattr(sctx, param[len('site_'):], numpy.array([value]))
        elif param.startswith('dist_'):
            # value is a distance measure
            value = float(value)
            setattr(dctx, param[len('dist_'):], numpy.array([value]))
        elif param.startswith('rup_'):
            # value is a rupture context attribute
            value = float(value)
            setattr(rctx, param[len('rup_'):], value)
        elif param == 'component_type':
            pass
        else:
            # value is the expected result (of result_type type)
            value = float(value)
            if param == 'pga':
                imt = PGA()
            elif param == 'pgv':
                imt = PGV()
            elif param == 'pgd':
                imt = PGD()
            elif param == 'cav':
                imt = CAV()
            elif param == 'mmi':
                imt = MMI()
            elif param == "arias":
                imt = IA()
            elif param == "rsd595":
                imt = RSD595()
            elif param == "rsd575":
                imt = RSD575()
            elif param == "rsd2080":
                imt = RSD2080()
            else:
                period = float(param)
                assert damping is not None
                imt = SA(period, damping)
            expected_results[imt] = numpy.array([value])

    assert result_type is not None
    return sctx, rctx, dctx, stddev_types, expected_results, result_type
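# --- Illustrative call (not part of the original test utility) ---
# A hypothetical row checking a total-stddev expectation for SA(1.0); the
# header names and numbers are made up.
example_headers = ['rup_mag', 'dist_rrup', 'site_vs30', 'damping',
                   'result_type', '1.0']
example_values = ['6.0', '15.0', '760.0', '5', 'TOTAL_STDDEV', '0.6']
sctx, rctx, dctx, stddev_types, expected, result_type = _parse_csv_line(
    example_headers, example_values)
# result_type == 'STDDEV', stddev_types == [const.StdDev.TOTAL] and
# expected maps SA(1.0, 5.0) to array([0.6])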