Ejemplo n.º 1
0
def gen_ctxs(df):
    """
    Build RuptureContexts out of a normalized DataFrame.

    :param df: a DataFrame with 'rup_*' columns (rupture parameters),
        'dist_*'/'site_*' columns and a 'result_type' column; one group
        of rows is expected per (rupture, result_type) combination
    :yields: :class:`contexts.RuptureContext` instances
    """
    rrp = [col for col in df.columns if col.startswith('rup_')]
    pars = [col for col in df.columns if col.startswith(('dist_', 'site_'))]
    outs = df.result_type.unique()
    num_outs = len(outs)
    for rup_params, grp in df.groupby(rrp):
        inputs = [
            gr[rrp + pars].to_numpy() for _, gr in grp.groupby('result_type')
        ]
        if len(inputs) < num_outs:  # some result_type missing for this rupture
            dic = dict(zip(rrp + pars, inputs[0][0]))
            print('\nMissing some data for %s' % dic)
            continue
        assert all_equals(inputs), 'Use NORMALIZE=True'
        if len(rrp) == 1:
            # groupby on a single column yields a scalar key, not a tuple
            rup_params = [rup_params]
        ctx = contexts.RuptureContext()
        for par, rp in zip(rrp, rup_params):
            setattr(ctx, par[4:], rp)  # strip the 'rup_' prefix
            del grp[par]
        if 'damping' in grp.columns:
            del grp['damping']
        for rtype, gr in grp.groupby('result_type'):
            del gr['result_type']
            setattr(ctx, rtype, gr)
        for par in pars:
            value = grp[grp.result_type == outs[0]][par].to_numpy()
            setattr(ctx, par[5:], value)  # dist_, site_ parameters
        # NOTE: gr is deliberately the last group from the loop above;
        # every result_type group has the same length once normalized
        ctx.sids = np.arange(len(gr))
        # BUG FIX: the failure message used to print len(gr) / num_outs,
        # i.e. not the value actually compared; show len(grp) / num_outs
        assert len(gr) == len(grp) / num_outs, (len(gr), len(grp) / num_outs)
        yield ctx
Ejemplo n.º 2
0
def disaggregate(cmaker,
                 sitecol,
                 rupdata,
                 iml2,
                 truncnorm,
                 epsilons,
                 monitor=Monitor()):
    """
    Disaggregate (separate) PoE in different contributions.

    :param cmaker: a ContextMaker instance
    :param sitecol: a SiteCollection with N=1 site
    :param rupdata: rupture data indexable by parameter name (field access
        like ``rupdata['mag']`` and per-record iteration), all with the
        same TRT
    :param iml2: a 2D array of IMLs of shape (M, P)
    :param truncnorm: an instance of scipy.stats.truncnorm
    :param epsilons: the epsilon bins
    :param monitor: a Monitor instance
    :returns:
        an AccumDict with keys (poe, imt, rlzi) and mags, dists, lons, lats
    """
    assert len(sitecol) == 1, sitecol
    acc = AccumDict(accum=[], mags=[], dists=[], lons=[], lats=[])
    try:
        gsim = cmaker.gsim_by_rlzi[iml2.rlzi]
    except KeyError:  # no GSIM for this realization: nothing to accumulate
        return acc
    pne_mon = monitor('disaggregate_pne', measuremem=False)
    [sid] = sitecol.sids  # unpacking enforces the single-site precondition
    acc['mags'] = rupdata['mag']
    acc['lons'] = rupdata['lon'][:, sid]
    acc['lats'] = rupdata['lat'][:, sid]
    acc['dists'] = dists = rupdata[cmaker.filter_distance][:, sid]
    if gsim.minimum_distance:
        # NOTE(review): dists looks like a view into rupdata, so this clamp
        # presumably writes back into the input — confirm that is intended
        dists[dists < gsim.minimum_distance] = gsim.minimum_distance
    # compute epsilon bin contributions only once
    eps_bands = truncnorm.cdf(epsilons[1:]) - truncnorm.cdf(epsilons[:-1])
    for rec in rupdata:
        rctx = contexts.RuptureContext(rec)
        dctx = contexts.DistancesContext(
            (param, rec[param][[sid]])
            for param in cmaker.REQUIRES_DISTANCES).roundup(
                gsim.minimum_distance)
        for m, imt in enumerate(iml2.imts):
            for p, poe in enumerate(iml2.poes_disagg):
                iml = iml2[m, p]
                with pne_mon:
                    pne = disaggregate_pne(gsim, rctx, sitecol, dctx, imt, iml,
                                           truncnorm, epsilons, eps_bands)
                acc[poe, str(imt), iml2.rlzi].append(pne)
    return acc
Ejemplo n.º 3
0
def _disaggregate(cmaker,
                  sitecol,
                  rupdata,
                  indices,
                  iml2,
                  eps3,
                  pne_mon=performance.Monitor()):
    # Split the PoE into separate contributions; the result is an
    # AccumDict keyed by (poe_index, imt_index) plus the arrays
    # mags, dists, lons, lats and the sizes M, P.
    [only_sid] = sitecol.sids  # unpacking enforces a single-site collection
    acc = AccumDict(accum=[],
                    mags=[],
                    dists=[],
                    lons=[],
                    lats=[],
                    M=len(iml2.imts),
                    P=len(iml2.poes_disagg))
    keys = 'mags dists lons lats P M'.split()
    try:
        gsim = cmaker.gsim_by_rlzi[iml2.rlzi]
    except KeyError:  # realization not handled by this ContextMaker
        return pack(acc, keys)
    maxdist = cmaker.maximum_distance(cmaker.trt)
    fildist = rupdata[cmaker.filter_distance + '_']
    mindist = gsim.minimum_distance  # hoisted loop invariant
    for rup_idx, site_idx in enumerate(indices):
        # -1 marks "no contribution for this site"; also skip ruptures
        # beyond the integration distance
        if site_idx == -1 or fildist[rup_idx][site_idx] >= maxdist:
            continue
        dist = fildist[rup_idx][site_idx]
        if mindist and dist < mindist:
            dist = mindist  # clamp to the GSIM minimum distance
        rctx = contexts.RuptureContext()
        for par in rupdata:
            setattr(rctx, par, rupdata[par][rup_idx])
        dctx = contexts.DistancesContext(
            (param, getattr(rctx, param + '_')[[site_idx]])
            for param in cmaker.REQUIRES_DISTANCES).roundup(mindist)
        acc['mags'].append(rctx.mag)
        acc['lons'].append(rctx.lon_[site_idx])
        acc['lats'].append(rctx.lat_[site_idx])
        acc['dists'].append(dist)
        with pne_mon:
            for m, imt in enumerate(iml2.imts):
                for p, poe in enumerate(iml2.poes_disagg):
                    pne = disaggregate_pne(gsim, rctx, sitecol, dctx, imt,
                                           iml2[m, p], *eps3)
                    acc[p, m].append(pne)
    return pack(acc, keys)
Ejemplo n.º 4
0
def _disaggregate(cmaker,
                  sitecol,
                  rupdata,
                  indices,
                  iml2,
                  eps3,
                  pne_mon=performance.Monitor(),
                  gmf_mon=performance.Monitor()):
    """
    Disaggregate (separate) PoE in different contributions.

    :param cmaker: a ContextMaker-like object (provides maximum_distance,
        trt, filter_distance, gsim_by_rlzi, REQUIRES_DISTANCES)
    :param sitecol: a site collection with a single site
    :param rupdata: a mapping of parameter name -> per-rupture array
    :param indices: per-rupture site index, -1 meaning no contribution
    :param iml2: IMLs with .imts, .poes_disagg and .rlzi attributes
    :param eps3: extra arguments forwarded to _disaggregate_pne
    :param pne_mon: Monitor timing the PNE computation
    :param gmf_mon: Monitor timing the mean/stddev computation
    :returns: the packed dict with keys mags, dists, lons, lats, pnes
    """
    [sid] = sitecol.sids  # unpacking enforces the single-site precondition
    acc = dict(pnes=[], mags=[], dists=[], lons=[], lats=[])
    try:
        gsim = cmaker.gsim_by_rlzi[iml2.rlzi]
    except KeyError:
        # no GSIM for this realization: return empty (packed) accumulators
        return pack(acc, 'mags dists lons lats pnes'.split())
    maxdist = cmaker.maximum_distance(cmaker.trt)
    fildist = rupdata[cmaker.filter_distance + '_']
    for ridx, sidx in enumerate(indices):
        if sidx == -1:  # no contribution for this site
            continue
        dist = fildist[ridx][sidx]
        if dist >= maxdist:  # beyond the integration distance
            continue
        elif gsim.minimum_distance and dist < gsim.minimum_distance:
            dist = gsim.minimum_distance  # clamp to the GSIM minimum
        rctx = contexts.RuptureContext(
            (par, val[ridx]) for par, val in rupdata.items())
        dctx = contexts.DistancesContext(
            (param, getattr(rctx, param + '_')[[sidx]])
            for param in cmaker.REQUIRES_DISTANCES)
        acc['mags'].append(rctx.mag)
        acc['lons'].append(rctx.lon_[sidx])
        acc['lats'].append(rctx.lat_[sidx])
        acc['dists'].append(dist)
        with gmf_mon:
            mean_std = get_mean_std(sitecol, rctx, dctx, iml2.imts,
                                    [gsim])[..., 0]  # (2, N, M)
        with pne_mon:
            # convert the levels into distribution values for each IMT
            iml = numpy.array([
                to_distribution_values(lvl, imt)
                for imt, lvl in zip(iml2.imts, iml2)
            ])  # shape (M, P)
            pne = _disaggregate_pne(rctx, mean_std, iml, *eps3)
            acc['pnes'].append(pne)
    return pack(acc, 'mags dists lons lats pnes'.split())
Ejemplo n.º 5
0
def oq_run(
    model: Enum,
    tect_type: Enum,
    rupture_df: pd.DataFrame,
    im: str,
    periods: Sequence[Union[int, float]] = None,
    **kwargs,
):
    """Run an openquake model with dataframe
    model: Enum
        OQ model name
    tect_type: Enum
        One of the tectonic types from
        ACTIVE_SHALLOW, SUBDUCTION_SLAB and SUBDUCTION_INTERFACE
    rupture_df: Rupture DF
        Columns for properties. E.g., vs30, z1pt0, rrup, rjb, mag, rake, dip....
        Rows be the separate site-fault pairs
        But Site information must be identical across the rows,
        only the faults can be different.
    im: string
        intensity measure
    periods: Sequence[Union[int, float]]
        for spectral acceleration, openquake tables automatically
        interpolate values between specified values, fails if outside range
    kwargs: pass extra (model specific) parameters to models
    """
    # NZ-specific model variants need the region keyword
    model = (OQ_MODELS[model][tect_type](
        **kwargs) if not model.name.endswith("_NZ") else
             OQ_MODELS[model][tect_type](region="NZL", **kwargs))

    # Check the given tect_type with its model's tect type
    trt = model.DEFINED_FOR_TECTONIC_REGION_TYPE
    if trt == const.TRT.SUBDUCTION_INTERFACE:
        assert tect_type == TectType.SUBDUCTION_INTERFACE
    elif trt == const.TRT.SUBDUCTION_INTRASLAB:
        assert tect_type == TectType.SUBDUCTION_SLAB
    elif trt == const.TRT.ACTIVE_SHALLOW_CRUST:
        assert tect_type == TectType.ACTIVE_SHALLOW
    else:
        raise ValueError("unknown tectonic region: " + trt)

    stddev_types = [
        std for std in SPT_STD_DEVS
        if std in model.DEFINED_FOR_STANDARD_DEVIATION_TYPES
    ]

    # Make a copy in case the original rupture_df used with other functions
    rupture_df = rupture_df.copy()

    # Check if df contains what model requires
    rupture_ctx_properties = set(rupture_df.columns.values)
    extra_site_parameters = set(
        model.REQUIRES_SITES_PARAMETERS).difference(rupture_ctx_properties)
    if len(extra_site_parameters) > 0:
        # BUG FIX: joining the set into a string; "str + set" raised a
        # TypeError instead of the intended ValueError (the sibling checks
        # below already used " ".join)
        raise ValueError("unknown site property: " +
                         " ".join(extra_site_parameters))

    extra_rup_properties = set(
        model.REQUIRES_RUPTURE_PARAMETERS).difference(rupture_ctx_properties)
    if len(extra_rup_properties) > 0:
        raise ValueError("unknown rupture property: " +
                         " ".join(extra_rup_properties))

    extra_dist_properties = set(
        model.REQUIRES_DISTANCES).difference(rupture_ctx_properties)
    if len(extra_dist_properties) > 0:
        raise ValueError("unknown distance property: " +
                         " ".join(extra_dist_properties))

    # Convert z1pt0 from km to m
    rupture_df["z1pt0"] *= 1000
    # OQ's single new-style context which contains all site, distance and rupture's information
    rupture_ctx = contexts.RuptureContext(
        tuple([
            # Openquake requiring occurrence_rate attribute to exist
            ("occurrence_rate", None),
            # sids is the number of sites provided (OQ term)
            # This term needs to be repeated for the number of rows in the df
            ("sids", [1] * rupture_df.shape[0]),
            *((
                column,
                rupture_df.loc[:, column].values,
            ) for column in rupture_df.columns.values),
        ]))

    if periods is not None:
        assert imt.SA in model.DEFINED_FOR_INTENSITY_MEASURE_TYPES
        # use sorted instead of max for full list
        # (Zhao 2006 models keep their SA coefficients in COEFFS_ASC)
        avail_periods = np.asarray([
            coeff_im.period
            for coeff_im in (model.COEFFS.sa_coeffs.keys() if not isinstance(
                model,
                (
                    gsim.zhao_2006.ZhaoEtAl2006Asc,
                    gsim.zhao_2006.ZhaoEtAl2006SSlab,
                    gsim.zhao_2006.ZhaoEtAl2006SInter,
                ),
            ) else model.COEFFS_ASC.sa_coeffs.keys())
        ])
        max_period = max(avail_periods)
        if not hasattr(periods, "__len__"):
            periods = [periods]
        results = []
        for period in periods:
            # renamed from `im` to avoid shadowing the function parameter
            sa_imt = imt.SA(period=min(period, max_period))
            try:
                result = oq_mean_stddevs(model, rupture_ctx, sa_imt,
                                         stddev_types)
            except KeyError as ke:
                cause = ke.args[0]
                # To make sure the KeyError is about missing pSA's period
                if (isinstance(cause, imt.IMT) and str(cause).startswith("SA")
                        and cause.period > 0.0):
                    # Period is smaller than model's supported min_period E.g., ZA_06
                    # Interpolate between PGA(0.0) and model's min_period
                    low_result = oq_mean_stddevs(model, rupture_ctx, imt.PGA(),
                                                 stddev_types)
                    high_period = avail_periods[period <= avail_periods][0]
                    high_result = oq_mean_stddevs(model, rupture_ctx,
                                                  imt.SA(period=high_period),
                                                  stddev_types)

                    result = interpolate_with_pga(period, high_period,
                                                  low_result, high_result)
                else:
                    # KeyError that we cannot handle
                    logging.exception(ke)
                    raise
            except Exception as e:
                # Any other exceptions that we cannot handle
                logging.exception(e)
                raise

            # extrapolate pSA value up based on maximum available period
            if period > max_period:
                result.loc[:,
                           result.columns.str.endswith("mean")] += 2 * np.log(
                               max_period / period)
                # Updating the period from max_period to the given period
                # E.g with ZA_06, replace 5.0 to period > 5.0
                result.columns = result.columns.str.replace(str(max_period),
                                                            str(period),
                                                            regex=False)
            results.append(result)

        return pd.concat(results, axis=1)
    else:
        imc = getattr(imt, im)
        assert imc in model.DEFINED_FOR_INTENSITY_MEASURE_TYPES
        return oq_mean_stddevs(model, rupture_ctx, imc(), stddev_types)