Example #1
    def test_mag_dist_outside_range(self):
        ctx = RuptureContext()
        # rupture with Mw = 3 (Mblg=2.9434938048208452) at rhypo = 1 must give
        # same mean as rupture with Mw = 4.4 (Mblg=4.8927897867183798) at
        # rhypo = 10
        ctx.mag = 2.9434938048208452
        ctx.rhypo = numpy.array([1])
        ctx.sids = [0]
        mean_mw3_d1, _ = self.GSIM_CLASS().get_mean_and_stddevs(
            ctx, ctx, ctx, SA(0.1, 5), [StdDev.TOTAL])

        ctx.mag = 4.8927897867183798
        ctx.rhypo = numpy.array([10])
        mean_mw4pt4_d10, _ = self.GSIM_CLASS().get_mean_and_stddevs(
            ctx, ctx, ctx, SA(0.1, 5), [StdDev.TOTAL])

        self.assertAlmostEqual(float(mean_mw3_d1), float(mean_mw4pt4_d10))

        # rupture with Mw = 9 (Mblg = 8.2093636421088814) at rhypo = 1500 km
        # must give same mean as rupture with Mw = 8.2
        # (Mblg = 7.752253535347597) at rhypo = 1000
        ctx.mag = 8.2093636421088814
        ctx.rhypo = numpy.array([1500.])
        mean_mw9_d1500, _ = self.GSIM_CLASS().get_mean_and_stddevs(
            ctx, ctx, ctx, SA(0.1, 5), [StdDev.TOTAL])

        ctx.mag = 7.752253535347597
        ctx.rhypo = numpy.array([1000.])
        mean_mw8pt2_d1000, _ = self.GSIM_CLASS().get_mean_and_stddevs(
            ctx, ctx, ctx, SA(0.1, 5), [StdDev.TOTAL])

        self.assertAlmostEqual(float(mean_mw9_d1500),
                               float(mean_mw8pt2_d1000))
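The equalities asserted here hold because the GSIM clips magnitude and hypocentral distance to its supported range, so both out-of-range ruptures are evaluated at the same clipped point. A minimal numpy sketch of that clipping idea, with the bounds (Mw 4.4 to 8.2, rhypo 10 to 1000 km) taken from the comments in the test rather than from the library itself:

import numpy

def clip_to_supported_range(mw, rhypo,
                            mag_bounds=(4.4, 8.2), dist_bounds=(10.0, 1000.0)):
    # Out-of-range values are replaced by the nearest bound, so Mw=3 at 1 km
    # evaluates at the same point as Mw=4.4 at 10 km, and Mw=9 at 1500 km at
    # the same point as Mw=8.2 at 1000 km.
    return numpy.clip(mw, *mag_bounds), numpy.clip(rhypo, *dist_bounds)

assert clip_to_supported_range(3.0, 1.0) == clip_to_supported_range(4.4, 10.0)
assert clip_to_supported_range(9.0, 1500.0) == clip_to_supported_range(8.2, 1000.0)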
Example #2
    def test_dist_not_in_increasing_order(self):
        ctx = RuptureContext()
        ctx.mag = 5.
        ctx.sids = [0, 1]
        ctx.rhypo = numpy.array([150, 100])
        mean_150_100, _ = self.GSIM_CLASS().get_mean_and_stddevs(
            ctx, ctx, ctx, SA(0.1, 5), [StdDev.TOTAL])

        ctx.rhypo = numpy.array([100, 150])
        mean_100_150, _ = self.GSIM_CLASS().get_mean_and_stddevs(
            ctx, ctx, ctx, SA(0.1, 5), [StdDev.TOTAL])
        self.assertAlmostEqual(mean_150_100[1], mean_100_150[0])
        self.assertAlmostEqual(mean_150_100[0], mean_100_150[1])
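The assertions work because the mean is computed element-wise per site, so permuting the input distances simply permutes the outputs. An illustrative vectorised distance term (not the tested GMPE) showing the same property:

import numpy

def illustrative_mean(rhypo, c1=-1.5, c2=-0.002):
    # Each site's value depends only on that site's distance, not on the
    # ordering of the array, so reversing the input reverses the output.
    rhypo = numpy.asarray(rhypo, dtype=float)
    return c1 * numpy.log10(rhypo) + c2 * rhypo

assert numpy.allclose(illustrative_mean([150.0, 100.0])[::-1],
                      illustrative_mean([100.0, 150.0]))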
Example #3
    def test_rhypo_smaller_than_15(self):
        # test the calculation in case of rhypo distances less than 15 km
        # (for rhypo=0 the distance term has a singularity). In this case the
        # method should return values equal to the ones obtained by clipping
        # distances at 15 km.
        ctx = RuptureContext()
        ctx.sids = [0, 1, 2]
        ctx.vs30 = numpy.array([800.0, 800.0, 800.0])
        ctx.mag = 5.0
        ctx.rake = 0
        ctx.rhypo = numpy.array([0.0, 10.0, 16.0])
        ctx.rhypo.flags.writeable = False
        mean_0, stds_0 = self.GSIM_CLASS().get_mean_and_stddevs(
            ctx, ctx, ctx, PGA(), [StdDev.TOTAL])
        # the array above is read-only, so assign a new one with the
        # distances clipped at 15 km; the results must be identical
        ctx.rhypo = numpy.array([15.0, 15.0, 16.0])
        mean_15, stds_15 = self.GSIM_CLASS().get_mean_and_stddevs(
            ctx, ctx, ctx, PGA(), [StdDev.TOTAL])
        numpy.testing.assert_array_equal(mean_0, mean_15)
        numpy.testing.assert_array_equal(stds_0, stds_15)
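A standalone illustration of the behaviour this test checks; the functional form below is made up, and only the 15 km floor mirrors the comment in the test:

import numpy

def illustrative_distance_term(rhypo):
    # Clipping at 15 km removes the singularity of log10(rhypo) at rhypo=0
    # and makes [0, 10, 16] km equivalent to [15, 15, 16] km.
    rhypo = numpy.clip(numpy.asarray(rhypo, dtype=float), 15.0, None)
    return -numpy.log10(rhypo)

assert numpy.array_equal(illustrative_distance_term([0.0, 10.0, 16.0]),
                         illustrative_distance_term([15.0, 15.0, 16.0]))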
Example #4
def trim_multiple_events(
    st,
    origin,
    catalog,
    travel_time_df,
    pga_factor,
    pct_window_reject,
    gmpe,
    site_parameters,
    rupture_parameters,
):
    """
    Uses a catalog (list of ScalarEvents) to handle cases where a trace might
    contain signals from multiple events. The catalog should contain events
    down to a low enough magnitude in relation to the events of interest.
    Overall, the algorithm is as follows:

    1) For each earthquake in the catalog, get the P-wave travel time
       and estimated PGA at this station.

    2) Compute the PGA (of the as-recorded horizontal channels).

    3) Select the P-wave arrival times across all events for this record
       that are (a) within the signal window, and (b) have a predicted PGA
       greater than pga_factor times the recorded PGA from step #2.

    4) If any P-wave arrival times match the above criteria, reject the
       record when any of those arrivals falls within the first
       pct_window_reject*100% of the signal window; otherwise, trim the
       record so that its end time does not include any of the arrivals
       selected in step #3.

    Args:
        st (StationStream):
            Stream of data.
        origin (ScalarEvent):
            ScalarEvent object associated with the StationStream.
        catalog (list):
            List of ScalarEvent objects.
        travel_time_df (DataFrame):
            A pandas DataFrame that contains the travel time information
            (obtained from
             gmprocess.waveform_processing.phase.create_travel_time_dataframe).
            The columns in the DataFrame are the station ids and the indices
            are the earthquake ids.
        pga_factor (float):
            A decimal factor used to determine whether the predicted PGA
            from an event arrival is significant enough that it should be
            considered for removal.
        pct_window_reject (float):
            A decimal from 0.0 to 1.0 used to determine if an arrival should
            be trimmed from the record, or if the entire record should be
            rejected. If the arrival falls within the first
            pct_window_reject * 100% of the signal window, then the entire
            record will be rejected. Otherwise, the record will be trimmed
            appropriately.
        gmpe (str):
            Short name of the GMPE to use. Must be defined in the modules file.
        site_parameters (dict):
            Dictionary of site parameters to input to the GMPE.
        rupture_parameters (dict):
            Dictionary of rupture parameters to input to the GMPE.

    Returns:
        StationStream: Processed stream.

    """

    if not st.passed:
        return st

    # Check that we know the signal split for each trace in the stream
    for tr in st:
        if not tr.hasParameter("signal_split"):
            return st

    signal_window_starttime = st[0].getParameter("signal_split")["split_time"]

    arrivals = travel_time_df[st[0].stats.network + "." + st[0].stats.station]
    arrivals = arrivals.sort_values()

    # Filter by any arrival times that appear in the signal window
    arrivals = arrivals[(arrivals > signal_window_starttime)
                        & (arrivals < st[0].stats.endtime)]

    # Make sure we remove the arrival that corresponds to the event of interest
    if origin.id in arrivals.index:
        arrivals.drop(index=origin.id, inplace=True)

    if arrivals.empty:
        return st

    # Calculate the recorded PGA for this record
    stasum = StationSummary.from_stream(st, ["ROTD(50.0)"], ["PGA"])
    recorded_pga = stasum.get_pgm("PGA", "ROTD(50.0)")

    # Load the GMPE model
    gmpe = load_model(gmpe)

    # Generic context
    rctx = RuptureContext()

    # Make sure that site parameter values are converted to numpy arrays
    site_parameters_copy = site_parameters.copy()
    for k, v in site_parameters_copy.items():
        site_parameters_copy[k] = np.array([v])
    rctx.__dict__.update(site_parameters_copy)

    # Filter by arrivals that have significant expected PGA using GMPE
    is_significant = []
    for eqid, arrival_time in arrivals.items():
        event = next(event for event in catalog if event.id == eqid)

        # Set rupture parameters
        rctx.__dict__.update(rupture_parameters)
        rctx.mag = event.magnitude

        # TODO: distances should be calculated when we refactor to be
        # able to import distance calculations
        rctx.repi = np.array([
            gps2dist_azimuth(
                st[0].stats.coordinates.latitude,
                st[0].stats.coordinates.longitude,
                event.latitude,
                event.longitude,
            )[0] / 1000
        ])
        rctx.rjb = rctx.repi
        rctx.rhypo = np.sqrt(rctx.repi**2 + event.depth_km**2)
        rctx.rrup = rctx.rhypo
        rctx.sids = np.array(range(np.size(rctx.rrup)))
        pga, sd = gmpe.get_mean_and_stddevs(rctx, rctx, rctx, imt.PGA(), [])

        # Convert from ln(g) to %g
        predicted_pga = 100 * np.exp(pga[0])
        if predicted_pga > (pga_factor * recorded_pga):
            is_significant.append(True)
        else:
            is_significant.append(False)

    significant_arrivals = arrivals[is_significant]
    if significant_arrivals.empty:
        return st

    # Check whether any of the significant arrivals occur within the first
    # pct_window_reject fraction of the signal window
    signal_length = st[0].stats.endtime - signal_window_starttime
    cutoff_time = signal_window_starttime + pct_window_reject * (signal_length)
    if (significant_arrivals < cutoff_time).any():
        for tr in st:
            tr.fail("A significant arrival from another event occurs within "
                    "the first %s percent of the signal window" %
                    (100 * pct_window_reject))

    # Otherwise, trim the stream at the first significant arrival
    else:
        for tr in st:
            signal_end = tr.getParameter("signal_end")
            signal_end["end_time"] = significant_arrivals[0]
            signal_end["method"] = "Trimming before right another event"
            tr.setParameter("signal_end", signal_end)
        cut(st)

    return st
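The reject-or-trim decision from step #4 of the docstring can be isolated in a small standalone sketch; the helper below is illustrative and not part of gmprocess, and arrival offsets are expressed in seconds after the start of the signal window:

import pandas as pd

def classify_arrivals(arrival_offsets, signal_length, pct_window_reject):
    # Reject the record if any significant arrival falls within the first
    # pct_window_reject * 100% of the signal window; otherwise return the
    # earliest arrival, which becomes the new end of the signal window.
    cutoff = pct_window_reject * signal_length
    arrivals = pd.Series(sorted(arrival_offsets))
    if (arrivals < cutoff).any():
        return "reject", None
    return "trim", arrivals.iloc[0]

# With a 100 s signal window and pct_window_reject = 0.5:
print(classify_arrivals([70.0, 90.0], 100.0, 0.5))  # ('trim', 70.0)
print(classify_arrivals([30.0, 90.0], 100.0, 0.5))  # ('reject', None)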