def test_signal_split2():
    datafiles, origin = read_data_dir("knet", "us2000cnnl", "AOM0011801241951*")
    streams = []
    for datafile in datafiles:
        streams += read_data(datafile)

    streams = StreamCollection(streams)
    stream = streams[0]
    signal_split(stream, origin)

    cmpdict = {
        "split_time": UTCDateTime(2018, 1, 24, 10, 51, 39, 841483),
        "method": "p_arrival",
        "picker_type": "travel_time",
    }

    pdict = stream[0].getParameter("signal_split")
    for key, value in cmpdict.items():
        v1 = pdict[key]
        # Because we can't get the UTCDateTime __eq__ operator to behave
        # as expected with the currently installed version of obspy, we
        # pedantically compare the fields of the two objects instead...
        if isinstance(value, UTCDateTime):
            assert value.year == v1.year
            assert value.month == v1.month
            assert value.day == v1.day
            assert value.hour == v1.hour
            assert value.minute == v1.minute
            assert value.second == v1.second
        else:
            assert v1 == value
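
A tolerance-based comparison sidesteps the __eq__ issue entirely, since
subtracting one UTCDateTime from another returns the difference in seconds
as a float. A minimal sketch (assert_times_close is our own helper, not
part of gmprocess):

from obspy import UTCDateTime

def assert_times_close(t1, t2, tol=1e-3):
    # UTCDateTime.__sub__ yields the offset in seconds as a float, so a
    # plain tolerance check avoids the precision quirks of __eq__.
    assert abs(t1 - t2) < tol

assert_times_close(UTCDateTime(2018, 1, 24, 10, 51, 39, 841483),
                   UTCDateTime(2018, 1, 24, 10, 51, 39, 841000))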
def test_fit_spectra():
    config = get_config()
    datapath = os.path.join('data', 'testdata', 'demo', 'ci38457511', 'raw')
    datadir = pkg_resources.resource_filename('gmprocess', datapath)
    event = get_event_object('ci38457511')
    sc = StreamCollection.from_directory(datadir)
    for st in sc:
        st = signal_split(st, event)
        end_conf = config['windows']['signal_end']
        st = signal_end(st,
                        event_time=event.time,
                        event_lon=event.longitude,
                        event_lat=event.latitude,
                        event_mag=event.magnitude,
                        **end_conf)
        st = compute_snr(st, 30)
        st = get_corner_frequencies(st,
                                    method='constant',
                                    constant={
                                        'highpass': 0.08,
                                        'lowpass': 20.0
                                    })

    for st in sc:
        spectrum.fit_spectra(st, event)
def test_fit_spectra():
    config = get_config()
    datapath = os.path.join("data", "testdata", "demo", "ci38457511", "raw")
    datadir = pkg_resources.resource_filename("gmprocess", datapath)
    event = get_event_object("ci38457511")
    sc = StreamCollection.from_directory(datadir)
    for st in sc:
        st = signal_split(st, event)
        end_conf = config["windows"]["signal_end"]
        st = signal_end(st,
                        event_time=event.time,
                        event_lon=event.longitude,
                        event_lat=event.latitude,
                        event_mag=event.magnitude,
                        **end_conf)
        st = compute_snr(st, 30)
        st = get_corner_frequencies(st,
                                    event,
                                    method="constant",
                                    constant={
                                        "highpass": 0.08,
                                        "lowpass": 20.0
                                    })

    for st in sc:
        spectrum.fit_spectra(st, event)
def _test_trim_multiple_events():
    datapath = os.path.join("data", "testdata", "multiple_events")
    datadir = pkg_resources.resource_filename("gmprocess", datapath)
    sc = StreamCollection.from_directory(os.path.join(datadir, "ci38457511"))
    origin = get_event_object("ci38457511")
    df, catalog = create_travel_time_dataframe(
        sc, os.path.join(datadir, "catalog.csv"), 5, 0.1, "iasp91"
    )
    for st in sc:
        st.detrend("demean")
        remove_response(st, None, None)
        st = corner_frequencies.from_constant(st)
        lowpass_filter(st)
        highpass_filter(st)
        signal_split(st, origin)
        signal_end(
            st,
            origin.time,
            origin.longitude,
            origin.latitude,
            origin.magnitude,
            method="model",
            model="AS16",
        )
        cut(st, 2)
        trim_multiple_events(
            st, origin, catalog, df, 0.2, 0.7, "B14", {"vs30": 760}, {"rake": 0}
        )

    num_failures = sum(1 for st in sc if not st.passed)
    assert num_failures == 2

    failure = sc.select(station="WRV2")[0][0].getParameter("failure")
    assert failure["module"] == "trim_multiple_events"
    assert failure["reason"] == (
        "A significant arrival from another event "
        "occurs within the first 70.0 percent of the "
        "signal window"
    )

    for tr in sc.select(station="JRC2")[0]:
        np.testing.assert_almost_equal(
            tr.stats.endtime, UTCDateTime("2019-07-06T03:20:56.368300Z")
        )
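
The assert_almost_equal call above compares UTCDateTime objects directly;
this works because numpy evaluates abs(desired - actual) and UTCDateTime
subtraction returns seconds as a float, so the default decimal=7 demands
sub-microsecond agreement. A self-contained check:

import numpy as np
from obspy import UTCDateTime

# desired - actual is a float number of seconds, so numpy's scalar
# comparison applies to UTCDateTime pairs unchanged.
np.testing.assert_almost_equal(
    UTCDateTime("2019-07-06T03:20:56.368300Z"),
    UTCDateTime("2019-07-06T03:20:56.368300Z"),
)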
Example 5
def test_trim_multiple_events():
    datapath = os.path.join('data', 'testdata', 'multiple_events')
    datadir = pkg_resources.resource_filename('gmprocess', datapath)
    sc = StreamCollection.from_directory(os.path.join(datadir, 'ci38457511'))
    origin = get_event_object('ci38457511')
    df, catalog = create_travel_time_dataframe(
        sc, os.path.join(datadir, 'catalog.csv'), 5, 0.1, 'iasp91')
    for st in sc:
        st.detrend('demean')
        remove_response(st, None, None)
        st = corner_frequencies.get_constant(st)
        lowpass_filter(st)
        highpass_filter(st)
        signal_split(st, origin)
        signal_end(st,
                   origin.time,
                   origin.longitude,
                   origin.latitude,
                   origin.magnitude,
                   method='model',
                   model='AS16')
        cut(st, 2)
        trim_multiple_events(st, origin, catalog, df, 0.2, 0.7, 'B14',
                             {'vs30': 760}, {'rake': 0})

    num_failures = sum(1 for st in sc if not st.passed)
    assert num_failures == 1

    failure = sc.select(station='WRV2')[0][0].getParameter('failure')
    assert failure['module'] == 'trim_multiple_events'
    assert failure['reason'] == ('A significant arrival from another event '
                                 'occurs within the first 70.0 percent of the '
                                 'signal window')

    for tr in sc.select(station='JRC2')[0]:
        np.testing.assert_almost_equal(
            tr.stats.endtime, UTCDateTime('2019-07-06T03:20:38.7983Z'))
def _test_signal_split():

    st1 = read_data(os.path.join(data_path, "AOM0170806140843.EW"))[0]
    st2 = read_data(os.path.join(data_path, "AOM0170806140843.NS"))[0]
    st3 = read_data(os.path.join(data_path, "AOM0170806140843.UD"))[0]
    st = st1 + st2 + st3

    # Test the AR pick
    PICKER_CONFIG["order_of_preference"] = ["ar", "baer", "cwb"]
    signal_split(st, method="p_arrival", picker_config=PICKER_CONFIG)

    known_arrival = UTCDateTime(2008, 6, 13, 23, 44, 17)
    for tr in st:
        picker_arrival = tr.getParameter("signal_split")["split_time"]
        assert abs(picker_arrival - known_arrival) < 1

    # Test the AR pick without 3 components - defaulting to Baer picker
    # reset the processing parameters...
    for trace in st:
        trace.stats.parameters = []
    st[0].stats.channel = "--"
    signal_split(st, method="p_arrival", picker_config=PICKER_CONFIG)

    for tr in st:
        signal_split_info = tr.getParameter("signal_split")
        picker_arrival = signal_split_info["split_time"]
        assert abs(picker_arrival - known_arrival) < 1
        assert signal_split_info["picker_type"] == "baer"

    # Test CWB picker
    # reset the processing parameters...

    # TODO - uncomment this and fix!!
    # for trace in st:
    #     trace.stats.parameters = []
    # PICKER_CONFIG['order_of_preference'][0] = 'cwb'
    # signal_split(st, method='p_arrival', picker_config=PICKER_CONFIG)
    # for tr in st:
    #     signal_split_info = tr.getParameter('signal_split')
    #     picker_arrival = signal_split_info['split_time']
    #     assert abs(picker_arrival - known_arrival) < 1
    #     assert signal_split_info['picker_type'] == 'cwb'

    # Test velocity split
    # reset the processing parameters...
    for trace in st:
        trace.stats.parameters = []
    signal_split(
        st,
        event_time=UTCDateTime("2008-06-13 23:43:45"),
        event_lon=140.881,
        event_lat=39.030,
        method="velocity",
    )
    for tr in st:
        signal_split_info = tr.getParameter("signal_split")
        assert signal_split_info["method"] == "velocity"
        assert signal_split_info["picker_type"] is None

    # Test an invalid picker type
    PICKER_CONFIG["order_of_preference"][0] = "invalid"
    success = False
    try:
        signal_split(st, method="p_arrival", picker_config=PICKER_CONFIG)
        success = True
    except ValueError:
        pass
    assert success is False

    # Test an invalid split method
    success = False
    try:
        signal_split(st, method="invalid")
        success = True
    except ValueError:
        pass
    assert success is False
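
The success-flag pattern in the last two checks can be written more
compactly with pytest.raises, assuming these tests run under pytest:

import pytest

# Each block passes only if signal_split raises ValueError, mirroring
# the success-flag assertions above.
with pytest.raises(ValueError):
    signal_split(st, method="p_arrival", picker_config=PICKER_CONFIG)
with pytest.raises(ValueError):
    signal_split(st, method="invalid")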
Example 7
def test_corner_frequencies():
    # The default config uses the 'constant' corner frequency method, so
    # here we force the 'snr' method.
    data_files, origin = read_data_dir("geonet", "us1000778i", "*.V1A")
    streams = []
    for f in data_files:
        streams += read_data(f)

    sc = StreamCollection(streams)

    config = get_config()

    window_conf = config["windows"]

    processed_streams = sc.copy()
    for st in processed_streams:
        if st.passed:
            # Estimate noise/signal split time
            event_time = origin.time
            event_lon = origin.longitude
            event_lat = origin.latitude
            st = signal_split(st, origin)

            # Estimate end of signal
            end_conf = window_conf["signal_end"]
            event_mag = origin.magnitude
            print(st)
            st = signal_end(st,
                            event_time=event_time,
                            event_lon=event_lon,
                            event_lat=event_lat,
                            event_mag=event_mag,
                            **end_conf)
            wcheck_conf = window_conf["window_checks"]
            st = window_checks(
                st,
                min_noise_duration=wcheck_conf["min_noise_duration"],
                min_signal_duration=wcheck_conf["min_signal_duration"],
            )

    pconfig = config["processing"]

    # Run SNR check
    # I think we don't do this anymore.
    test = [d for d in pconfig if list(d.keys())[0] == "compute_snr"]
    snr_config = test[0]["compute_snr"]
    snr_config["check"]["min_freq"] = 0.2
    for stream in processed_streams:
        stream = compute_snr(stream, mag=origin.magnitude, **snr_config)

    # Run get_corner_frequencies
    test = [
        d for d in pconfig if list(d.keys())[0] == "get_corner_frequencies"
    ]
    cf_config = test[0]["get_corner_frequencies"]
    snr_config = cf_config["snr"]

    # With same_horiz False
    snr_config["same_horiz"] = False

    lp = []
    hp = []
    for stream in processed_streams:
        if not stream.passed:
            continue
        stream = get_corner_frequencies(stream, method="snr", snr=snr_config)
        if stream[0].hasParameter("corner_frequencies"):
            cfdict = stream[0].getParameter("corner_frequencies")
            lp.append(cfdict["lowpass"])
            hp.append(cfdict["highpass"])
    np.testing.assert_allclose(np.sort(hp),
                               [0.00467919, 0.00584742, 0.01026485],
                               atol=1e-6)

    st = processed_streams.select(station="HSES")[0]
    lps = [tr.getParameter("corner_frequencies")["lowpass"] for tr in st]
    hps = [tr.getParameter("corner_frequencies")["highpass"] for tr in st]
    np.testing.assert_allclose(np.sort(lps), [100.0, 100.0, 100.0], atol=1e-6)
    np.testing.assert_allclose(np.sort(hps),
                               [0.00542478, 0.01026485, 0.02527502],
                               atol=1e-6)

    # With same_horiz True
    snr_config["same_horiz"] = True

    lp = []
    hp = []
    for stream in processed_streams:
        if not stream.passed:
            continue
        stream = get_corner_frequencies(stream, method="snr", snr=snr_config)
        if stream[0].hasParameter("corner_frequencies"):
            cfdict = stream[0].getParameter("corner_frequencies")
            lp.append(cfdict["lowpass"])
            hp.append(cfdict["highpass"])

    np.testing.assert_allclose(np.sort(hp),
                               [0.00467919, 0.01026485, 0.01787214],
                               atol=1e-6)

    st = processed_streams.select(station="HSES")[0]
    lps = [tr.getParameter("corner_frequencies")["lowpass"] for tr in st]
    hps = [tr.getParameter("corner_frequencies")["highpass"] for tr in st]
    np.testing.assert_allclose(np.sort(lps), [100.0, 100.0, 100.0], atol=1e-6)
    np.testing.assert_allclose(np.sort(hps), [0.010265, 0.010265, 0.025275],
                               atol=1e-6)
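
The pconfig lookup above (a list comprehension over 'processing', which is
a list of single-key dicts) recurs throughout these tests; a small helper
capturing the pattern (get_step_args is our own name, not gmprocess API):

def get_step_args(processing_config, step_name):
    # config['processing'] is a list of single-key dicts, each mapping a
    # step name to that step's arguments.
    for step in processing_config:
        if step_name in step:
            return step[step_name]
    raise KeyError(f"no config entry for step '{step_name}'")

# e.g. snr_config = get_step_args(config['processing'],
#                                 'get_corner_frequencies')['snr']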
Example 8
def test_corner_frequencies():
    # The default config uses the 'constant' corner frequency method, so
    # here we force the 'snr' method.
    data_files, origin = read_data_dir('geonet', 'us1000778i', '*.V1A')
    streams = []
    for f in data_files:
        streams += read_data(f)

    sc = StreamCollection(streams)

    config = get_config()

    window_conf = config['windows']

    processed_streams = sc.copy()
    for st in processed_streams:
        if st.passed:
            # Estimate noise/signal split time
            event_time = origin.time
            event_lon = origin.longitude
            event_lat = origin.latitude
            st = signal_split(st, origin)

            # Estimate end of signal
            end_conf = window_conf['signal_end']
            event_mag = origin.magnitude
            print(st)
            st = signal_end(st,
                            event_time=event_time,
                            event_lon=event_lon,
                            event_lat=event_lat,
                            event_mag=event_mag,
                            **end_conf)
            wcheck_conf = window_conf['window_checks']
            st = window_checks(
                st,
                min_noise_duration=wcheck_conf['min_noise_duration'],
                min_signal_duration=wcheck_conf['min_signal_duration'])

    pconfig = config['processing']

    # Run SNR check
    # I think we don't do this anymore.
    test = [d for d in pconfig if list(d.keys())[0] == 'compute_snr']
    snr_config = test[0]['compute_snr']
    snr_config['check']['min_freq'] = 0.2
    for stream in processed_streams:
        stream = compute_snr(stream, mag=origin.magnitude, **snr_config)

    # Run get_corner_frequencies
    test = [
        d for d in pconfig if list(d.keys())[0] == 'get_corner_frequencies'
    ]
    cf_config = test[0]['get_corner_frequencies']
    snr_config = cf_config['snr']

    # With same_horiz False
    snr_config['same_horiz'] = False

    lp = []
    hp = []
    for stream in processed_streams:
        if not stream.passed:
            continue
        stream = get_corner_frequencies(stream, method="snr", snr=snr_config)
        if stream[0].hasParameter('corner_frequencies'):
            cfdict = stream[0].getParameter('corner_frequencies')
            lp.append(cfdict['lowpass'])
            hp.append(cfdict['highpass'])
    np.testing.assert_allclose(np.sort(hp), [0.003052, 0.003052, 0.010265],
                               atol=1e-6)

    st = processed_streams.select(station='HSES')[0]
    lps = [tr.getParameter('corner_frequencies')['lowpass'] for tr in st]
    hps = [tr.getParameter('corner_frequencies')['highpass'] for tr in st]
    np.testing.assert_allclose(np.sort(lps), [100., 100., 100.], atol=1e-6)
    np.testing.assert_allclose(np.sort(hps), [0.003052, 0.010265, 0.025275],
                               atol=1e-6)

    # With same_horiz True
    snr_config['same_horiz'] = True

    lp = []
    hp = []
    for stream in processed_streams:
        if not stream.passed:
            continue
        stream = get_corner_frequencies(stream, method="snr", snr=snr_config)
        if stream[0].hasParameter('corner_frequencies'):
            cfdict = stream[0].getParameter('corner_frequencies')
            lp.append(cfdict['lowpass'])
            hp.append(cfdict['highpass'])

    np.testing.assert_allclose(np.sort(hp), [0.003052, 0.010265, 0.017872],
                               atol=1e-6)

    st = processed_streams.select(station='HSES')[0]
    lps = [tr.getParameter('corner_frequencies')['lowpass'] for tr in st]
    hps = [tr.getParameter('corner_frequencies')['highpass'] for tr in st]
    np.testing.assert_allclose(np.sort(lps), [100., 100., 100.], atol=1e-6)
    np.testing.assert_allclose(np.sort(hps), [0.010265, 0.010265, 0.025275],
                               atol=1e-6)
Example 10
def process_streams(streams, origin, config=None, old_streams=None):
    """
    Run processing steps from the config file.

    This method looks in the 'processing' config section, loops over the
    steps listed there, and hands the config options off to the appropriate
    processing method. Streams that fail any of the tests are kept in the
    StreamCollection, but the parameter 'passed_checks' is set to False and
    subsequent processing steps are not applied once a check has failed.

    Args:
        streams (StreamCollection):
            A StreamCollection object of unprocessed streams.
        origin (ScalarEvent):
            ScalarEvent object.
        config (dict):
            Configuration dictionary (or None). See get_config().
        old_streams (StreamCollection):
            A StreamCollection object of previously processed streams that contain
            manually reviewed information. None if not reprocessing.

    Returns:
        A StreamCollection object.
    """

    if not isinstance(streams, (StreamCollection, StreamArray)):
        raise ValueError("streams must be a StreamCollection instance.")

    if config is None:
        config = get_config()

    event_time = origin.time
    event_lon = origin.longitude
    event_lat = origin.latitude

    # -------------------------------------------------------------------------
    # Compute a travel-time matrix for interpolation later in the
    # trim_multiple events step
    if any("trim_multiple_events" in dict for dict in config["processing"]):
        travel_time_df, catalog = create_travel_time_dataframe(
            streams, **config["travel_time"])

    window_conf = config["windows"]
    model = TauPyModel(config["pickers"]["travel_time"]["model"])

    for st in streams:
        logging.debug(f"Checking stream {st.get_id()}...")
        # Estimate noise/signal split time
        st = signal_split(st,
                          origin,
                          model,
                          picker_config=config["pickers"],
                          config=config)

        # Estimate end of signal
        end_conf = window_conf["signal_end"]
        event_mag = origin.magnitude
        st = signal_end(
            st,
            event_time=event_time,
            event_lon=event_lon,
            event_lat=event_lat,
            event_mag=event_mag,
            **end_conf,
        )
        wcheck_conf = window_conf["window_checks"]
        if wcheck_conf["do_check"]:
            st = window_checks(
                st,
                min_noise_duration=wcheck_conf["min_noise_duration"],
                min_signal_duration=wcheck_conf["min_signal_duration"],
            )

    # -------------------------------------------------------------------------
    # Begin processing steps
    processing_steps = config["processing"]

    # Loop over streams
    for i, stream in enumerate(streams):
        logging.info(f"Stream: {stream.get_id()}")
        # Check if we are reprocessing (indicated by presence of old_streams)
        if old_streams is not None:
            old_stream = old_streams[i]
            for j in range(len(old_stream)):
                tr_old = old_stream[j]
                # Check if old_streams have review parameters because it is not
                # guaranteed
                if tr_old.hasParameter("review"):
                    review_dict = tr_old.getParameter("review")
                    # Transfer review parameter from old stream to new
                    stream[j].setParameter("review", review_dict)
                    # Was it failed via manual review?
                    if "accepted" in review_dict:
                        if not review_dict["accepted"]:
                            stream[j].fail("Manual review")

        for processing_step_dict in processing_steps:

            key_list = list(processing_step_dict.keys())
            if len(key_list) != 1:
                raise ValueError(
                    "Each processing step must contain exactly one key.")
            step_name = key_list[0]

            logging.debug(f"Processing step: {step_name}")
            step_args = processing_step_dict[step_name]
            # Using globals doesn't seem like a great solution here, but it
            # works.
            if step_name not in globals():
                raise ValueError(f"Processing step {step_name} is not valid.")

            # Origin is required by some steps and has to be handled specially.
            # There must be a better solution for this...
            if step_name in REQ_ORIGIN:
                step_args["origin"] = origin
            if step_name == "trim_multiple_events":
                step_args["catalog"] = catalog
                step_args["travel_time_df"] = travel_time_df
            if step_name == "compute_snr":
                step_args["mag"] = origin.magnitude

            if step_args is None:
                stream = globals()[step_name](stream, config)
            else:
                stream = globals()[step_name](stream,
                                              **step_args,
                                              config=config)

    # -------------------------------------------------------------------------
    # Begin colocated instrument selection
    if "colocated" in config:
        colocated_conf = config["colocated"]
        if isinstance(streams, StreamCollection):
            streams.select_colocated(**colocated_conf, origin=origin)

    for st in streams:
        for tr in st:
            tr.stats.standard.process_level = PROCESS_LEVELS["V2"]

    logging.info("Finished processing streams.")
    return streams
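
As the inline comment concedes, dispatching through globals() ties the set
of valid step names to whatever happens to be imported into the module. An
explicit registry is one alternative; a minimal sketch (the registry and
run_step are illustrative, not gmprocess API):

# Hypothetical explicit registry, shown with a few of the steps used above.
PROCESSING_STEPS = {
    "signal_split": signal_split,
    "signal_end": signal_end,
    "compute_snr": compute_snr,
    "get_corner_frequencies": get_corner_frequencies,
}

def run_step(step_name, stream, **step_args):
    # Unknown names fail fast, just like the globals() check above.
    if step_name not in PROCESSING_STEPS:
        raise ValueError(f"Processing step {step_name} is not valid.")
    return PROCESSING_STEPS[step_name](stream, **step_args)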
Example 11
def process_streams(streams, origin, config=None):
    """
    Run processing steps from the config file.

    This method looks in the 'processing' config section, loops over the
    steps listed there, and hands the config options off to the appropriate
    processing method. Streams that fail any of the tests are kept in the
    StreamCollection, but the parameter 'passed_checks' is set to False and
    subsequent processing steps are not applied once a check has failed.

    Args:
        streams (StreamCollection):
            A StreamCollection object.
        origin (ScalarEvent):
            ScalarEvent object.
        config (dict):
            Configuration dictionary (or None). See get_config().

    Returns:
        A StreamCollection object.
    """

    if not isinstance(streams, StreamCollection):
        raise ValueError('streams must be a StreamCollection instance.')

    if config is None:
        config = get_config()

    event_time = origin.time
    event_lon = origin.longitude
    event_lat = origin.latitude

    # -------------------------------------------------------------------------
    # Compute a travel-time matrix for interpolation later in the
    # trim_multiple events step
    if any('trim_multiple_events' in step for step in config['processing']):
        travel_time_df, catalog = create_travel_time_dataframe(
            streams, **config['travel_time'])

    window_conf = config['windows']
    model = TauPyModel(config['pickers']['travel_time']['model'])

    for st in streams:
        logging.debug('Checking stream %s...' % st.get_id())
        # Estimate noise/signal split time
        st = signal_split(
            st,
            origin,
            model,
            picker_config=config['pickers'],
            config=config)

        # Estimate end of signal
        end_conf = window_conf['signal_end']
        event_mag = origin.magnitude
        st = signal_end(
            st,
            event_time=event_time,
            event_lon=event_lon,
            event_lat=event_lat,
            event_mag=event_mag,
            **end_conf
        )
        wcheck_conf = window_conf['window_checks']
        if wcheck_conf['do_check']:
            st = window_checks(
                st,
                min_noise_duration=wcheck_conf['min_noise_duration'],
                min_signal_duration=wcheck_conf['min_signal_duration']
            )

    # -------------------------------------------------------------------------
    # Begin processing steps
    processing_steps = config['processing']

    # Loop over streams
    for stream in streams:
        logging.info('Stream: %s' % stream.get_id())
        for processing_step_dict in processing_steps:

            key_list = list(processing_step_dict.keys())
            if len(key_list) != 1:
                raise ValueError(
                    'Each processing step must contain exactly one key.')
            step_name = key_list[0]

            logging.debug('Processing step: %s' % step_name)
            step_args = processing_step_dict[step_name]
            # Using globals doesn't seem like a great solution here, but it
            # works.
            if step_name not in globals():
                raise ValueError(
                    'Processing step %s is not valid.' % step_name)

            # Origin is required by some steps and has to be handled specially.
            # There must be a better solution for this...
            if step_name in REQ_ORIGIN:
                step_args['origin'] = origin
            if step_name == 'trim_multiple_events':
                step_args['catalog'] = catalog
                step_args['travel_time_df'] = travel_time_df
            if step_name == 'compute_snr':
                step_args['mag'] = origin.magnitude

            if step_args is None:
                stream = globals()[step_name](stream)
            else:
                stream = globals()[step_name](stream, **step_args)

    # -------------------------------------------------------------------------
    # Begin colocated instrument selection
    colocated_conf = config['colocated']
    streams.select_colocated(**colocated_conf)

    for st in streams:
        for tr in st:
            tr.stats.standard.process_level = PROCESS_LEVELS['V2']

    logging.info('Finished processing streams.')
    return streams
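
A typical driver for either version of process_streams, reusing helpers
that appear in the tests above (a sketch; datadir is assumed to point at a
directory of raw records for the event):

config = get_config()
raw_streams = StreamCollection.from_directory(datadir)
event = get_event_object('ci38457511')

processed = process_streams(raw_streams, event, config=config)
for st in processed:
    print(st.get_id(), 'passed' if st.passed else 'failed')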
Example 12
def test_corner_frequencies_magnitude():
    # The default config uses the 'constant' corner frequency method, so
    # here we force the 'magnitude' method.
    data_files, origin = read_data_dir("geonet", "us1000778i", "*.V1A")
    streams = []
    for f in data_files:
        streams += read_data(f)

    sc = StreamCollection(streams)

    config = get_config()

    window_conf = config["windows"]

    processed_streams = sc.copy()
    for st in processed_streams:
        if st.passed:
            # Estimate noise/signal split time
            event_time = origin.time
            event_lon = origin.longitude
            event_lat = origin.latitude
            st = signal_split(st, origin)

            # Estimate end of signal
            end_conf = window_conf["signal_end"]
            event_mag = origin.magnitude
            print(st)
            st = signal_end(
                st,
                event_time=event_time,
                event_lon=event_lon,
                event_lat=event_lat,
                event_mag=event_mag,
                **end_conf
            )
            wcheck_conf = window_conf["window_checks"]
            st = window_checks(
                st,
                min_noise_duration=wcheck_conf["min_noise_duration"],
                min_signal_duration=wcheck_conf["min_signal_duration"],
            )

    pconfig = config["processing"]

    # Run get_corner_frequencies
    test = [d for d in pconfig if list(d.keys())[0] == "get_corner_frequencies"]
    cf_config = test[0]["get_corner_frequencies"]
    mag_config = cf_config["magnitude"]

    lp = []
    hp = []
    for stream in processed_streams:
        if not stream.passed:
            continue
        stream = get_corner_frequencies(
            stream, origin, method="magnitude", magnitude=mag_config
        )
        if stream[0].hasParameter("corner_frequencies"):
            cfdict = stream[0].getParameter("corner_frequencies")
            lp.append(cfdict["lowpass"])
            hp.append(cfdict["highpass"])