Example #1
def test_get_travel_time_df():
    datapath = os.path.join('data', 'testdata', 'travel_times')
    datadir = pkg_resources.resource_filename('gmprocess', datapath)

    sc1 = StreamCollection.from_directory(os.path.join(datadir, 'ci37218996'))
    sc2 = StreamCollection.from_directory(os.path.join(datadir, 'ci38461735'))
    scs = [sc1, sc2]

    df1, catalog = create_travel_time_dataframe(
        sc1, os.path.join(datadir, 'catalog_test_traveltimes.csv'), 5, 0.1,
        'iasp91')
    df2, catalog = create_travel_time_dataframe(
        sc2, os.path.join(datadir, 'catalog_test_traveltimes.csv'), 5, 0.1,
        'iasp91')

    model = TauPyModel('iasp91')
    for dfidx, df in enumerate([df1, df2]):
        for staidx, sta in enumerate(df):
            for eqidx, time in enumerate(df[sta]):
                sta_coords = scs[dfidx][staidx][0].stats.coordinates
                event = catalog[eqidx]
                dist = locations2degrees(sta_coords['latitude'],
                                         sta_coords['longitude'],
                                         event.latitude, event.longitude)
                if event.depth_km < 0:
                    depth = 0
                else:
                    depth = event.depth_km
                travel_time = model.get_travel_times(depth, dist,
                                                     ['p', 'P', 'Pn'])[0].time
                abs_time = event.time + travel_time
                np.testing.assert_almost_equal(abs_time, time, decimal=1)
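The lookup above leans on ObsPy's TauPy interface. A minimal standalone sketch of the same pattern, with illustrative coordinates and depth rather than values from the test data:

# Standalone sketch of the travel-time lookup used above; the
# coordinates and source depth are illustrative assumptions.
from obspy.taup import TauPyModel
from obspy.geodetics import locations2degrees

model = TauPyModel("iasp91")
# Epicentral distance in degrees between two hypothetical points
# (roughly 4-5 degrees apart, so a Pn/P arrival exists).
dist = locations2degrees(34.0, -118.0, 37.0, -114.0)
arrivals = model.get_travel_times(source_depth_in_km=10.0,
                                  distance_in_degree=dist,
                                  phase_list=["p", "P", "Pn"])
print(arrivals[0].time)  # seconds after the origin time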
Example #2
def test_get_travel_time_df():
    datapath = os.path.join("data", "testdata", "travel_times")
    datadir = pkg_resources.resource_filename("gmprocess", datapath)

    sc1 = StreamCollection.from_directory(os.path.join(datadir, "ci37218996"))
    sc2 = StreamCollection.from_directory(os.path.join(datadir, "ci38461735"))
    scs = [sc1, sc2]

    df1, catalog = create_travel_time_dataframe(
        sc1, os.path.join(datadir, "catalog_test_traveltimes.csv"), 5, 0.1,
        "iasp91")
    df2, catalog = create_travel_time_dataframe(
        sc2, os.path.join(datadir, "catalog_test_traveltimes.csv"), 5, 0.1,
        "iasp91")

    model = TauPyModel("iasp91")
    for dfidx, df in enumerate([df1, df2]):
        for staidx, sta in enumerate(df):
            for eqidx, time in enumerate(df[sta]):
                sta_coords = scs[dfidx][staidx][0].stats.coordinates
                event = catalog[eqidx]
                dist = locations2degrees(
                    sta_coords["latitude"],
                    sta_coords["longitude"],
                    event.latitude,
                    event.longitude,
                )
                if event.depth_km < 0:
                    depth = 0
                else:
                    depth = event.depth_km
                travel_time = model.get_travel_times(depth, dist,
                                                     ["p", "P", "Pn"])[0].time
                abs_time = event.time + travel_time
                np.testing.assert_almost_equal(abs_time, time, decimal=1)
Example #3
def test_colocated():
    datapath = os.path.join("data", "testdata", "colocated_instruments")
    datadir = pkg_resources.resource_filename("gmprocess", datapath)
    sc = StreamCollection.from_directory(datadir)

    sc.select_colocated()
    assert sc.n_passed == 7
    assert sc.n_failed == 4

    # What if no preference is matched?
    sc = StreamCollection.from_directory(datadir)
    sc.select_colocated(preference=["XX"])
    assert sc.n_passed == 3
    assert sc.n_failed == 8
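A toy sketch of what preference-ordered selection among colocated instrument codes could look like; the preference order and the logic are assumptions for illustration, not gmprocess's actual select_colocated implementation:

# Toy preference-ordered selection; NOT gmprocess's implementation.
def pick_preferred(instruments, preference=("HN", "BN", "HH", "BH")):
    # Return the highest-ranked instrument code, or None if no
    # preference is matched (the failing case tested above).
    for code in preference:
        if code in instruments:
            return code
    return None

print(pick_preferred(["BN", "HN"]))    # 'HN'
print(pick_preferred(["HN"], ["XX"]))  # None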
Example #4
def test_allow_nans():
    dpath = os.path.join("data", "testdata", "fdsn", "uu60363602")
    datadir = pkg_resources.resource_filename("gmprocess", dpath)
    sc = StreamCollection.from_directory(datadir)
    origin = read_event_json_files([os.path.join(datadir, "event.json")])[0]
    psc = process_streams(sc, origin)
    st = psc[0]

    ss = StationSummary.from_stream(
        st,
        components=["quadratic_mean"],
        imts=["FAS(4.0)"],
        bandwidth=300,
        allow_nans=True,
    )
    assert np.isnan(ss.pgms.Result).all()

    ss = StationSummary.from_stream(
        st,
        components=["quadratic_mean"],
        imts=["FAS(4.0)"],
        bandwidth=189,
        allow_nans=False,
    )
    assert ~np.isnan(ss.pgms.Result).all()
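Note the precedence in the final assertion: ~np.isnan(...).all() applies .all() before ~, so it asserts that the results are not all NaN, not that they are NaN-free. A quick numpy illustration:

import numpy as np

vals = np.array([1.0, np.nan])
print(~np.isnan(vals).all())    # True: not every value is NaN
print((~np.isnan(vals)).all())  # False: at least one value is NaN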
Example #5
def test_zero_crossings():
    datapath = os.path.join("data", "testdata", "zero_crossings")
    datadir = pkg_resources.resource_filename("gmprocess", datapath)
    sc = StreamCollection.from_directory(datadir)
    sc.describe()

    conf = get_config()

    update = {
        "processing": [
            {"detrend": {"detrending_method": "demean"}},
            {"check_zero_crossings": {"min_crossings": 1}},
        ]
    }
    update_dict(conf, update)

    edict = {
        "id": "ak20419010",
        "time": UTCDateTime("2018-11-30T17:29:29"),
        "lat": 61.346,
        "lon": -149.955,
        "depth": 46.7,
        "magnitude": 7.1,
    }
    event = get_event_object(edict)
    test = process_streams(sc, event, conf)
    for st in test:
        for tr in st:
            assert tr.hasParameter("ZeroCrossingRate")
    np.testing.assert_allclose(
        test[0][0].getParameter("ZeroCrossingRate")["crossing_rate"],
        0.008888888888888889,
        atol=1e-5,
    )
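A zero-crossing rate can be derived from sign changes between consecutive samples. A minimal numpy sketch of that idea; the exact normalization gmprocess uses (per sample vs. per second) is an assumption here:

import numpy as np

def crossing_rate(data):
    # Count sign changes between consecutive samples, per sample.
    crossings = np.count_nonzero(np.diff(np.sign(data)))
    return crossings / len(data)

t = np.linspace(0.0, 1.0, 1000)
print(crossing_rate(np.sin(2 * np.pi * 5 * t)))  # ~0.01 (10 crossings)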
Example #6
def test_fit_spectra():
    config = get_config()
    datapath = os.path.join('data', 'testdata', 'demo', 'ci38457511', 'raw')
    datadir = pkg_resources.resource_filename('gmprocess', datapath)
    event = get_event_object('ci38457511')
    sc = StreamCollection.from_directory(datadir)
    for st in sc:
        st = signal_split(st, event)
        end_conf = config['windows']['signal_end']
        st = signal_end(st,
                        event_time=event.time,
                        event_lon=event.longitude,
                        event_lat=event.latitude,
                        event_mag=event.magnitude,
                        **end_conf)
        st = compute_snr(st, 30)
        st = get_corner_frequencies(st,
                                    method='constant',
                                    constant={
                                        'highpass': 0.08,
                                        'lowpass': 20.0
                                    })

    for st in sc:
        spectrum.fit_spectra(st, event)
Example #7
def _test_colocated():
    eventid = 'ci38445975'
    datafiles, event = read_data_dir('fdsn', eventid, '*')
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config_file = os.path.join(datadir, 'test_config.yml')
    with open(config_file, 'r', encoding='utf-8') as f:
        config = yaml.load(f, Loader=yaml.FullLoader)
    processed_streams = process_streams(raw_streams, event, config=config)

    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        ws = StreamWorkspace(tfile)
        ws.addEvent(event)
        ws.addStreams(event, raw_streams, label='raw')
        ws.addStreams(event, processed_streams, label='processed')
        ws.calcMetrics(eventid, labels=['processed'], config=config)
        stasum = ws.getStreamMetrics(eventid, 'CI', 'MIKB', 'processed')
        np.testing.assert_allclose(
            stasum.get_pgm('duration', 'geometric_mean'), 38.94480068)
        ws.close()
    except Exception:
        raise
    finally:
        shutil.rmtree(tdir)
Example #8
def test_get_status():
    dpath = os.path.join('data', 'testdata', 'status')
    directory = pkg_resources.resource_filename('gmprocess', dpath)
    sc = StreamCollection.from_directory(directory)

    # Manually fail some of the streams
    sc.select(station='BSAP')[0][0].fail('Failure 0')
    sc.select(station='CPE')[0][0].fail('Failure 1')
    sc.select(station='MIKB', instrument='HN')[0][0].fail('Failure 2')
    sc.select(network='PG', station='PSD')[0][0].fail('Failure 3')

    # Test results for the 'short', 'long', and 'net' formats
    short = sc.get_status('short')
    assert (short == 1).all()

    long = sc.get_status('long')
    assert long.at['AZ.BSAP.HN'] == 'Failure 0'
    assert long.at['AZ.BZN.HN'] == ''
    assert long.at['AZ.CPE.HN'] == 'Failure 1'
    assert long.at['CI.MIKB.BN'] == ''
    assert long.at['CI.MIKB.HN'] == 'Failure 2'
    assert long.at['CI.PSD.HN'] == ''
    assert long.at['PG.PSD.HN'] == 'Failure 3'

    net = sc.get_status('net')
    assert net.at['AZ', 'Number Passed'] == 1
    assert net.at['AZ', 'Number Failed'] == 2
    assert net.at['CI', 'Number Passed'] == 2
    assert net.at['CI', 'Number Failed'] == 1
    assert net.at['PG', 'Number Passed'] == 0
    assert net.at['PG', 'Number Failed'] == 1
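get_status returns pandas objects that the test queries with .at. A small self-contained analogue of the 'long' and 'net' tables above:

import pandas as pd

long_status = pd.Series({"AZ.BSAP.HN": "Failure 0", "AZ.BZN.HN": ""})
print(long_status.at["AZ.BSAP.HN"])  # 'Failure 0'

net_status = pd.DataFrame({"Number Passed": [1], "Number Failed": [2]},
                          index=["AZ"])
print(net_status.at["AZ", "Number Failed"])  # 2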
Example #9
def test_fit_spectra():
    config = get_config()
    datapath = os.path.join("data", "testdata", "demo", "ci38457511", "raw")
    datadir = pkg_resources.resource_filename("gmprocess", datapath)
    event = get_event_object("ci38457511")
    sc = StreamCollection.from_directory(datadir)
    for st in sc:
        st = signal_split(st, event)
        end_conf = config["windows"]["signal_end"]
        st = signal_end(st,
                        event_time=event.time,
                        event_lon=event.longitude,
                        event_lat=event.latitude,
                        event_mag=event.magnitude,
                        **end_conf)
        st = compute_snr(st, 30)
        st = get_corner_frequencies(st,
                                    event,
                                    method="constant",
                                    constant={
                                        "highpass": 0.08,
                                        "lowpass": 20.0
                                    })

    for st in sc:
        spectrum.fit_spectra(st, event)
Example #10
def test_get_status():
    dpath = os.path.join("data", "testdata", "status")
    directory = pkg_resources.resource_filename("gmprocess", dpath)
    sc = StreamCollection.from_directory(directory)

    # Manually fail some of the streams
    sc.select(station="BSAP")[0][0].fail("Failure 0")
    sc.select(station="CPE")[0][0].fail("Failure 1")
    sc.select(station="MIKB", instrument="HN")[0][0].fail("Failure 2")
    sc.select(network="PG", station="PSD")[0][0].fail("Failure 3")

    # Test results for the 'short', 'long', and 'net' formats
    short = sc.get_status("short")
    assert (short == 1).all()

    long = sc.get_status("long")
    assert long.at["AZ.BSAP.HN"] == "Failure 0"
    assert long.at["AZ.BZN.HN"] == ""
    assert long.at["AZ.CPE.HN"] == "Failure 1"
    assert long.at["CI.MIKB.BN"] == ""
    assert long.at["CI.MIKB.HN"] == "Failure 2"
    assert long.at["CI.PSD.HN"] == ""
    assert long.at["PG.PSD.HN"] == "Failure 3"

    net = sc.get_status("net")
    assert net.at["AZ", "Number Passed"] == 1
    assert net.at["AZ", "Number Failed"] == 2
    assert net.at["CI", "Number Passed"] == 2
    assert net.at["CI", "Number Failed"] == 1
    assert net.at["PG", "Number Passed"] == 0
    assert net.at["PG", "Number Failed"] == 1
Example #11
def _test_colocated():
    eventid = "ci38445975"
    datafiles, event = read_data_dir("fdsn", eventid, "*")
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config_file = os.path.join(datadir, "test_config.yml")
    with open(config_file, "r", encoding="utf-8") as f:
        yaml = YAML()
        yaml.preserve_quotes = True
        config = yaml.load(f)
    processed_streams = process_streams(raw_streams, event, config=config)

    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, "test.hdf")
        ws = StreamWorkspace(tfile)
        ws.addEvent(event)
        ws.addStreams(event, raw_streams, label="raw")
        ws.addStreams(event, processed_streams, label="processed")
        ws.calcMetrics(eventid, labels=["processed"], config=config)
        stasum = ws.getStreamMetrics(eventid, "CI", "MIKB", "processed")
        np.testing.assert_allclose(
            stasum.get_pgm("duration", "geometric_mean"), 38.94480068)
        ws.close()
    except Exception:
        raise
    finally:
        shutil.rmtree(tdir)
Example #12
def test():
    # Read in data with only one stationxml entry
    data_files, origin = read_data_dir("station_xml_epochs", "nc73631381",
                                       "*.mseed")
    test_root = os.path.normpath(os.path.join(data_files[0], os.pardir))
    sc = StreamCollection.from_directory(test_root)
    psc = process_streams(sc, origin)

    # Read in data with all dates in stationxml
    data_files, origin = read_data_dir("station_xml_epochs", "nc73631381_ad",
                                       "*.mseed")
    test_root = os.path.normpath(os.path.join(data_files[0], os.pardir))
    sc_ad = StreamCollection.from_directory(test_root)
    psc_ad = process_streams(sc_ad, origin)

    single_maxes = np.sort([np.max(tr.data) for tr in psc[0]])
    alldates_maxes = np.sort([np.max(tr.data) for tr in psc_ad[0]])
    assert_allclose(single_maxes, alldates_maxes)
Example #13
def test_num_horizontals():
    data_path = pkg_resources.resource_filename('gmprocess', 'data')
    sc = StreamCollection.from_directory(
        os.path.join(data_path, 'testdata', 'fdsn', 'uw61251926',
                     'strong_motion'))
    st = sc.select(station='SP2')[0]
    assert st.num_horizontal == 2

    for tr in st:
        tr.stats.channel = 'ENZ'
    assert st.num_horizontal == 0

    for tr in st:
        tr.stats.channel = 'EN1'
    assert st.num_horizontal == 3
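num_horizontal evidently classifies channels by the SEED orientation code (the third character: 'Z' vertical; 'N', 'E', '1', '2' horizontal). A sketch of that convention as assumed here; gmprocess's exact rule may differ:

# Count horizontals by the SEED orientation code (assumed rule).
def num_horizontal(channels):
    return sum(1 for code in channels if code[2].upper() != "Z")

print(num_horizontal(["ENZ", "ENN", "ENE"]))  # 2
print(num_horizontal(["ENZ", "ENZ", "ENZ"]))  # 0
print(num_horizontal(["EN1", "EN1", "EN1"]))  # 3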
Example #14
def test_num_horizontals():
    data_path = pkg_resources.resource_filename("gmprocess", "data")
    sc = StreamCollection.from_directory(
        os.path.join(data_path, "testdata", "fdsn", "uw61251926",
                     "strong_motion"))
    st = sc.select(station="SP2")[0]
    assert st.num_horizontal == 2

    for tr in st:
        tr.stats.channel = "ENZ"
    assert st.num_horizontal == 0

    for tr in st:
        tr.stats.channel = "EN1"
    assert st.num_horizontal == 3
Example #15
def test_metrics():
    eventid = "usb000syza"
    datafiles, event = read_data_dir("knet", eventid, "*")
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config = update_config(os.path.join(datadir, "config_min_freq_0p2.yml"))
    # turn off sta/lta check and snr checks
    # newconfig = drop_processing(config, ['check_sta_lta', 'compute_snr'])
    # processed_streams = process_streams(raw_streams, event, config=newconfig)
    newconfig = config.copy()
    newconfig["processing"].append(
        {"NNet_QA": {
            "acceptance_threshold": 0.5,
            "model_name": "CantWell"
        }})
    processed_streams = process_streams(raw_streams.copy(),
                                        event,
                                        config=newconfig)

    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, "test.hdf")
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, raw_streams, label="raw")
        workspace.addStreams(event, processed_streams, label="processed")
        stream1 = raw_streams[0]

        # Get metrics from station summary for raw streams
        summary1 = StationSummary.from_config(stream1)
        s1_df_in = summary1.pgms.sort_values(["IMT", "IMC"])
        array1 = s1_df_in["Result"].to_numpy()

        # Compare to metrics from getStreamMetrics for raw streams
        workspace.calcMetrics(eventid, labels=["raw"])
        summary1_a = workspace.getStreamMetrics(event.id,
                                                stream1[0].stats.network,
                                                stream1[0].stats.station,
                                                "raw")
        s1_df_out = summary1_a.pgms.sort_values(["IMT", "IMC"])
        array2 = s1_df_out["Result"].to_numpy()

        np.testing.assert_allclose(array1, array2, atol=1e-6, rtol=1e-6)
        workspace.close()
    except Exception:
        raise
    finally:
        shutil.rmtree(tdir)
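The mkdtemp/try/finally pattern above can be expressed more compactly with tempfile.TemporaryDirectory, which cleans up automatically:

import os
import tempfile

with tempfile.TemporaryDirectory() as tdir:
    tfile = os.path.join(tdir, "test.hdf")
    # ... open the workspace, add streams, run assertions ...
# tdir is removed on exit, even if an exception was raised.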
Example #16
def test_metrics():
    eventid = 'usb000syza'
    datafiles, event = read_data_dir('knet', eventid, '*')
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config = update_config(os.path.join(datadir, 'config_min_freq_0p2.yml'))
    # turn off sta/lta check and snr checks
    # newconfig = drop_processing(config, ['check_sta_lta', 'compute_snr'])
    # processed_streams = process_streams(raw_streams, event, config=newconfig)
    newconfig = config.copy()
    newconfig['processing'].append(
        {'NNet_QA': {
            'acceptance_threshold': 0.5,
            'model_name': 'CantWell'
        }})
    processed_streams = process_streams(raw_streams.copy(),
                                        event,
                                        config=newconfig)

    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, raw_streams, label='raw')
        workspace.addStreams(event, processed_streams, label='processed')
        stream1 = raw_streams[0]

        # Get metrics from station summary for raw streams
        summary1 = StationSummary.from_config(stream1)
        s1_df_in = summary1.pgms.sort_values(['IMT', 'IMC'])
        array1 = s1_df_in['Result'].to_numpy()

        # Compare to metrics from getStreamMetrics for raw streams
        workspace.calcMetrics(eventid, labels=['raw'])
        summary1_a = workspace.getStreamMetrics(event.id,
                                                stream1[0].stats.network,
                                                stream1[0].stats.station,
                                                'raw')
        s1_df_out = summary1_a.pgms.sort_values(['IMT', 'IMC'])
        array2 = s1_df_out['Result'].to_numpy()

        np.testing.assert_allclose(array1, array2, atol=1e-6, rtol=1e-6)
        workspace.close()
    except Exception:
        raise
    finally:
        shutil.rmtree(tdir)
Example #17
def _test_trim_multiple_events():
    datapath = os.path.join("data", "testdata", "multiple_events")
    datadir = pkg_resources.resource_filename("gmprocess", datapath)
    sc = StreamCollection.from_directory(os.path.join(datadir, "ci38457511"))
    origin = get_event_object("ci38457511")
    df, catalog = create_travel_time_dataframe(
        sc, os.path.join(datadir, "catalog.csv"), 5, 0.1, "iasp91"
    )
    for st in sc:
        st.detrend("demean")
        remove_response(st, None, None)
        st = corner_frequencies.from_constant(st)
        lowpass_filter(st)
        highpass_filter(st)
        signal_split(st, origin)
        signal_end(
            st,
            origin.time,
            origin.longitude,
            origin.latitude,
            origin.magnitude,
            method="model",
            model="AS16",
        )
        cut(st, 2)
        trim_multiple_events(
            st, origin, catalog, df, 0.2, 0.7, "B14", {"vs30": 760}, {"rake": 0}
        )

    num_failures = sum([1 if not st.passed else 0 for st in sc])
    assert num_failures == 2

    failure = sc.select(station="WRV2")[0][0].getParameter("failure")
    assert failure["module"] == "trim_multiple_events"
    assert failure["reason"] == (
        "A significant arrival from another event "
        "occurs within the first 70.0 percent of the "
        "signal window"
    )

    for tr in sc.select(station="JRC2")[0]:
        np.testing.assert_almost_equal(
            tr.stats.endtime, UTCDateTime("2019-07-06T03:20:56.368300Z")
        )
Example #18
def test_allow_nans():
    dpath = os.path.join('data', 'testdata', 'fdsn', 'uu60363602')
    datadir = pkg_resources.resource_filename('gmprocess', dpath)
    sc = StreamCollection.from_directory(datadir)
    origin = read_event_json_files([os.path.join(datadir, 'event.json')])[0]
    psc = process_streams(sc, origin)
    st = psc[0]

    ss = StationSummary.from_stream(st,
                                    components=['quadratic_mean'],
                                    imts=['FAS(4.0)'],
                                    bandwidth=189,
                                    allow_nans=True)
    assert np.isnan(ss.pgms.Result).all()

    ss = StationSummary.from_stream(st,
                                    components=['quadratic_mean'],
                                    imts=['FAS(4.0)'],
                                    bandwidth=189,
                                    allow_nans=False)
    assert ~np.isnan(ss.pgms.Result).all()
Example #19
def test_zero_crossings():
    datapath = os.path.join('data', 'testdata', 'zero_crossings')
    datadir = pkg_resources.resource_filename('gmprocess', datapath)
    sc = StreamCollection.from_directory(datadir)
    sc.describe()

    conf = get_config()

    update = {
        'processing': [{
            'detrend': {
                'detrending_method': 'demean'
            }
        }, {
            'check_zero_crossings': {
                'min_crossings': 1
            }
        }]
    }
    update_dict(conf, update)

    edict = {
        'id': 'ak20419010',
        'time': UTCDateTime('2018-11-30T17:29:29'),
        'lat': 61.346,
        'lon': -149.955,
        'depth': 46.7,
        'magnitude': 7.1
    }
    event = get_event_object(edict)
    test = process_streams(sc, event, conf)
    for st in test:
        for tr in st:
            assert tr.hasParameter('ZeroCrossingRate')
    np.testing.assert_allclose(
        test[0][0].getParameter('ZeroCrossingRate')['crossing_rate'],
        0.008888888888888889,
        atol=1e-5)
Example #20
def _test_metrics2():
    eventid = 'usb000syza'
    datafiles, event = read_data_dir('knet', eventid, '*')
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config = update_config(os.path.join(datadir, 'config_min_freq_0p2.yml'))
    config['metrics']['output_imts'].append('Arias')
    config['metrics']['output_imcs'].append('arithmetic_mean')
    # Adjust checks so that streams pass checks for this test
    newconfig = drop_processing(config, ['check_sta_lta'])
    csnr = [s for s in newconfig['processing'] if 'compute_snr' in s.keys()][0]
    csnr['compute_snr']['check']['threshold'] = -10.0
    processed_streams = process_streams(raw_streams, event, config=newconfig)

    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, processed_streams, label='processed')
        workspace.calcMetrics(event.id, labels=['processed'])
        etable, imc_tables1, readmes1 = workspace.getTables('processed')
        assert 'ARITHMETIC_MEAN' not in imc_tables1
        assert 'ARITHMETIC_MEAN' not in readmes1
        del workspace.dataset.auxiliary_data.WaveFormMetrics
        del workspace.dataset.auxiliary_data.StationMetrics
        workspace.calcMetrics(event.id, labels=['processed'], config=config)
        etable2, imc_tables2, readmes2 = workspace.getTables('processed')
        assert 'ARITHMETIC_MEAN' in imc_tables2
        assert 'ARITHMETIC_MEAN' in readmes2
        assert 'ARIAS' in imc_tables2['ARITHMETIC_MEAN']
        testarray = readmes2['ARITHMETIC_MEAN']['Column header'].to_numpy()
        assert 'ARIAS' in testarray
        workspace.close()
    except Exception:
        raise
    finally:
        shutil.rmtree(tdir)
Example #21
def _test_metrics2():
    eventid = "usb000syza"
    datafiles, event = read_data_dir("knet", eventid, "*")
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config = update_config(os.path.join(datadir, "config_min_freq_0p2.yml"))
    config["metrics"]["output_imts"].append("Arias")
    config["metrics"]["output_imcs"].append("arithmetic_mean")
    # Adjust checks so that streams pass checks for this test
    newconfig = drop_processing(config, ["check_sta_lta"])
    csnr = [s for s in newconfig["processing"] if "compute_snr" in s.keys()][0]
    csnr["compute_snr"]["check"]["threshold"] = -10.0
    processed_streams = process_streams(raw_streams, event, config=newconfig)

    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, "test.hdf")
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, processed_streams, label="processed")
        workspace.calcMetrics(event.id, labels=["processed"])
        etable, imc_tables1, readmes1 = workspace.getTables("processed")
        assert "ARITHMETIC_MEAN" not in imc_tables1
        assert "ARITHMETIC_MEAN" not in readmes1
        del workspace.dataset.auxiliary_data.WaveFormMetrics
        del workspace.dataset.auxiliary_data.StationMetrics
        workspace.calcMetrics(event.id, labels=["processed"], config=config)
        etable2, imc_tables2, readmes2 = workspace.getTables("processed")
        assert "ARITHMETIC_MEAN" in imc_tables2
        assert "ARITHMETIC_MEAN" in readmes2
        assert "ARIAS" in imc_tables2["ARITHMETIC_MEAN"]
        testarray = readmes2["ARITHMETIC_MEAN"]["Column header"].to_numpy()
        assert "ARIAS" in testarray
        workspace.close()
    except Exception:
        raise
    finally:
        shutil.rmtree(tdir)
Example #22
def test_trim_multiple_events():
    datapath = os.path.join('data', 'testdata', 'multiple_events')
    datadir = pkg_resources.resource_filename('gmprocess', datapath)
    sc = StreamCollection.from_directory(os.path.join(datadir, 'ci38457511'))
    origin = get_event_object('ci38457511')
    df, catalog = create_travel_time_dataframe(
        sc, os.path.join(datadir, 'catalog.csv'), 5, 0.1, 'iasp91')
    for st in sc:
        st.detrend('demean')
        remove_response(st, None, None)
        st = corner_frequencies.get_constant(st)
        lowpass_filter(st)
        highpass_filter(st)
        signal_split(st, origin)
        signal_end(st,
                   origin.time,
                   origin.longitude,
                   origin.latitude,
                   origin.magnitude,
                   method='model',
                   model='AS16')
        cut(st, 2)
        trim_multiple_events(st, origin, catalog, df, 0.2, 0.7, 'B14',
                             {'vs30': 760}, {'rake': 0})

    num_failures = sum([1 if not st.passed else 0 for st in sc])
    assert num_failures == 1

    failure = sc.select(station='WRV2')[0][0].getParameter('failure')
    assert failure['module'] == 'trim_multiple_events'
    assert failure['reason'] == ('A significant arrival from another event '
                                 'occurs within the first 70.0 percent of the '
                                 'signal window')

    for tr in sc.select(station='JRC2')[0]:
        np.testing.assert_almost_equal(
            tr.stats.endtime, UTCDateTime('2019-07-06T03:20:38.7983Z'))
Example #23
def test_lowpass_max():
    datapath = os.path.join("data", "testdata", "lowpass_max")
    datadir = pkg_resources.resource_filename("gmprocess", datapath)
    sc = StreamCollection.from_directory(datadir)
    sc.describe()

    conf = get_config()
    update = {
        "processing": [
            {
                "detrend": {
                    "detrending_method": "demean"
                }
            },
            {
                "remove_response": {
                    "f1": 0.001,
                    "f2": 0.005,
                    "f3": None,
                    "f4": None,
                    "water_level": 60,
                }
            },
            #            {'detrend': {'detrending_method': 'linear'}},
            #            {'detrend': {'detrending_method': 'demean'}},
            {
                "get_corner_frequencies": {
                    "constant": {
                        "highpass": 0.08,
                        "lowpass": 20.0
                    },
                    "method": "constant",
                    "snr": {
                        "same_horiz": True
                    },
                }
            },
            {
                "lowpass_max_frequency": {
                    "fn_fac": 0.9
                }
            },
        ]
    }
    update_dict(conf, update)
    update = {
        "windows": {
            "signal_end": {
                "method": "model",
                "vmin": 1.0,
                "floor": 120,
                "model": "AS16",
                "epsilon": 2.0,
            },
            "window_checks": {
                "do_check": False,
                "min_noise_duration": 1.0,
                "min_signal_duration": 1.0,
            },
        }
    }
    update_dict(conf, update)
    edict = {
        "id": "ci38038071",
        "time": UTCDateTime("2018-08-30 02:35:36"),
        "lat": 34.136,
        "lon": -117.775,
        "depth": 5.5,
        "magnitude": 4.4,
    }
    event = get_event_object(edict)
    test = process_streams(sc, event, conf)
    for st in test:
        for tr in st:
            freq_dict = tr.getParameter("corner_frequencies")
            np.testing.assert_allclose(freq_dict["lowpass"], 18.0)
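The asserted 18.0 Hz is consistent with capping the lowpass corner at fn_fac times the Nyquist frequency: for data sampled at 40 sps, 0.9 * (40 / 2) = 18 Hz, which undercuts the configured 20 Hz. A one-function sketch of that cap, assuming (as the name suggests) this is what lowpass_max_frequency does:

# Assumed behavior of the lowpass cap, for illustration only.
def capped_lowpass(configured, sampling_rate, fn_fac=0.9):
    nyquist = sampling_rate / 2.0
    return min(configured, fn_fac * nyquist)

print(capped_lowpass(20.0, 40.0))   # 18.0, matching the assertion
print(capped_lowpass(20.0, 100.0))  # 20.0, cap not triggered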
Example #24
def test_lowpass_max():
    datapath = os.path.join('data', 'testdata', 'lowpass_max')
    datadir = pkg_resources.resource_filename('gmprocess', datapath)
    sc = StreamCollection.from_directory(datadir)
    sc.describe()

    conf = get_config()
    update = {
        'processing': [
            {
                'detrend': {
                    'detrending_method': 'demean'
                }
            },
            {
                'remove_response': {
                    'f1': 0.001,
                    'f2': 0.005,
                    'f3': None,
                    'f4': None,
                    'output': 'ACC',
                    'water_level': 60
                }
            },
            #            {'detrend': {'detrending_method': 'linear'}},
            #            {'detrend': {'detrending_method': 'demean'}},
            {
                'get_corner_frequencies': {
                    'constant': {
                        'highpass': 0.08,
                        'lowpass': 20.0
                    },
                    'method': 'constant',
                    'snr': {
                        'same_horiz': True
                    }
                }
            },
            {
                'lowpass_max_frequency': {
                    'fn_fac': 0.9
                }
            }
        ]
    }
    update_dict(conf, update)
    update = {
        'windows': {
            'signal_end': {
                'method': 'model',
                'vmin': 1.0,
                'floor': 120,
                'model': 'AS16',
                'epsilon': 2.0
            },
            'window_checks': {
                'do_check': False,
                'min_noise_duration': 1.0,
                'min_signal_duration': 1.0
            }
        }
    }
    update_dict(conf, update)
    edict = {
        'id': 'ci38038071',
        'time': UTCDateTime('2018-08-30 02:35:36'),
        'lat': 34.136,
        'lon': -117.775,
        'depth': 5.5,
        'magnitude': 4.4
    }
    event = get_event_object(edict)
    test = process_streams(sc, event, conf)
    for st in test:
        for tr in st:
            freq_dict = tr.getParameter('corner_frequencies')
            np.testing.assert_allclose(freq_dict['lowpass'], 18.0)
Example #25
def _test_vs30_dist_metrics():
    KNOWN_DISTANCES = {
        'epicentral': 5.1,
        'hypocentral': 10.2,
        'rupture': 2.21,
        'rupture_var': np.nan,
        'joyner_boore': 2.21,
        'joyner_boore_var': np.nan,
        'gc2_rx': 2.66,
        'gc2_ry': 3.49,
        'gc2_ry0': 0.00,
        'gc2_U': 34.34,
        'gc2_T': 2.66
    }
    KNOWN_BAZ = 239.46
    KNOWN_VS30 = 331.47

    eventid = 'ci38457511'
    datafiles, event = read_data_dir('fdsn', eventid, '*')
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config = update_config(os.path.join(datadir, 'config_min_freq_0p2.yml'))
    processed_streams = process_streams(raw_streams, event, config=config)
    rupture_file = get_rupture_file(datadir)
    grid_file = os.path.join(datadir, 'test_grid.grd')
    config['metrics']['vs30'] = {
        'vs30': {
            'file': grid_file,
            'column_header': 'GlobalVs30',
            'readme_entry': 'GlobalVs30',
            'units': 'm/s'
        }
    }
    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        ws = StreamWorkspace(tfile)
        ws.addEvent(event)
        ws.addStreams(event, raw_streams, label='raw')
        ws.addStreams(event, processed_streams, label='processed')
        ws.calcMetrics(event.id,
                       rupture_file=rupture_file,
                       labels=['processed'],
                       config=config)
        sta_sum = ws.getStreamMetrics(event.id, 'CI', 'CLC', 'processed')

        for dist in sta_sum.distances:
            np.testing.assert_allclose(sta_sum.distances[dist],
                                       KNOWN_DISTANCES[dist],
                                       rtol=0.01)
        np.testing.assert_allclose(sta_sum._back_azimuth, KNOWN_BAZ, rtol=0.01)
        np.testing.assert_allclose(sta_sum._vs30['vs30']['value'],
                                   KNOWN_VS30,
                                   rtol=0.01)
        event_df, imc_tables, readme_tables = ws.getTables('processed')
        ws.close()
        check_cols = set([
            'EpicentralDistance', 'HypocentralDistance', 'RuptureDistance',
            'RuptureDistanceVar', 'JoynerBooreDistance',
            'JoynerBooreDistanceVar', 'GC2_rx', 'GC2_ry', 'GC2_ry0', 'GC2_U',
            'GC2_T', 'GlobalVs30', 'BackAzimuth'
        ])
        assert check_cols.issubset(set(readme_tables['Z']['Column header']))
        assert check_cols.issubset(set(imc_tables['Z'].columns))
    except Exception:
        raise
    finally:
        shutil.rmtree(tdir)
Example #26
def test_StreamCollection():

    # read usc data
    dpath = os.path.join('data', 'testdata', 'usc', 'ci3144585')
    directory = pkg_resources.resource_filename('gmprocess', dpath)
    usc_streams, unprocessed_files, unprocessed_file_errors = \
        directory_to_streams(directory)
    assert len(usc_streams) == 7

    usc_sc = StreamCollection(usc_streams)

    # Use print method
    print(usc_sc)

    # Use len method
    assert len(usc_sc) == 3

    # Use nonzero method
    assert bool(usc_sc)

    # Slice
    lengths = [len(usc_sc[0]), len(usc_sc[1]), len(usc_sc[2])]
    sort_lengths = np.sort(lengths)
    assert sort_lengths[0] == 1
    assert sort_lengths[1] == 3
    assert sort_lengths[2] == 3

    # read dmg data
    dpath = os.path.join('data', 'testdata', 'dmg', 'ci3144585')
    directory = pkg_resources.resource_filename('gmprocess', dpath)
    dmg_streams, unprocessed_files, unprocessed_file_errors = \
        directory_to_streams(directory)
    assert len(dmg_streams) == 1

    dmg_sc = StreamCollection(dmg_streams)

    # Has one station
    assert len(dmg_sc) == 1
    # With 3 channels
    assert len(dmg_sc[0]) == 3

    # So this should have 4 stations
    test1 = dmg_sc + usc_sc
    assert len(test1) == 4

    test_copy = dmg_sc.copy()
    assert test_copy[0][0].stats['standard']['process_level'] == \
        'uncorrected physical units'

    stream1 = test_copy[0]
    test_append = usc_sc.append(stream1)
    assert len(test_append) == 4

    # Change back to unique values for station/network
    for tr in dmg_sc[0]:
        tr.stats['network'] = 'LALALA'
        tr.stats['station'] = '575757'
    stream2 = dmg_sc[0]
    test_append = usc_sc.append(stream2)
    assert len(test_append) == 4

    # Check the from_directory method
    sc_test = StreamCollection.from_directory(directory)
    assert len(sc_test) == 1

    # Test to_dataframe
    jsonfile = os.path.join(directory, 'event.json')
    with open(jsonfile, 'rt', encoding='utf-8') as f:
        origin = json.load(f)
    dmg_df = sc_test.to_dataframe(origin)
    np.testing.assert_allclose(dmg_df['H1']['PGA'], 0.145615, atol=1e-5)

    # Check the from_traces method
    traces = []
    for st in sc_test:
        for tr in st:
            traces.append(tr)
    sc_test = StreamCollection.from_traces(traces)
    assert len(sc_test) == 1
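from_traces regroups a flat trace list into per-station streams. A self-contained sketch of that grouping idea with plain ObsPy traces (illustrative, not gmprocess's implementation):

import numpy as np
from obspy import Trace

def group_by_station(traces):
    # Bucket traces by (network, station), preserving insertion order.
    groups = {}
    for tr in traces:
        key = (tr.stats.network, tr.stats.station)
        groups.setdefault(key, []).append(tr)
    return list(groups.values())

traces = [Trace(np.zeros(10), header={"network": "CE",
                                      "station": "23178",
                                      "channel": code})
          for code in ("HN1", "HN2", "HNZ")]
print(len(group_by_station(traces)))  # 1 station group with 3 traces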
Example #27
def _test_vs30_dist_metrics():
    KNOWN_DISTANCES = {
        "epicentral": 5.1,
        "hypocentral": 10.2,
        "rupture": 2.21,
        "rupture_var": np.nan,
        "joyner_boore": 2.21,
        "joyner_boore_var": np.nan,
        "gc2_rx": 2.66,
        "gc2_ry": 3.49,
        "gc2_ry0": 0.00,
        "gc2_U": 34.34,
        "gc2_T": 2.66,
    }
    KNOWN_BAZ = 239.46
    KNOWN_VS30 = 331.47

    eventid = "ci38457511"
    datafiles, event = read_data_dir("fdsn", eventid, "*")
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config = update_config(os.path.join(datadir, "config_min_freq_0p2.yml"))
    processed_streams = process_streams(raw_streams, event, config=config)
    rupture_file = get_rupture_file(datadir)
    grid_file = os.path.join(datadir, "test_grid.grd")
    config["metrics"]["vs30"] = {
        "vs30": {
            "file": grid_file,
            "column_header": "GlobalVs30",
            "readme_entry": "GlobalVs30",
            "units": "m/s",
        }
    }
    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, "test.hdf")
        ws = StreamWorkspace(tfile)
        ws.addEvent(event)
        ws.addStreams(event, raw_streams, label="raw")
        ws.addStreams(event, processed_streams, label="processed")
        ws.calcMetrics(event.id,
                       rupture_file=rupture_file,
                       labels=["processed"],
                       config=config)
        sta_sum = ws.getStreamMetrics(event.id, "CI", "CLC", "processed")

        for dist in sta_sum.distances:
            np.testing.assert_allclose(sta_sum.distances[dist],
                                       KNOWN_DISTANCES[dist],
                                       rtol=0.01)
        np.testing.assert_allclose(sta_sum._back_azimuth, KNOWN_BAZ, rtol=0.01)
        np.testing.assert_allclose(sta_sum._vs30["vs30"]["value"],
                                   KNOWN_VS30,
                                   rtol=0.01)
        event_df, imc_tables, readme_tables = ws.getTables("processed")
        ws.close()
        check_cols = set([
            "EpicentralDistance",
            "HypocentralDistance",
            "RuptureDistance",
            "RuptureDistanceVar",
            "JoynerBooreDistance",
            "JoynerBooreDistanceVar",
            "GC2_rx",
            "GC2_ry",
            "GC2_ry0",
            "GC2_U",
            "GC2_T",
            "GlobalVs30",
            "BackAzimuth",
        ])
        assert check_cols.issubset(set(readme_tables["Z"]["Column header"]))
        assert check_cols.issubset(set(imc_tables["Z"].columns))
    except Exception:
        raise
    finally:
        shutil.rmtree(tdir)
Example #28
def test_StreamCollection():

    # read usc data
    dpath = os.path.join("data", "testdata", "usc", "ci3144585")
    directory = pkg_resources.resource_filename("gmprocess", dpath)
    usc_streams, unprocessed_files, unprocessed_file_errors = directory_to_streams(
        directory)
    assert len(usc_streams) == 7

    usc_sc = StreamCollection(usc_streams)

    # Use print method
    print(usc_sc)

    # Use len method
    assert len(usc_sc) == 3

    # Use nonzero method
    assert bool(usc_sc)

    # Slice
    lengths = [len(usc_sc[0]), len(usc_sc[1]), len(usc_sc[2])]
    sort_lengths = np.sort(lengths)
    assert sort_lengths[0] == 1
    assert sort_lengths[1] == 3
    assert sort_lengths[2] == 3

    # read dmg data
    dpath = os.path.join("data", "testdata", "dmg", "ci3144585")
    directory = pkg_resources.resource_filename("gmprocess", dpath)
    dmg_streams, unprocessed_files, unprocessed_file_errors = directory_to_streams(
        directory)
    assert len(dmg_streams) == 1

    dmg_sc = StreamCollection(dmg_streams)

    # Has one station
    assert len(dmg_sc) == 1
    # With 3 channels
    assert len(dmg_sc[0]) == 3

    # So this should have 4 stations
    test1 = dmg_sc + usc_sc
    assert len(test1) == 4

    test_copy = dmg_sc.copy()
    assert (test_copy[0][0].stats["standard"]["process_level"] ==
            "uncorrected physical units")

    stream1 = test_copy[0]
    test_append = usc_sc.append(stream1)
    assert len(test_append) == 4

    # Change back to unique values for station/network
    for tr in dmg_sc[0]:
        tr.stats["network"] = "LALALA"
        tr.stats["station"] = "575757"
    stream2 = dmg_sc[0]
    test_append = usc_sc.append(stream2)
    assert len(test_append) == 4

    # Check the from_directory method
    sc_test = StreamCollection.from_directory(directory)
    assert len(sc_test) == 1

    # Test to_dataframe
    jsonfile = os.path.join(directory, "event.json")
    with open(jsonfile, "rt", encoding="utf-8") as f:
        origin = json.load(f)
    dmg_df = sc_test.to_dataframe(origin)
    np.testing.assert_allclose(dmg_df["H1"]["PGA"], 0.145615, atol=1e-5)

    # Check the from_traces method
    traces = []
    for st in sc_test:
        for tr in st:
            traces.append(tr)
    sc_test = StreamCollection.from_traces(traces)
    assert len(sc_test) == 1