Example #1
def test_get_travel_time_df():
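    # Build a travel-time DataFrame for each StreamCollection, then verify every
    # entry against a first P arrival recomputed directly with TauPyModel.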
    datapath = os.path.join('data', 'testdata', 'travel_times')
    datadir = pkg_resources.resource_filename('gmprocess', datapath)

    sc1 = StreamCollection.from_directory(os.path.join(datadir, 'ci37218996'))
    sc2 = StreamCollection.from_directory(os.path.join(datadir, 'ci38461735'))
    scs = [sc1, sc2]

    df1, catalog = create_travel_time_dataframe(
        sc1, os.path.join(datadir, 'catalog_test_traveltimes.csv'), 5, 0.1,
        'iasp91')
    df2, catalog = create_travel_time_dataframe(
        sc2, os.path.join(datadir, 'catalog_test_traveltimes.csv'), 5, 0.1,
        'iasp91')

    model = TauPyModel('iasp91')
    for dfidx, df in enumerate([df1, df2]):
        for staidx, sta in enumerate(df):
            for eqidx, time in enumerate(df[sta]):
                sta_coords = scs[dfidx][staidx][0].stats.coordinates
                event = catalog[eqidx]
                dist = locations2degrees(sta_coords['latitude'],
                                         sta_coords['longitude'],
                                         event.latitude, event.longitude)
                if event.depth_km < 0:
                    depth = 0
                else:
                    depth = event.depth_km
                travel_time = model.get_travel_times(depth, dist,
                                                     ['p', 'P', 'Pn'])[0].time
                abs_time = event.time + travel_time
                np.testing.assert_almost_equal(abs_time, time, decimal=1)
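These excerpts omit their import blocks. A minimal set consistent with the calls above might look like the sketch below; the gmprocess module paths are assumptions based on the older package layout shown in the script of Example #16 and may differ in other versions.

import os
import pkg_resources

import numpy as np
from obspy.taup import TauPyModel
from obspy.geodetics import locations2degrees

from gmprocess.streamcollection import StreamCollection
# Path assumed; create_travel_time_dataframe lives in a gmprocess windowing module.
from gmprocess.windows import create_travel_time_dataframe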
Example #2
def test_colocated():
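    # select_colocated keeps one preferred instrument per co-located station and
    # fails the rest; the pass/fail counts are checked below.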
    datapath = os.path.join('data', 'testdata', 'colocated_instruments')
    datadir = pkg_resources.resource_filename('gmprocess', datapath)
    sc = StreamCollection.from_directory(datadir)

    sc.select_colocated()
    assert sc.n_passed == 7
    assert sc.n_failed == 4

    # What if no preference is matched?
    sc = StreamCollection.from_directory(datadir)
    sc.select_colocated(preference=["XX"])
    assert sc.n_passed == 3
    assert sc.n_failed == 8
Example #3
def test_metrics2():
    eventid = 'usb000syza'
    datafiles, event = read_data_dir('knet', eventid, '*')
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config = get_config()
    config['metrics']['output_imts'].append('Arias')
    config['metrics']['output_imcs'].append('arithmetic_mean')
    # turn off sta/lta check and snr checks
    newconfig = drop_processing(config, ['check_sta_lta', 'compute_snr'])
    processed_streams = process_streams(raw_streams, event, config=newconfig)

    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, processed_streams, label='processed')
        workspace.calcMetrics(event.id, labels=['processed'])
        etable, imc_tables1 = workspace.getTables('processed')
        etable2, imc_tables2 = workspace.getTables('processed', config=config)
        assert 'ARITHMETIC_MEAN' not in imc_tables1
        assert 'ARITHMETIC_MEAN' in imc_tables2
        assert 'ARIAS' in imc_tables2['ARITHMETIC_MEAN']
    except Exception as e:
        raise e
    finally:
        shutil.rmtree(tdir)
Example #4
def test_colocated():
    eventid = 'ci38445975'
    datafiles, event = read_data_dir('fdsn', eventid, '*')
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config_file = os.path.join(datadir, 'test_config.yml')
    with open(config_file, 'r') as f:
        config = yaml.load(f, Loader=yaml.FullLoader)
    processed_streams = process_streams(raw_streams, event, config=config)

    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        ws = StreamWorkspace(tfile)
        ws.addEvent(event)
        ws.addStreams(event, raw_streams, label='raw')
        ws.addStreams(event, processed_streams, label='processed')
        ws.calcMetrics(eventid, labels=['processed'], config=config)
        stasum = ws.getStreamMetrics(eventid, 'CI', 'MIKB', 'processed')
        np.testing.assert_allclose(
            stasum.get_pgm('duration', 'geometric_mean'), 38.94480068)
        ws.close()
    except Exception as e:
        raise e
    finally:
        shutil.rmtree(tdir)
Example #5
def test_get_status():
    dpath = os.path.join('data', 'testdata', 'status')
    directory = pkg_resources.resource_filename('gmprocess', dpath)
    sc = StreamCollection.from_directory(directory)

    # Manually fail some of the streams
    sc.select(station='BSAP')[0][0].fail('Failure 0')
    sc.select(station='CPE')[0][0].fail('Failure 1')
    sc.select(station='MIKB', instrument='HN')[0][0].fail('Failure 2')
    sc.select(network='PG', station='PSD')[0][0].fail('Failure 3')

    # Test results from 'short', 'long', and 'net'
    short = sc.get_status('short')
    assert (short == 1).all()

    long = sc.get_status('long')
    assert long.at['AZ.BSAP.HN'] == 'Failure 0'
    assert long.at['AZ.BZN.HN'] == ''
    assert long.at['AZ.CPE.HN'] == 'Failure 1'
    assert long.at['CI.MIKB.BN'] == ''
    assert long.at['CI.MIKB.HN'] == 'Failure 2'
    assert long.at['CI.PSD.HN'] == ''
    assert long.at['PG.PSD.HN'] == 'Failure 3'

    net = sc.get_status('net')
    assert net.at['AZ', 'number passed'] == 1
    assert net.at['AZ', 'number failed'] == 2
    assert net.at['CI', 'number passed'] == 2
    assert net.at['CI', 'number failed'] == 1
    assert net.at['PG', 'number passed'] == 0
    assert net.at['PG', 'number failed'] == 1
Example #6
def test_fit_spectra():
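    # Minimal pre-processing before spectral fitting: pick the signal window,
    # compute the SNR, and assign constant corner frequencies to each stream.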
    config = get_config()
    datapath = os.path.join('data', 'testdata', 'demo', 'ci38457511', 'raw')
    datadir = pkg_resources.resource_filename('gmprocess', datapath)
    event = get_event_object('ci38457511')
    sc = StreamCollection.from_directory(datadir)
    for st in sc:
        st = signal_split(st, event)
        end_conf = config['windows']['signal_end']
        st = signal_end(st,
                        event_time=event.time,
                        event_lon=event.longitude,
                        event_lat=event.latitude,
                        event_mag=event.magnitude,
                        **end_conf)
        st = compute_snr(st, 30)
        st = get_corner_frequencies(st,
                                    method='constant',
                                    constant={
                                        'highpass': 0.08,
                                        'lowpass': 20.0
                                    })

    for st in sc:
        spectrum.fit_spectra(st, event)
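The fitted source-spectrum results are presumably stored back on the traces as trace parameters. A hedged way to inspect them, assuming a 'fit_spectra' parameter key (the exact key may differ by gmprocess version), would be:

for st in sc:
    for tr in st:
        # 'fit_spectra' is an assumed parameter name
        if tr.hasParameter('fit_spectra'):
            print(tr.stats.station, tr.getParameter('fit_spectra'))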
Example #7
def test_metrics2():
    eventid = 'usb000syza'
    datafiles, event = read_data_dir('knet', eventid, '*')
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config = update_config(os.path.join(datadir, 'config_min_freq_0p2.yml'))
    config['metrics']['output_imts'].append('Arias')
    config['metrics']['output_imcs'].append('arithmetic_mean')
    # turn off sta/lta check and snr checks
    newconfig = drop_processing(config, ['check_sta_lta', 'compute_snr'])
    processed_streams = process_streams(raw_streams, event, config=newconfig)

    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, processed_streams, label='processed')
        workspace.calcMetrics(event.id, labels=['processed'])
        etable, imc_tables1, readmes1 = workspace.getTables('processed')
        assert 'ARITHMETIC_MEAN' not in imc_tables1
        assert 'ARITHMETIC_MEAN' not in readmes1
        del workspace.dataset.auxiliary_data.WaveFormMetrics
        del workspace.dataset.auxiliary_data.StationMetrics
        workspace.calcMetrics(event.id, labels=['processed'], config=config)
        etable2, imc_tables2, readmes2 = workspace.getTables('processed')
        assert 'ARITHMETIC_MEAN' in imc_tables2
        assert 'ARITHMETIC_MEAN' in readmes2
        assert 'ARIAS' in imc_tables2['ARITHMETIC_MEAN']
        testarray = readmes2['ARITHMETIC_MEAN']['Column header'].to_numpy()
        assert 'ARIAS' in testarray
    except Exception as e:
        raise e
    finally:
        shutil.rmtree(tdir)
Example #8
def test_lowpass_max():
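    # The constant 20 Hz lowpass corner configured below should be capped at
    # 18 Hz by the 'lowpass_max_frequency' step; this is asserted at the end.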
    datapath = os.path.join('data', 'testdata', 'lowpass_max')
    datadir = pkg_resources.resource_filename('gmprocess', datapath)
    sc = StreamCollection.from_directory(datadir)
    sc.describe()

    conf = get_config()
    update = {
        'processing': [
            {'detrend': {'detrending_method': 'demean'}},
            {'remove_response': {
                'f1': 0.001, 'f2': 0.005, 'f3': None, 'f4': None,
                'output': 'ACC', 'water_level': 60}
             },
            #            {'detrend': {'detrending_method': 'linear'}},
            #            {'detrend': {'detrending_method': 'demean'}},
            {'get_corner_frequencies': {
                'constant': {
                    'highpass': 0.08, 'lowpass': 20.0
                },
                'method': 'constant',
                'snr': {'same_horiz': True}}
             },
            {'lowpass_max_frequency': {'fn_fac': 0.9}}
        ]
    }
    update_dict(conf, update)
    update = {
        'windows': {
            'signal_end': {
                'method': 'model',
                'vmin': 1.0,
                'floor': 120,
                'model': 'AS16',
                'epsilon': 2.0
            },
            'window_checks': {
                'do_check': False,
                'min_noise_duration': 1.0,
                'min_signal_duration': 1.0
            }
        }
    }
    update_dict(conf, update)
    edict = {
        'id': 'ci38038071',
        'time': UTCDateTime('2018-08-30 02:35:36'),
        'lat': 34.136,
        'lon': -117.775,
        'depth': 5.5,
        'magnitude': 4.4
    }
    event = get_event_object(edict)
    test = process_streams(sc, event, conf)
    for st in test:
        for tr in st:
            freq_dict = tr.getParameter('corner_frequencies')
            np.testing.assert_allclose(freq_dict['lowpass'], 18.0)
Example #9
def test_num_horizontals():
    data_path = pkg_resources.resource_filename('gmprocess', 'data')
    sc = StreamCollection.from_directory(os.path.join(
        data_path, 'testdata', 'fdsn', 'uw61251926', 'strong_motion'))
    st = sc.select(station='SP2')[0]
    assert st.num_horizontal == 2

    for tr in st:
        tr.stats.channel = 'ENZ'
    assert st.num_horizontal == 0

    for tr in st:
        tr.stats.channel = 'EN1'
    assert st.num_horizontal == 3
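The assertions above imply a simple counting rule: a trace is treated as horizontal unless its channel code marks it as vertical. A hypothetical re-implementation consistent with this test (not the actual StationStream.num_horizontal property) could be:

def count_horizontals(stream):
    # Count channels whose code does not end in 'Z' (the vertical component),
    # which matches the ENZ -> 0 and EN1 -> 3 assertions above.
    return sum(1 for tr in stream if not tr.stats.channel.upper().endswith('Z'))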
Example #10
def test_metrics():
    eventid = 'usb000syza'
    datafiles, event = read_data_dir('knet', eventid, '*')
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config = get_config()
    # turn off sta/lta check and snr checks
    newconfig = drop_processing(config, ['check_sta_lta', 'compute_snr'])
    processed_streams = process_streams(raw_streams, event, config=newconfig)

    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, processed_streams, label='processed')
        stream1 = processed_streams[0]
        stream2 = processed_streams[1]
        summary1 = StationSummary.from_config(stream1)
        summary2 = StationSummary.from_config(stream2)
        workspace.setStreamMetrics(event.id, 'processed', summary1)
        workspace.setStreamMetrics(event.id, 'processed', summary2)
        workspace.calcStationMetrics(event.id, labels=['processed'])
        summary1_a = workspace.getStreamMetrics(event.id,
                                                stream1[0].stats.station,
                                                'processed')
        s1_df_in = summary1.pgms.sort_values(['IMT', 'IMC'])
        s1_df_out = summary1_a.pgms.sort_values(['IMT', 'IMC'])
        array1 = s1_df_in['Result'].to_numpy()
        array2 = s1_df_out['Result'].to_numpy()
        np.testing.assert_almost_equal(array1, array2, decimal=4)

        df = workspace.getMetricsTable(event.id)
        cmp_series = {
            'GREATER_OF_TWO_HORIZONTALS': 0.6787,
            'H1': 0.3869,
            'H2': 0.6787,
            'Z': 0.7663
        }
        pga_dict = df.iloc[0]['PGA'].to_dict()
        for key, value in pga_dict.items():
            value2 = cmp_series[key]
            np.testing.assert_almost_equal(value, value2, decimal=4)

        workspace.close()
    except Exception as e:
        raise e
    finally:
        shutil.rmtree(tdir)
Example #11
def test_metrics():
    eventid = 'usb000syza'
    datafiles, event = read_data_dir('knet',
                                     eventid,
                                     '*')
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config = get_config()
    # turn off sta/lta check and snr checks
    newconfig = drop_processing(config, ['check_sta_lta', 'compute_snr'])
    processed_streams = process_streams(raw_streams, event, config=newconfig)

    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, processed_streams, label='processed')
        stream1 = processed_streams[0]
        stream2 = processed_streams[1]
        summary1 = StationSummary.from_config(stream1)
        summary2 = StationSummary.from_config(stream2)
        workspace.setStreamMetrics(event.id, 'processed', summary1)
        workspace.setStreamMetrics(event.id, 'processed', summary2)
        summary1_a = workspace.getStreamMetrics(event.id,
                                                stream1[0].stats.station,
                                                'processed')
        s1_df_in = summary1.pgms.sort_values(['IMT', 'IMC'])
        s1_df_out = summary1_a.pgms.sort_values(['IMT', 'IMC'])
        array1 = s1_df_in['Result'].to_numpy()
        array2 = s1_df_out['Result'].to_numpy()
        np.testing.assert_almost_equal(array1, array2, decimal=4)

        df = workspace.getMetricsTable(event.id)
        cmp_series = {'GREATER_OF_TWO_HORIZONTALS': 0.6787,
                      'HN1': 0.3869,
                      'HN2': 0.6787,
                      'HNZ': 0.7663}
        pga_dict = df.iloc[0]['PGA'].to_dict()
        for key, value in pga_dict.items():
            value2 = cmp_series[key]
            np.testing.assert_almost_equal(value, value2, decimal=4)

        workspace.close()
    except Exception as e:
        raise e
    finally:
        shutil.rmtree(tdir)
Example #12
def test_allow_nans():
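    # With allow_nans=True every FAS(4.0) result for this record comes back as
    # NaN; with allow_nans=False values are computed instead (asserted below).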
    dpath = os.path.join('data', 'testdata', 'fdsn', 'uu60363602')
    datadir = pkg_resources.resource_filename('gmprocess', dpath)
    sc = StreamCollection.from_directory(datadir)
    origin = read_event_json_files([os.path.join(datadir, 'event.json')])[0]
    psc = process_streams(sc, origin)
    st = psc[0]

    ss = StationSummary.from_stream(
        st, components=['quadratic_mean'], imts=['FAS(4.0)'], bandwidth=189,
        allow_nans=True)
    assert np.isnan(ss.pgms.Result).all()

    ss = StationSummary.from_stream(
        st, components=['quadratic_mean'], imts=['FAS(4.0)'], bandwidth=189,
        allow_nans=False)
    assert ~np.isnan(ss.pgms.Result).all()
Example #13
def test_metrics():
    eventid = 'usb000syza'
    datafiles, event = read_data_dir('knet', eventid, '*')
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config = update_config(os.path.join(datadir, 'config_min_freq_0p2.yml'))
    # turn off sta/lta check and snr checks
    # newconfig = drop_processing(config, ['check_sta_lta', 'compute_snr'])
    # processed_streams = process_streams(raw_streams, event, config=newconfig)
    newconfig = config.copy()
    newconfig['processing'].append(
        {'NNet_QA': {
            'acceptance_threshold': 0.5,
            'model_name': 'CantWell'
        }})
    processed_streams = process_streams(raw_streams.copy(),
                                        event,
                                        config=newconfig)

    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, raw_streams, label='raw')
        workspace.addStreams(event, processed_streams, label='processed')
        stream1 = raw_streams[0]
        summary1 = StationSummary.from_config(stream1)
        s1_df_in = summary1.pgms.sort_values(['IMT', 'IMC'])
        array1 = s1_df_in['Result'].to_numpy()
        workspace.calcMetrics(eventid, labels=['raw'])
        pstreams2 = workspace.getStreams(event.id, labels=['processed'])
        assert pstreams2[0].getStreamParamKeys() == ['nnet_qa']
        summary1_a = workspace.getStreamMetrics(event.id,
                                                stream1[0].stats.network,
                                                stream1[0].stats.station,
                                                'raw')
        s1_df_out = summary1_a.pgms.sort_values(['IMT', 'IMC'])
        array2 = s1_df_out['Result'].to_numpy()
        np.testing.assert_almost_equal(array1, array2, decimal=4)

        workspace.close()
    except Exception as e:
        raise e
    finally:
        shutil.rmtree(tdir)
Example #14
def test_zero_crossings():
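    # After processing with the 'check_zero_crossings' step every trace should
    # carry a ZeroCrossingRate parameter; one known crossing rate is checked.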
    datapath = os.path.join('data', 'testdata', 'zero_crossings')
    datadir = pkg_resources.resource_filename('gmprocess', datapath)
    sc = StreamCollection.from_directory(datadir)
    sc.describe()

    conf = get_config()

    update = {
        'processing': [{
            'detrend': {
                'detrending_method': 'demean'
            }
        }, {
            'check_zero_crossings': {
                'min_crossings': 1
            }
        }]
    }
    update_dict(conf, update)

    edict = {
        'id': 'ak20419010',
        'time': UTCDateTime('2018-11-30T17:29:29'),
        'lat': 61.346,
        'lon': -149.955,
        'depth': 46.7,
        'magnitude': 7.1
    }
    event = get_event_object(edict)
    test = process_streams(sc, event, conf)
    for st in test:
        for tr in st:
            assert tr.hasParameter('ZeroCrossingRate')
    np.testing.assert_allclose(
        test[0][0].getParameter('ZeroCrossingRate')['crossing_rate'],
        0.008888888888888889,
        atol=1e-5)
Example #15
def test_trim_multiple_events():
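    # trim_multiple_events should fail the one record contaminated by an
    # arrival from another event (station WRV2) and set the end time of the
    # remaining records (checked for JRC2 below).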
    datapath = os.path.join('data', 'testdata', 'multiple_events')
    datadir = pkg_resources.resource_filename('gmprocess', datapath)
    sc = StreamCollection.from_directory(os.path.join(datadir, 'ci38457511'))
    origin = get_event_object('ci38457511')
    df, catalog = create_travel_time_dataframe(
        sc, os.path.join(datadir, 'catalog.csv'), 5, 0.1, 'iasp91')
    for st in sc:
        st.detrend('demean')
        remove_response(st, None, None)
        st = corner_frequencies.constant(st)
        lowpass_filter(st)
        highpass_filter(st)
        signal_split(st, origin)
        signal_end(st,
                   origin.time,
                   origin.longitude,
                   origin.latitude,
                   origin.magnitude,
                   method='model',
                   model='AS16')
        cut(st, 2)
        trim_multiple_events(st, origin, catalog, df, 0.2, 0.7, 'B14',
                             {'vs30': 760}, {'rake': 0})

    num_failures = sum([1 if not st.passed else 0 for st in sc])
    assert num_failures == 1

    failure = sc.select(station='WRV2')[0][0].getParameter('failure')
    assert failure['module'] == 'trim_multiple_events'
    assert failure['reason'] == ('A significant arrival from another event '
                                 'occurs within the first 70.0 percent of the '
                                 'signal window')

    for tr in sc.select(station='JRC2')[0]:
        np.testing.assert_almost_equal(
            tr.stats.endtime, UTCDateTime('2019-07-06T03:20:38.7983Z'))
Example #16
import os
import pkg_resources

from gmprocess.streamcollection import StreamCollection
from gmprocess.config import get_config
from gmprocess.processing import process_streams
from gmprocess.event import get_event_object

gm_home = "/Users/gabriel/groundmotion-processing/"
# data = "/gmprocess/data/testdata/knet/us2000cnnl/"
data = "/data/testdata/demo/ci38457511/raw/"
output = "/Users/gabriel/groundmotion-processing/output/"

# Path to example data
datapath = os.path.join('data', 'testdata', 'demo', 'ci38457511', 'raw')
datadir = pkg_resources.resource_filename('gmprocess', datapath)

sc = StreamCollection.from_directory(datadir)

# Includes 3 StationStreams, each with 3 StationTraces
sc.describe()

# Get the default config file
conf = get_config()

# Get event object
event = get_event_object('ci38457511')

# Process the streams
psc = process_streams(sc, event, conf)
psc.describe()

# Save plots of processed records
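# A hedged sketch of the missing plotting step: StationStream inherits ObsPy's
# Stream.plot(), so one plausible (assumed) way to save one figure per record
# into the 'output' directory defined above is:
for st in psc:
    outfile = os.path.join(output, st[0].stats.station + '.png')
    st.plot(outfile=outfile)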
Example #17
def test_vs30_dist_metrics():
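    # Station metrics for CI.CLC (distances, back azimuth, Vs30 sampled from
    # the test grid) should match the known values below and appear as columns
    # in the 'processed' IMC and README tables.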
    KNOWN_DISTANCES = {
        'epicentral': 5.1,
        'hypocentral': 10.2,
        'rupture': 2.21,
        'rupture_var': np.nan,
        'joyner_boore': 2.21,
        'joyner_boore_var': np.nan,
        'gc2_rx': 2.66,
        'gc2_ry': 3.49,
        'gc2_ry0': 0.00,
        'gc2_U': 34.34,
        'gc2_T': 2.66
    }
    KNOWN_BAZ = 239.46
    KNOWN_VS30 = 331.47

    eventid = 'ci38457511'
    datafiles, event = read_data_dir('fdsn', eventid, '*')
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config = update_config(os.path.join(datadir, 'config_min_freq_0p2.yml'))
    processed_streams = process_streams(raw_streams, event, config=config)
    rupture_file = get_rupture_file(datadir)
    grid_file = os.path.join(datadir, 'test_grid.grd')
    config['metrics']['vs30'] = {
        'vs30': {
            'file': grid_file,
            'column_header': 'GlobalVs30',
            'readme_entry': 'GlobalVs30',
            'units': 'm/s'
        }
    }
    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        ws = StreamWorkspace(tfile)
        ws.addEvent(event)
        ws.addStreams(event, raw_streams, label='raw')
        ws.addStreams(event, processed_streams, label='processed')
        ws.calcMetrics(event.id,
                       rupture_file=rupture_file,
                       labels=['processed'],
                       config=config)
        sta_sum = ws.getStreamMetrics(event.id, 'CI', 'CLC', 'processed')

        for dist in sta_sum.distances:
            np.testing.assert_allclose(sta_sum.distances[dist],
                                       KNOWN_DISTANCES[dist],
                                       rtol=0.01)
        np.testing.assert_allclose(sta_sum._back_azimuth, KNOWN_BAZ, rtol=0.01)
        np.testing.assert_allclose(sta_sum._vs30['vs30']['value'],
                                   KNOWN_VS30,
                                   rtol=0.01)
        event_df, imc_tables, readme_tables = ws.getTables('processed')
        check_cols = set([
            'EpicentralDistance', 'HypocentralDistance', 'RuptureDistance',
            'RuptureDistanceVar', 'JoynerBooreDistance',
            'JoynerBooreDistanceVar', 'GC2_rx', 'GC2_ry', 'GC2_ry0', 'GC2_U',
            'GC2_T', 'GlobalVs30', 'BackAzimuth'
        ])
        assert check_cols.issubset(set(readme_tables['Z']['Column header']))
        assert check_cols.issubset(set(imc_tables['Z'].columns))
    except Exception as e:
        raise e
    finally:
        shutil.rmtree(tdir)
Example #18
def test_StreamCollection():

    # read usc data
    dpath = os.path.join('data', 'testdata', 'usc', 'ci3144585')
    directory = pkg_resources.resource_filename('gmprocess', dpath)
    usc_streams, unprocessed_files, unprocessed_file_errors = \
        directory_to_streams(directory)
    assert len(usc_streams) == 7

    usc_sc = StreamCollection(usc_streams)

    # Use print method
    print(usc_sc)

    # Use len method
    assert len(usc_sc) == 3

    # Use nonzero method
    assert bool(usc_sc)

    # Slice
    lengths = [len(usc_sc[0]), len(usc_sc[1]), len(usc_sc[2])]
    sort_lengths = np.sort(lengths)
    assert sort_lengths[0] == 1
    assert sort_lengths[1] == 3
    assert sort_lengths[2] == 3

    # read dmg data
    dpath = os.path.join('data', 'testdata', 'dmg', 'ci3144585')
    directory = pkg_resources.resource_filename('gmprocess', dpath)
    dmg_streams, unprocessed_files, unprocessed_file_errors = \
        directory_to_streams(directory)
    assert len(dmg_streams) == 1

    dmg_sc = StreamCollection(dmg_streams)

    # Has one station
    assert len(dmg_sc) == 1
    # With 3 channels
    assert len(dmg_sc[0]) == 3

    # So this should have 4 stations
    test1 = dmg_sc + usc_sc
    assert len(test1) == 4

    test_copy = dmg_sc.copy()
    assert test_copy[0][0].stats['standard']['process_level'] == \
        'uncorrected physical units'

    stream1 = test_copy[0]
    test_append = usc_sc.append(stream1)
    assert len(test_append) == 4

    # Change back to unique values for station/network
    for tr in dmg_sc[0]:
        tr.stats['network'] = 'LALALA'
        tr.stats['station'] = '575757'
    stream2 = dmg_sc[0]
    test_append = usc_sc.append(stream2)
    assert len(test_append) == 4

    # Check the from_directory method
    sc_test = StreamCollection.from_directory(directory)
    assert len(sc_test) == 1

    # Test to_dataframe
    jsonfile = os.path.join(directory, 'event.json')
    with open(jsonfile, 'rt') as f:
        origin = json.load(f)
    dmg_df = sc_test.to_dataframe(origin)
    np.testing.assert_allclose(dmg_df['H1']['PGA'], 0.145615, atol=1e-5)

    # Check the from_traces method
    traces = []
    for st in sc_test:
        for tr in st:
            traces.append(tr)
    sc_test = StreamCollection.from_traces(traces)
    assert len(sc_test) == 1
Example #19
def test_StreamCollection():

    # read usc data
    dpath = os.path.join('data', 'testdata', 'usc', 'ci3144585')
    directory = pkg_resources.resource_filename('gmprocess', dpath)
    usc_streams, unprocessed_files, unprocessed_file_errors = \
        directory_to_streams(directory)
    assert len(usc_streams) == 7

    usc_sc = StreamCollection(usc_streams)

    # Use print method
    print(usc_sc)

    # Use len method
    assert len(usc_sc) == 3

    # Use nonzero method
    assert bool(usc_sc)

    # Slice
    lengths = [
        len(usc_sc[0]),
        len(usc_sc[1]),
        len(usc_sc[2])
    ]
    sort_lengths = np.sort(lengths)
    assert sort_lengths[0] == 1
    assert sort_lengths[1] == 3
    assert sort_lengths[2] == 3

    # read dmg data
    dpath = os.path.join('data', 'testdata', 'dmg', 'ci3144585')
    directory = pkg_resources.resource_filename('gmprocess', dpath)
    dmg_streams, unprocessed_files, unprocessed_file_errors = \
        directory_to_streams(directory)
    assert len(dmg_streams) == 1

    dmg_sc = StreamCollection(dmg_streams)

    # Has one station
    assert len(dmg_sc) == 1
    # With 3 channels
    assert len(dmg_sc[0]) == 3

    # So this should have 4 stations
    test1 = dmg_sc + usc_sc
    assert len(test1) == 4

    # Overwrite the dmg station and network to force it to be
    # a duplicate of one of the stations in usc_sc to check if
    # validation works with these addition methods
    for tr in dmg_sc[0]:
        tr.stats['network'] = 'LA'
        tr.stats['station'] = '57'

    test3 = dmg_sc + usc_sc
    assert len(test3) == 3
    # usc_sc has 1 channel for station 57 and the modified
    # dmg_sc has 3 channels so the combined StreamCollection
    # should have 4
    assert len(test3[0]) == 4

    test_copy = dmg_sc.copy()
    assert test_copy[0][0].stats['standard']['process_level'] == \
        'corrected physical units'

    # Appending dmg should not add to length because of the
    # overwriting of the station/network above
    stream1 = test_copy[0]
    test_append = usc_sc.append(stream1)
    assert len(test_append) == 3

    # Change back to unique values for station/network
    for tr in dmg_sc[0]:
        tr.stats['network'] = 'LALALA'
        tr.stats['station'] = '575757'
    stream2 = dmg_sc[0]
    test_append = usc_sc.append(stream2)
    assert len(test_append) == 4

    # Check the from_directory method
    sc_test = StreamCollection.from_directory(directory)
    assert len(sc_test) == 1

    # Test to_dataframe
    jsonfile = os.path.join(directory, 'event.json')
    with open(jsonfile, 'rt') as f:
        origin = json.load(f)
    dmg_df = sc_test.to_dataframe(origin)
    np.testing.assert_allclose(
        dmg_df['HN1']['PGA'],
        0.145615,
        atol=1e-5)
Example #20
def test_StreamCollection():

    # read usc data
    dpath = os.path.join('data', 'testdata', 'usc', 'ci3144585')
    directory = pkg_resources.resource_filename('gmprocess', dpath)
    usc_streams, unprocessed_files, unprocessed_file_errors = \
        directory_to_streams(directory)
    assert len(usc_streams) == 7

    usc_sc = StreamCollection(usc_streams)

    # Use print method
    print(usc_sc)

    # Use len method
    assert len(usc_sc) == 3

    # Use nonzero method
    assert bool(usc_sc)

    # Slice
    lengths = [
        len(usc_sc[0]),
        len(usc_sc[1]),
        len(usc_sc[2])
    ]
    sort_lengths = np.sort(lengths)
    assert sort_lengths[0] == 1
    assert sort_lengths[1] == 3
    assert sort_lengths[2] == 3

    # read dmg data
    dpath = os.path.join('data', 'testdata', 'dmg', 'ci3144585')
    directory = pkg_resources.resource_filename('gmprocess', dpath)
    dmg_streams, unprocessed_files, unprocessed_file_errors = \
        directory_to_streams(directory)
    assert len(dmg_streams) == 1

    dmg_sc = StreamCollection(dmg_streams)

    # Has one station
    assert len(dmg_sc) == 1
    # With 3 channels
    assert len(dmg_sc[0]) == 3

    # So this should have 4 stations
    test1 = dmg_sc + usc_sc
    assert len(test1) == 4

    test_copy = dmg_sc.copy()
    assert test_copy[0][0].stats['standard']['process_level'] == \
        'corrected physical units'

    stream1 = test_copy[0]
    test_append = usc_sc.append(stream1)
    assert len(test_append) == 4

    # Change back to unique values for station/network
    for tr in dmg_sc[0]:
        tr.stats['network'] = 'LALALA'
        tr.stats['station'] = '575757'
    stream2 = dmg_sc[0]
    test_append = usc_sc.append(stream2)
    assert len(test_append) == 4

    # Check the from_directory method
    sc_test = StreamCollection.from_directory(directory)
    assert len(sc_test) == 1

    # Test to_dataframe
    jsonfile = os.path.join(directory, 'event.json')
    with open(jsonfile, 'rt') as f:
        origin = json.load(f)
    dmg_df = sc_test.to_dataframe(origin)
    np.testing.assert_allclose(
        dmg_df['HN1']['PGA'],
        0.145615,
        atol=1e-5)