Example #1
def test_unam():
    datafiles, origin = read_data_dir('unam', 'us2000ar20',
                                      ['CANA1709.191', 'PZPU1709.191'])

    # make sure format checker works
    assert is_unam(datafiles[0])

    stream = read_unam(datafiles[0])[0]
    trace1 = stream[0]
    trace2 = stream[1]
    trace3 = stream[2]

    np.testing.assert_almost_equal(trace1.stats.coordinates.latitude,
                                   18.567007)
    np.testing.assert_almost_equal(trace1.stats.coordinates.longitude,
                                   -101.977162)
    assert trace1.stats.sampling_rate == 200.0

    np.testing.assert_almost_equal(trace1.max(), 9.14, decimal=2)
    np.testing.assert_almost_equal(trace2.max(), 9.24, decimal=2)
    np.testing.assert_almost_equal(trace3.max(), -7.87, decimal=2)

    # the second file has NaN instrument period and damping in its header
    stream2 = read_unam(datafiles[1])[0]
    trace1 = stream2[0]
    assert np.isnan(trace1.stats.standard.instrument_period)
    assert np.isnan(trace1.stats.standard.instrument_damping)

    # make sure the format checker rejects non-UNAM files
    datafiles, origin = read_data_dir(
        'fdsn', 'nc72282711',
        ['BK.CMB.00.HNE__20140824T102014Z__20140824T102244Z.mseed'])
    assert is_unam(datafiles[0]) is False
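
A minimal sketch of the imports this example assumes (the module paths are an educated guess at gmprocess's layout and may differ between versions):

import numpy as np
from gmprocess.io.test_utils import read_data_dir
from gmprocess.io.unam.core import is_unam, read_unam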
Example #2
def test():
    # Test for channel grouping with three unique channels
    streams = []
    # datadir = os.path.join(homedir, '..', 'data', 'knet', 'us2000cnnl')
    datafiles, origin = read_data_dir('knet', 'us2000cnnl',
                                      'AOM0031801241951*')
    for datafile in datafiles:
        streams += read_knet(datafile)
    grouped_streams = StreamCollection(streams)
    assert len(grouped_streams) == 1
    assert grouped_streams[0].count() == 3

    # Test for channel grouping with more file types
    datafiles, origin = read_data_dir('geonet', 'us1000778i',
                                      '20161113_110313_THZ_20.V2A')
    datafile = datafiles[0]
    streams += read_geonet(datafile)
    grouped_streams = StreamCollection(streams)
    assert len(grouped_streams) == 2
    assert grouped_streams[0].count() == 3
    assert grouped_streams[1].count() == 3

    # Test the warning for single-channel streams
    datafiles, origin = read_data_dir('knet', 'us2000cnnl',
                                      'AOM0071801241951.UD')
    datafile = datafiles[0]
    streams += read_knet(datafile)

    grouped_streams = StreamCollection(streams)
    #    assert "One channel stream:" in logstream.getvalue()

    assert len(grouped_streams) == 3
    assert grouped_streams[0].count() == 3
    assert grouped_streams[1].count() == 3
    assert grouped_streams[2].count() == 1
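
StreamCollection performs the channel grouping being tested here: it bundles single-channel traces into per-station streams. A quick way to inspect the grouping, using the same objects as above:

for stream in grouped_streams:
    # one line per station stream: its network.station.channel id and size
    print(stream.get_id(), stream.count())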
Example #3
def test():
    datafiles, origin = read_data_dir('fdsn', 'nc72282711', 'BK.CMB*.mseed')
    streams = []
    for datafile in datafiles:
        streams += read_fdsn(datafile)

    assert streams[0].get_id() == 'BK.CMB.HN'

    datafiles, origin = read_data_dir('fdsn', 'nc72282711', 'TA.M04C*.mseed')
    streams = []
    for datafile in datafiles:
        streams += read_fdsn(datafile)

    assert streams[0].get_id() == 'TA.M04C.HN'

    # test assignment of Z channel
    datafiles, origin = read_data_dir('fdsn', 'nc73300395', 'BK.VALB*.mseed')
    streams = []
    for datafile in datafiles:
        streams += read_fdsn(datafile)

    # get all channel names
    channels = sorted([st[0].stats.channel for st in streams])
    assert channels == ['HN2', 'HN3', 'HNZ']

    # DEBUGGING
    sc = StreamCollection(streams)
    psc = process_streams(sc, origin)
Example #4
def get_streams():
    datafiles1, origin1 = read_data_dir('cwb', 'us1000chhc', '*.dat')
    datafiles2, origin2 = read_data_dir('nsmn', 'us20009ynd', '*.txt')
    datafiles3, origin3 = read_data_dir('geonet', 'us1000778i', '*.V1A')
    datafiles = datafiles1 + datafiles2 + datafiles3
    streams = []
    for datafile in datafiles:
        streams += read_data(datafile)

    return StreamCollection(streams)
Example #5
def get_streams():
    datafiles1, origin1 = read_data_dir('cwb', 'us1000chhc', '*.dat')
    datafiles2, origin2 = read_data_dir('nsmn', 'us20009ynd', '*.txt')
    datafiles3, origin3 = read_data_dir('geonet', 'us1000778i', '*.V1A')
    datafiles = datafiles1 + datafiles2 + datafiles3
    streams = []
    for datafile in datafiles:
        streams += read_data(datafile)

    return StreamCollection(streams)
Example #6
def test():
    datafiles, origin = read_data_dir('fdsn', 'nc72282711', 'BK.CMB*.mseed')
    streams = []
    for datafile in datafiles:
        streams += read_fdsn(datafile)

    assert streams[0].get_id() == 'BK.CMB.HN'

    datafiles, origin = read_data_dir('fdsn', 'nc72282711', 'TA.M04C*.mseed')
    streams = []
    for datafile in datafiles:
        streams += read_fdsn(datafile)

    assert streams[0].get_id() == 'TA.M04C.HN'
Example #7
def test_free_field():
    data_files, origin = read_data_dir('kiknet', 'usp000hzq8')

    raw_streams = []
    for dfile in data_files:
        raw_streams += read_data(dfile)

    sc = StreamCollection(raw_streams)

    processed_streams = process_streams(sc, origin)

    # all of these streams should have failed for different reasons
    npassed = np.sum([pstream.passed for pstream in processed_streams])
    assert npassed == 0
    for pstream in processed_streams:
        is_free = pstream[0].free_field
        reason = ''
        for trace in pstream:
            if trace.hasParameter('failure'):
                reason = trace.getParameter('failure')['reason']
                break
        if is_free:
            assert reason.startswith('Failed sta/lta check')
        else:
            assert reason == 'Failed free field sensor check.'
Example #8
def test_renadic():
    datafiles, origin = read_data_dir('renadic',
                                      'official20100227063411530_30')

    # make sure format checker works
    assert is_renadic(datafiles[0])

    raw_streams = []
    for dfile in datafiles:
        print('Reading file %s...' % dfile)
        raw_streams += read_renadic(dfile)

    # The following PGA values, in g, are taken from the file headers
    peaks = {
        '672': (-0.030, -0.016, -0.008),
        '5014': (0.295, -0.155, 0.421),
        '0': (0.020, -0.019, -0.010)
    }

    for stream in raw_streams:
        if stream[0].stats.station not in peaks:
            continue
        cmp_value = np.abs(np.array(peaks[stream[0].stats.station]))
        pga1 = np.abs(stream[0].max())
        pga2 = np.abs(stream[1].max())
        pga3 = np.abs(stream[2].max())
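        # The traces are in gal (cm/s^2); dividing the peaks by ~980
        # converts them to g for comparison with the header values above.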
        tpl = np.array((pga1, pga2, pga3)) / 980
        np.testing.assert_almost_equal(cmp_value, tpl, decimal=3)
Example #9
def test_orientation_relative():
    dfiles, event = read_data_dir('cosmos', 'ak018fcnsk91',
                                  ['NP8040-n.1000hyfh.HNE.01.V0c'])
    streams = read_cosmos(dfiles[0])
    trace = streams[0][0]
    assert trace.stats.channel == 'HN2'
    assert streams[0][0].stats.standard.horizontal_orientation == 90.0
Example #10
def generate_workspace():
    """Generate simple HDF5 with ASDF layout for testing.
    """
    PCOMMANDS = [
        'assemble',
        'process',
    ]
    EVENTID = 'us1000778i'
    LABEL = 'ptest'
    datafiles, event = read_data_dir('geonet', EVENTID, '*.V1A')

    tdir = tempfile.mkdtemp()
    tfilename = os.path.join(tdir, 'workspace.h5')

    raw_data = []
    for dfile in datafiles:
        raw_data += read_data(dfile)
    write_asdf(tfilename, raw_data, event, label="unprocessed")
    del raw_data

    config = get_config()
    workspace = StreamWorkspace.open(tfilename)
    raw_streams = workspace.getStreams(EVENTID, labels=['unprocessed'])
    pstreams = process_streams(raw_streams, event, config=config)
    workspace.addStreams(event, pstreams, label=LABEL)
    workspace.calcMetrics(event.id, labels=[LABEL], config=config)

    return tfilename
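
A hypothetical way to use generate_workspace(), with explicit cleanup of the temporary directory created by tempfile.mkdtemp() above (the getLabels() call is an assumption about the StreamWorkspace API):

import os
import shutil

tfilename = generate_workspace()
workspace = StreamWorkspace.open(tfilename)
try:
    print(workspace.getLabels())  # expect ['unprocessed', 'ptest']
finally:
    workspace.close()
    shutil.rmtree(os.path.dirname(tfilename))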
Example #11
def test_stream_params():
    eventid = 'us1000778i'
    datafiles, event = read_data_dir(
        'geonet',
        eventid,
        '20161113_110259_WTMC_20.V1A'
    )
    tdir = tempfile.mkdtemp()
    streams = []
    try:
        streams += read_data(datafiles[0])
        statsdict = {'name': 'Fred', 'age': 34}
        streams[0].setStreamParam('stats', statsdict)
        tfile = os.path.join(tdir, 'test.hdf')
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, streams, label='stats')
        outstreams = workspace.getStreams(event.id, labels=['stats'])
        cmpdict = outstreams[0].getStreamParam('stats')
        assert cmpdict == statsdict
        workspace.close()
    except Exception:
        raise
    finally:
        shutil.rmtree(tdir)
Example #12
def test_exceptions():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    stream1 = stream_v2.select(channel="HN1")
    try:
        StationSummary.from_stream(stream1, ['rotd50'], ['pga'])
        success = True
    except PGMException:
        success = False
    assert not success

    stream2 = Stream(
        [stream_v2.select(channel="HN1")[0],
            Trace(data=np.asarray([]), header={"channel": "HN2"})])
    try:
        StationSummary.from_stream(stream2,
                                   ['rotd50'], ['pga'])
        success = True
    except PGMException:
        success = False
    assert not success

    for trace in stream_v2:
        stream1.append(trace)
    try:
        StationSummary.from_stream(stream1, ['rotd50'], ['pga'])
        success = True
    except PGMException:
        success = False
    assert not success
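
The try/except/flag pattern above can be written more directly with pytest (a sketch, assuming the suite runs under pytest):

import pytest

with pytest.raises(PGMException):
    StationSummary.from_stream(stream1, ['rotd50'], ['pga'])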
Example #13
def _test_colocated():
    eventid = 'ci38445975'
    datafiles, event = read_data_dir('fdsn', eventid, '*')
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config_file = os.path.join(datadir, 'test_config.yml')
    with open(config_file, 'r', encoding='utf-8') as f:
        config = yaml.load(f, Loader=yaml.FullLoader)
    processed_streams = process_streams(raw_streams, event, config=config)

    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        ws = StreamWorkspace(tfile)
        ws.addEvent(event)
        ws.addStreams(event, raw_streams, label='raw')
        ws.addStreams(event, processed_streams, label='processed')
        ws.calcMetrics(eventid, labels=['processed'], config=config)
        stasum = ws.getStreamMetrics(eventid, 'CI', 'MIKB', 'processed')
        np.testing.assert_allclose(
            stasum.get_pgm('duration', 'geometric_mean'), 38.94480068)
        ws.close()
    except Exception:
        raise
    finally:
        shutil.rmtree(tdir)
Example #14
def test_signal_split2():
    datafiles, origin = read_data_dir(
        'knet', 'us2000cnnl', 'AOM0011801241951*')
    streams = []
    for datafile in datafiles:
        streams += read_data(datafile)

    streams = StreamCollection(streams)
    stream = streams[0]
    signal_split(stream, origin)

    cmpdict = {
        'split_time': UTCDateTime(2018, 1, 24, 10, 51, 39, 841483),
        'method': 'p_arrival',
        'picker_type': 'travel_time'}

    pdict = stream[0].getParameter('signal_split')
    for key, value in cmpdict.items():
        v1 = pdict[key]
        # Because the UTCDateTime __eq__ operator does not behave as
        # expected with the currently installed version of obspy, the two
        # objects are compared field by field.
        if isinstance(value, UTCDateTime):
            assert value.year == v1.year
            assert value.month == v1.month
            assert value.day == v1.day
            assert value.hour == v1.hour
            assert value.minute == v1.minute
            assert value.second == v1.second
        else:
            assert v1 == value
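
If field-by-field comparison is not required, the same check can be written against POSIX timestamps with an explicit tolerance (a sketch, not part of the original test):

if isinstance(value, UTCDateTime):
    # UTCDateTime.timestamp is seconds since the epoch as a float
    assert abs(value.timestamp - v1.timestamp) < 1.0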
Example #15
def test_metrics2():
    eventid = 'usb000syza'
    datafiles, event = read_data_dir('knet', eventid, '*')
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config = update_config(os.path.join(datadir, 'config_min_freq_0p2.yml'))
    config['metrics']['output_imts'].append('Arias')
    config['metrics']['output_imcs'].append('arithmetic_mean')
    # turn off sta/lta check and snr checks
    newconfig = drop_processing(config, ['check_sta_lta', 'compute_snr'])
    processed_streams = process_streams(raw_streams, event, config=newconfig)

    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, processed_streams, label='processed')
        workspace.calcMetrics(event.id, labels=['processed'])
        etable, imc_tables1, readmes1 = workspace.getTables('processed')
        assert 'ARITHMETIC_MEAN' not in imc_tables1
        assert 'ARITHMETIC_MEAN' not in readmes1
        del workspace.dataset.auxiliary_data.WaveFormMetrics
        del workspace.dataset.auxiliary_data.StationMetrics
        workspace.calcMetrics(event.id, labels=['processed'], config=config)
        etable2, imc_tables2, readmes2 = workspace.getTables('processed')
        assert 'ARITHMETIC_MEAN' in imc_tables2
        assert 'ARITHMETIC_MEAN' in readmes2
        assert 'ARIAS' in imc_tables2['ARITHMETIC_MEAN']
        testarray = readmes2['ARITHMETIC_MEAN']['Column header'].to_numpy()
        assert 'ARIAS' in testarray
    except Exception:
        raise
    finally:
        shutil.rmtree(tdir)
Example #16
def test_to_dataframe():
    cwb_files, event = read_data_dir('geonet', 'nz2018p115908')
    st = read_data(cwb_files[0])[0]
    df1 = streams_to_dataframe([st, st], event=event)
    np.testing.assert_array_equal(df1.STATION.tolist(), ['WPWS', 'WPWS'])
    np.testing.assert_array_equal(df1.NAME.tolist(),
                                  ['Waipawa_District_Council', 'Waipawa_District_Council'])
    target_levels = ['ELEVATION', 'EPICENTRAL_DISTANCE',
                     'GREATER_OF_TWO_HORIZONTALS', 'H1', 'H2', 'Z',
                     'HYPOCENTRAL_DISTANCE', 'LAT', 'LON', 'NAME', 'NETID', 'SOURCE',
                     'STATION', '', 'PGA', 'PGV', 'SA(0.3)', 'SA(1.0)', 'SA(3.0)']

    # let's use sets to make sure all the columns are present in whatever order
    cmp1 = set(['ELEVATION', 'EPICENTRAL_DISTANCE',
                'GREATER_OF_TWO_HORIZONTALS', 'H1', 'H2',
                'HYPOCENTRAL_DISTANCE', 'LAT', 'LON',
                'NAME', 'NETID', 'SOURCE', 'STATION', 'Z'])
    cmp2 = set(['', 'PGA', 'PGV', 'SA(0.3)', 'SA(1.0)', 'SA(3.0)'])
    header1 = set(df1.columns.levels[0])
    header2 = set(df1.columns.levels[1])
    assert header1 == cmp1
    assert header2 == cmp2
    # idx = 0
    # for s in df1.columns.levels:
    #     for col in s:
    #         try:
    #             assert col == target_levels[idx]
    #         except Exception as e:
    #             x = 1
    #         idx += 1

    # This was previously not being tested
    """imts = ['PGA', 'PGV', 'SA(0.3)', 'SA(1.0)', 'SA(3.0)']
Example #17
def test_sa():
    datafiles, _ = read_data_dir('geonet', 'us1000778i',
                                 '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    sa_target = {}
    for trace in stream_v2:
        vtrace = trace.copy()
        vtrace.integrate()
        sa_target[vtrace.stats['channel']] = np.abs(vtrace.max())
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        station_summary = StationSummary.from_stream(stream_v2, [
            'greater_of_two_horizontals', 'geometric_mean', 'rotd50',
            'arithmetic_mean', 'rotd100', 'gmrotd50', 'channels'
        ], ['sa1.0', 'saincorrect'])
    pgms = station_summary.pgms
    assert 'SA(1.000)' in pgms.IMT.tolist()
    np.testing.assert_allclose(
        pgms[pgms['IMC'] == 'ARITHMETIC_MEAN'].Result.iloc[0],
        110.47168962900042)
    np.testing.assert_allclose(
        pgms[pgms['IMC'] == 'GEOMETRIC_MEAN'].Result.iloc[0],
        107.42183990654802)
    np.testing.assert_allclose(
        pgms[pgms['IMC'] == 'ROTD(50.0)'].Result.iloc[0], 106.03202302692158)
    np.testing.assert_allclose(
        pgms[pgms['IMC'] == 'ROTD(100.0)'].Result.iloc[0], 146.90233501240979)
Example #18
def test_get_peak_time():
    datafiles, _ = read_data_dir('geonet', 'us1000778i',
                                 '20161113_110259_WTMC_20.V2A')
    datafile = datafiles[0]
    stream1 = read_geonet(datafile)[0]
    max_cls = Max(stream1).result
    assert len(max_cls) == 2

    max_cls = Max({'chan': [0, 1, 2, 3]}).result
    assert len(max_cls) == 1

    stream2 = read_geonet(datafile)[0]
    origin = Origin(latitude=42.6925, longitude=173.021944)
    stream_summary = StationSummary.from_stream(stream2, ['channels'],
                                                ['pgv', 'pga'], origin)
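    # Computing PGA/PGV through StationSummary.from_stream above attaches
    # pga_time/pgv_time to each trace's stats, which is what the
    # assertions below verify.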
    assert stream2[0].stats.pga_time == UTCDateTime(
        '2016-11-13T11:03:08.880001Z')
    assert stream2[0].stats.pgv_time == UTCDateTime(
        '2016-11-13T11:03:10.580001Z')

    assert stream2[1].stats.pga_time == UTCDateTime(
        '2016-11-13T11:03:09.960001Z')
    assert stream2[1].stats.pgv_time == UTCDateTime(
        '2016-11-13T11:03:08.860001Z')

    assert stream2[2].stats.pga_time == UTCDateTime(
        '2016-11-13T11:03:08.140001Z')
    assert stream2[2].stats.pgv_time == UTCDateTime(
        '2016-11-13T11:03:09.560001Z')
Example #19
def test_metrics2():
    eventid = 'usb000syza'
    datafiles, event = read_data_dir('knet', eventid, '*')
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config = get_config()
    config['metrics']['output_imts'].append('Arias')
    config['metrics']['output_imcs'].append('arithmetic_mean')
    # turn off sta/lta check and snr checks
    newconfig = drop_processing(config, ['check_sta_lta', 'compute_snr'])
    processed_streams = process_streams(raw_streams, event, config=newconfig)

    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, processed_streams, label='processed')
        workspace.calcMetrics(event.id, labels=['processed'])
        etable, imc_tables1 = workspace.getTables('processed')
        etable2, imc_tables2 = workspace.getTables('processed', config=config)
        assert 'ARITHMETIC_MEAN' not in imc_tables1
        assert 'ARITHMETIC_MEAN' in imc_tables2
        assert 'ARIAS' in imc_tables2['ARITHMETIC_MEAN']
    except Exception:
        raise
    finally:
        shutil.rmtree(tdir)
Example #20
def test_pgv():
    datafiles, _ = read_data_dir('geonet', 'us1000778i',
                                 '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    pgv_target = {}
    for trace in stream_v2:
        vtrace = trace.copy()
        vtrace.integrate()
        pgv_target[vtrace.stats['channel']] = np.abs(vtrace.max())

    # we've replaced HN1 etc. with H1 so channel names are not the same as
    # the original trace
    pgv_target['H1'] = pgv_target['HN1']
    pgv_target['H2'] = pgv_target['HN2']
    pgv_target['Z'] = pgv_target['HNZ']

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        station_summary = StationSummary.from_stream(
            stream_v2, ['channels', 'greater_of_two_horizontals', 'gmrotd50'],
            ['pgv', 'sa1.0', 'saincorrect'])
    pgv_df = station_summary.pgms.loc['PGV']
    HN1 = pgv_df.loc['H1'].Result
    HN2 = pgv_df.loc['H2'].Result
    HNZ = pgv_df.loc['Z'].Result
    np.testing.assert_almost_equal(HN2, pgv_target['H2'])
    np.testing.assert_almost_equal(HN1, pgv_target['H1'])
    np.testing.assert_almost_equal(HNZ, pgv_target['Z'])
Example #21
def test_nnet():

    conf = get_config()

    update = {
        'processing': [
            {'detrend': {'detrending_method': 'demean'}},
            # {'check_zero_crossings': {'min_crossings': 10}},
            {'detrend': {'detrending_method': 'linear'}},
            {'compute_snr': {'bandwidth': 20.0,
                             'check': {'max_freq': 5.0,
                                       'min_freq': 0.2,
                                       'threshold': 3.0}}},
            {'NNet_QA': {'acceptance_threshold': 0.5,
                         'model_name': 'CantWell'}}
        ]
    }
    update_dict(conf, update)

    data_files, origin = read_data_dir('geonet', 'us1000778i', '*.V1A')
    streams = []
    for f in data_files:
        streams += read_data(f)

    sc = StreamCollection(streams)
    test = process_streams(sc, origin, conf)
    tstream = test.select(station='HSES')[0]
    allparams = tstream.getStreamParamKeys()
    nnet_dict = tstream.getStreamParam('nnet_qa')
    np.testing.assert_allclose(
        nnet_dict['score_HQ'], 0.99321798811740059, rtol=1e-3)
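
update_dict merges the update into conf; a minimal sketch of a recursive overlay of this kind (an illustration, not the library's implementation):

def update_dict_sketch(target, source):
    # descend into nested dicts; replace everything else (including lists,
    # such as the 'processing' list above) wholesale
    for key, value in source.items():
        if isinstance(value, dict) and isinstance(target.get(key), dict):
            update_dict_sketch(target[key], value)
        else:
            target[key] = value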
Example #22
def test_nsmn():
    datafiles, origin = read_data_dir('nsmn', 'us20009ynd')

    # make sure format checker works
    assert is_nsmn(datafiles[0])

    raw_streams = []
    for dfile in datafiles:
        raw_streams += read_nsmn(dfile)

    peaks = {
        '0921': (13.200332, 12.163827, 9.840572),
        '4304': (1.218825, 1.207812, 0.645862),
        '5405': (1.023915, 1.107856, 0.385138)
    }

    coords = {
        '0921': (37.87470, 27.59223),
        '4304': (38.99478, 29.40040),
        '5405': (40.79609, 30.73520)
    }

    for stream in raw_streams:
        cmp_value = peaks[stream[0].stats.station]
        pga1 = np.abs(stream[0].max())
        pga2 = np.abs(stream[1].max())
        pga3 = np.abs(stream[2].max())
        tpl = (pga1, pga2, pga3)
        np.testing.assert_almost_equal(cmp_value, tpl)
        cmp_coords = coords[stream[0].stats.station]
        tpl = (stream[0].stats['coordinates']['latitude'],
               stream[0].stats['coordinates']['longitude'])
        np.testing.assert_almost_equal(cmp_coords, tpl)
Example #23
def test_process_streams():
    # Loma Prieta test station (nc216859)

    data_files, origin = read_data_dir('geonet', 'us1000778i', '*.V1A')
    streams = []
    for f in data_files:
        streams += read_data(f)

    sc = StreamCollection(streams)

    sc.describe()

    test = process_streams(sc, origin)

    logging.info('Testing trace: %s' % test[0][1])

    assert len(test) == 3
    assert len(test[0]) == 3
    assert len(test[1]) == 3
    assert len(test[2]) == 3

    # The traces can end up in a different order on the Travis Linux
    # container than on a local Mac, so the checks on individual traces
    # must not depend on trace order.

    trace_maxes = np.sort([np.max(np.abs(t.data)) for t in test[0]])

    np.testing.assert_allclose(
        trace_maxes,
        np.array([157.81975508, 240.33718094, 263.67804256]),
        rtol=1e-5
    )
Example #24
def test():
    dpath = os.path.join('data', 'testdata', 'knet', 'us2000cnnl')
    datadir = pkg_resources.resource_filename('gmprocess', dpath)

    knet_file1 = os.path.join(datadir, 'AOM0051801241951.EW')
    knet_file2 = os.path.join(datadir, 'AOM0051801241951.NS')
    knet_file3 = os.path.join(datadir, 'AOM0051801241951.UD')
    assert is_knet(knet_file1)
    assert is_knet(os.path.abspath(__file__)) is False

    # test a knet file with npoints % 10 == 0
    stream1 = read_knet(knet_file1)[0]
    stream2 = read_knet(knet_file2)[0]
    stream3 = read_knet(knet_file3)[0]
    np.testing.assert_almost_equal(stream1[0].max(), -37.149, decimal=2)
    np.testing.assert_almost_equal(stream2[0].max(), 32.859, decimal=2)
    np.testing.assert_almost_equal(stream3[0].max(), 49.000, decimal=2)

    # test a file that has a number of points divisible by 8
    knet_file4 = os.path.join(datadir, 'AOM0011801241951.EW')
    knet_file5 = os.path.join(datadir, 'AOM0011801241951.NS')
    knet_file6 = os.path.join(datadir, 'AOM0011801241951.UD')
    stream4 = read_knet(knet_file4)[0]
    stream5 = read_knet(knet_file5)[0]
    stream6 = read_knet(knet_file6)[0]
    np.testing.assert_almost_equal(stream4[0].max(), -11.435, decimal=2)
    np.testing.assert_almost_equal(stream5[0].max(), 12.412, decimal=2)
    np.testing.assert_almost_equal(stream6[0].max(), -9.284, decimal=2)

    # test that a file that is not knet format raises an Exception
    try:
        knet_files, _ = read_data_dir('geonet', 'nz2018p115908',
                                      '20161113_110256_WTMC_20.V1A')

        knet_file = knet_files[0]
        read_knet(knet_file)[0]
        success = True
    except Exception:
        success = False
    assert not success

    # test some kiknet files
    dpath = os.path.join('data', 'testdata', 'kiknet', 'usp000a1b0')
    datadir = pkg_resources.resource_filename('gmprocess', dpath)
    kiknet_file1 = os.path.join(datadir, 'AICH040010061330.EW2')
    kiknet_file2 = os.path.join(datadir, 'AICH040010061330.NS2')
    kiknet_file3 = os.path.join(datadir, 'AICH040010061330.UD2')
    assert is_knet(kiknet_file1)
    stream1 = read_knet(kiknet_file1)[0]  # east-west
    stream2 = read_knet(kiknet_file2)[0]  # north-south
    stream3 = read_knet(kiknet_file3)[0]  # vertical
    assert stream1[0].stats['channel'] == 'HN2'
    assert stream2[0].stats['channel'] == 'HN1'
    assert stream3[0].stats['channel'] == 'HNZ'
    ewmax = np.abs(stream1[0].data).max()
    nsmax = np.abs(stream2[0].data).max()
    udmax = np.abs(stream3[0].data).max()
    np.testing.assert_almost_equal(ewmax, 5.020, decimal=1)
    np.testing.assert_almost_equal(nsmax, 10.749, decimal=1)
    np.testing.assert_almost_equal(udmax, 9.111, decimal=1)
Example #25
def test_signal_split2():
    datafiles, origin = read_data_dir('knet', 'us2000cnnl',
                                      'AOM0011801241951*')
    streams = []
    for datafile in datafiles:
        streams += read_data(datafile)

    streams = StreamCollection(streams)
    stream = streams[0]
    signal_split(stream, origin)

    cmpdict = {
        'split_time': UTCDateTime(2018, 1, 24, 10, 51, 39, 841483),
        'method': 'p_arrival',
        'picker_type': 'travel_time'
    }

    pdict = stream[0].getParameter('signal_split')
    for key, value in cmpdict.items():
        v1 = pdict[key]
        # Because the UTCDateTime __eq__ operator does not behave as
        # expected with the currently installed version of obspy, the two
        # objects are compared field by field.
        if isinstance(value, UTCDateTime):
            assert value.year == v1.year
            assert value.month == v1.month
            assert value.day == v1.day
            assert value.hour == v1.hour
            assert value.minute == v1.minute
            assert value.second == v1.second
        else:
            assert v1 == value
Example #26
def test_nsmn():
    datafiles, origin = read_data_dir('nsmn', 'us20009ynd')

    # make sure format checker works
    assert is_nsmn(datafiles[0])

    raw_streams = []
    for dfile in datafiles:
        raw_streams += read_nsmn(dfile)

    peaks = {'0921': (13.200332, 12.163827, 9.840572),
             '4304': (1.218825, 1.207812, 0.645862),
             '5405': (1.023915, 1.107856, 0.385138)}

    coords = {'0921': (37.87470, 27.59223),
              '4304': (38.99478, 29.40040),
              '5405': (40.79609, 30.73520)}

    for stream in raw_streams:
        cmp_value = peaks[stream[0].stats.station]
        pga1 = np.abs(stream[0].max())
        pga2 = np.abs(stream[1].max())
        pga3 = np.abs(stream[2].max())
        tpl = (pga1, pga2, pga3)
        np.testing.assert_almost_equal(cmp_value, tpl)
        cmp_coords = coords[stream[0].stats.station]
        tpl = (stream[0].stats['coordinates']['latitude'],
               stream[0].stats['coordinates']['longitude'])
        np.testing.assert_almost_equal(cmp_coords, tpl)
Example #27
def test_arias():
    ddir = os.path.join('data', 'testdata')
    datadir = pkg_resources.resource_filename('gmprocess', ddir)
    data_file = os.path.join(datadir, 'arias_data.json')
    with open(data_file, 'rt') as f:
        jdict = json.load(f)

    time = np.array(jdict['time'])
    # input output is m/s/s
    acc = np.array(jdict['acc']) / 100
    target_IA = jdict['ia']
    delta = time[2] - time[1]
    sr = 1 / delta
    header = {
        'delta': delta,
        'sampling_rate': sr,
        'npts': len(acc),
        'units': 'm/s/s',
        'channel': 'HN1',
        'standard': {
            'corner_frequency': np.nan,
            'station_name': '',
            'source': 'json',
            'source_file': '',
            'instrument': '',
            'instrument_period': np.nan,
            'source_format': 'json',
            'comments': '',
            'structure_type': '',
            'sensor_serial_number': '',
            'process_level': 'raw counts',
            'process_time': '',
            'horizontal_orientation': np.nan,
            'units': 'acc',
            'instrument_damping': np.nan
        }
    }
    # input is cm/s/s output is m/s/s
    trace = StationTrace(data=acc * 100, header=header)
    trace2 = trace.copy()
    trace2.stats.channel = 'HN2'
    stream = StationStream([trace, trace2])
    station = StationSummary.from_stream(stream, ['ARITHMETIC_MEAN'],
                                         ['arias'])
    pgms = station.pgms
    Ia = pgms[(pgms.IMT == 'ARIAS')
              & (pgms.IMC == 'ARITHMETIC_MEAN')].Result.tolist()[0]
    # the target has only one decimal place and is in cm/s/s
    Ia = Ia * 100
    np.testing.assert_almost_equal(Ia, target_IA, decimal=1)

    # Test other components
    data_files, _ = read_data_dir('cwb', 'us1000chhc', '2-ECU.dat')
    stream = read_data(data_files[0])[0]
    station = StationSummary.from_stream(stream, [
        'channels', 'gmrotd', 'rotd50', 'greater_of_two_horizontals',
        'ARITHMETIC_MEAN'
    ], ['arias'])
    stream = StationSummary.from_stream(stream, ['gmrotd50'], ['arias'])
    assert stream.pgms.Result.tolist() == []
Example #28
def test():
    dpath = os.path.join('data', 'testdata', 'esm', 'us60004wsq')
    datadir = pkg_resources.resource_filename('gmprocess', dpath)

    esm_file1 = os.path.join(datadir,
                             'HI.ARS1..HNE.D.20190728.160908.C.ACC.ASC')
    esm_file2 = os.path.join(datadir,
                             'HI.ARS1..HNN.D.20190728.160908.C.ACC.ASC')
    esm_file3 = os.path.join(datadir,
                             'HI.ARS1..HNZ.D.20190728.160908.C.ACC.ASC')
    assert is_esm(esm_file1)
    assert not is_esm(os.path.abspath(__file__))

    # test a esm file with npoints % 10 == 0
    stream1 = read_esm(esm_file1)[0]
    stream2 = read_esm(esm_file2)[0]
    stream3 = read_esm(esm_file3)[0]
    np.testing.assert_almost_equal(stream1[0].max(), 0.300022, decimal=2)
    np.testing.assert_almost_equal(stream2[0].max(), 0.359017, decimal=2)
    np.testing.assert_almost_equal(stream3[0].max(), 0.202093, decimal=2)

    # test that a file that is not esm format raises an Exception
    try:
        esm_files, _ = read_data_dir('geonet', 'nz2018p115908',
                                     '20161113_110256_WTMC_20.V1A')

        esm_file = esm_files[0]
        read_esm(esm_file)[0]
        success = True
    except Exception:
        success = False
    assert not success
Example #29
def test_asdf():
    eventid = 'us1000778i'
    datafiles, event = read_data_dir('geonet', eventid, '*.V1A')
    tdir = tempfile.mkdtemp()
    try:
        config = get_config()
        tfile = os.path.join(tdir, 'test.hdf')
        raw_streams = []
        for dfile in datafiles:
            raw_streams += read_data(dfile)

        write_asdf(tfile, raw_streams, event)

        assert is_asdf(tfile)
        assert not is_asdf(datafiles[0])

        outstreams = read_asdf(tfile)
        assert len(outstreams) == len(raw_streams)

        write_asdf(tfile, raw_streams, event, label='foo')
        outstreams2 = read_asdf(tfile, label='foo')
        assert len(outstreams2) == len(raw_streams)

    except Exception:
        raise
    finally:
        shutil.rmtree(tdir)
Example #30
def test_pga():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        station_summary = StationSummary.from_stream(
            stream_v2,
            ['channels', 'greater_of_two_horizontals', 'gmrotd50',
             'gmrotd100', 'gmrotd0', 'rotd50', 'geometric_mean',
             'arithmetic_mean'],
            ['pga', 'sa1.0', 'saincorrect'])
    pga_df = station_summary.pgms[station_summary.pgms.IMT == 'PGA']
    AM = pga_df[pga_df.IMC == 'ARITHMETIC_MEAN'].Result.iloc[0]
    GM = pga_df[pga_df.IMC == 'GEOMETRIC_MEAN'].Result.iloc[0]
    HN1 = pga_df[pga_df.IMC == 'H1'].Result.iloc[0]
    HN2 = pga_df[pga_df.IMC == 'H2'].Result.iloc[0]
    HNZ = pga_df[pga_df.IMC == 'Z'].Result.iloc[0]
    greater = pga_df[pga_df.IMC == 'GREATER_OF_TWO_HORIZONTALS'].Result.iloc[0]
    np.testing.assert_allclose(
        AM, 90.242335558014219, rtol=1e-3)
    np.testing.assert_allclose(
        GM, 89.791654017670112, rtol=1e-3)
    np.testing.assert_allclose(
        HN2, 81.234672390673683, rtol=1e-3)
    np.testing.assert_allclose(
        HN1, 99.249998725354743, rtol=1e-3)
    np.testing.assert_almost_equal(
        HNZ, 183.77223618666929, decimal=1)
    np.testing.assert_allclose(
        greater, 99.249998725354743, rtol=1e-3)
Example #31
def test_process_streams():
    # Loma Prieta test station (nc216859)

    data_files, origin = read_data_dir('geonet', 'us1000778i', '*.V1A')
    streams = []
    for f in data_files:
        streams += read_data(f)

    sc = StreamCollection(streams)

    sc.describe()

    # NOTE: `datadir` (the directory containing config_min_freq_0p2.yml) is
    # assumed to be defined at module level in the original test.
    config = update_config(os.path.join(datadir, 'config_min_freq_0p2.yml'))

    test = process_streams(sc, origin, config=config)

    logging.info('Testing trace: %s' % test[0][1])

    assert len(test) == 3
    assert len(test[0]) == 3
    assert len(test[1]) == 3
    assert len(test[2]) == 3

    # The traces can end up in a different order on the Travis Linux
    # container than on a local Mac, so the checks on individual traces
    # must not depend on trace order.

    trace_maxes = np.sort(
        [np.max(np.abs(t.data)) for t in test.select(station='HSES')[0]])

    np.testing.assert_allclose(trace_maxes,
                               np.array(
                                   [157.81975508, 240.33718094, 263.67804256]),
                               rtol=1e-5)
Example #32
def test_asdf():
    eventid = 'us1000778i'
    datafiles, event = read_data_dir('geonet', eventid, '*.V1A')
    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        raw_streams = []
        for dfile in datafiles:
            raw_streams += read_data(dfile)

        write_asdf(tfile, raw_streams, event)

        assert is_asdf(tfile)
        assert not is_asdf(datafiles[0])

        outstreams = read_asdf(tfile)
        assert len(outstreams) == len(raw_streams)

        write_asdf(tfile, raw_streams, event, label='foo')
        outstreams2 = read_asdf(tfile, label='foo')
        assert len(outstreams2) == len(raw_streams)

    except Exception:
        raise
    finally:
        shutil.rmtree(tdir)
Example #33
def test_asdf():
    eventid = 'us1000778i'
    datafiles, origin = read_data_dir('geonet', eventid, '*.V1A')
    event = get_event_object(origin)
    tdir = tempfile.mkdtemp()
    try:
        config = get_config()
        tfile = os.path.join(tdir, 'test.hdf')
        raw_streams = []
        for dfile in datafiles:
            raw_streams += read_data(dfile)

        write_asdf(tfile, raw_streams, event)

        assert is_asdf(tfile)
        assert not is_asdf(datafiles[0])

        outstreams = read_asdf(tfile)
        assert len(outstreams) == len(raw_streams)

        write_asdf(tfile, raw_streams, event, label='foo')
        outstreams2 = read_asdf(tfile, label='foo')
        assert len(outstreams2) == len(raw_streams)

    except Exception:
        raise
    finally:
        shutil.rmtree(tdir)
Example #34
def test_channel_in_filename():
    datafiles, origin = read_data_dir('cosmos', 'us1000hyfh')
    dfile = datafiles[0]
    # TODO: Fix this problem, or get the data fixed?
    try:
        streams = read_cosmos(dfile)
    except Exception:
        pass  # reading this file currently fails; see the TODO above
Example #35
def test_exceptions():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    stream1 = stream_v2.select(channel="HN1")
    pgms = StationSummary.from_stream(stream1, ['rotd50'], ['pga']).pgms
    assert np.isnan(pgms[(pgms.IMT == 'PGA') & (pgms.IMC == 'ROTD(50.0)')].Result.iloc[0])
Example #36
def test_velocity():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    acc_file = datafiles[0]
    acc = read_data(acc_file)[0]
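    # the expected result is a plain ObsPy integration of the acceleration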
    target_v = acc.copy().integrate()[0]
    v = get_velocity(acc)
    np.testing.assert_allclose(v[0], target_v)
Example #37
def test_channel_in_filename():
    datafiles, origin = read_data_dir('cosmos', 'us1000hyfh')
    dfile = datafiles[0]
    # TODO: Fix this problem, or get the data fixed?
    try:
        streams = read_cosmos(dfile)
    except Exception:
        pass  # reading this file currently fails; see the TODO above
Example #38
def test_exceptions():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    stream1 = stream_v2.select(channel="HN1")
    pgms = StationSummary.from_stream(stream1, ['rotd50'], ['pga']).pgms
    assert np.isnan(pgms.loc['PGA', 'ROTD(50.0)'].Result)
Example #39
def test_arias():
    ddir = os.path.join('data', 'testdata')
    datadir = pkg_resources.resource_filename('gmprocess', ddir)
    data_file = os.path.join(datadir, 'arias_data.json')
    with open(data_file, 'rt') as f:
        jdict = json.load(f)

    time = np.array(jdict['time'])
    # input output is m/s/s
    acc = np.array(jdict['acc']) / 100
    target_IA = jdict['ia']
    delta = time[2] - time[1]
    sr = 1 / delta
    header = {
        'delta': delta,
        'sampling_rate': sr,
        'npts': len(acc),
        'units': 'm/s/s',
        'channel': 'HN1',
        'standard': {'corner_frequency': np.nan,
            'station_name': '',
            'source': 'json',
            'source_file': '',
            'instrument': '',
            'instrument_period': np.nan,
            'source_format': 'json',
            'comments': '',
            'structure_type': '',
            'sensor_serial_number': '',
            'process_level': 'raw counts',
            'process_time': '',
            'horizontal_orientation': np.nan,
            'units': 'acc',
            'instrument_damping': np.nan}
    }
    # input is cm/s/s output is m/s/s
    trace = StationTrace(data=acc * 100, header=header)
    trace2 = trace.copy()
    trace2.stats.channel = 'HN2'
    stream = StationStream([trace, trace2])
    station = StationSummary.from_stream(stream, ['ARITHMETIC_MEAN'], ['arias'])
    pgms = station.pgms
    Ia = pgms[(pgms.IMT == 'ARIAS') & (pgms.IMC == 'ARITHMETIC_MEAN')].Result.tolist()[0]
    # the target has only one decimal place and is in cm/s/s
    Ia = Ia * 100
    np.testing.assert_almost_equal(Ia, target_IA, decimal=1)

    # Test other components
    data_files, _ = read_data_dir('cwb', 'us1000chhc', '2-ECU.dat')
    stream = read_data(data_files[0])[0]
    station = StationSummary.from_stream(stream,
                                         ['channels', 'gmrotd', 'rotd50',
                                             'greater_of_two_horizontals', 'ARITHMETIC_MEAN'],
                                         ['arias'])
    stream = StationSummary.from_stream(stream, ['gmrotd50'], ['arias'])
    assert stream.pgms.Result.tolist() == []
Example #40
def test_greater_of_two_horizontals():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    station_summary = StationSummary.from_stream(stream_v2,
                                                 ['greater_of_two_horizontals'], ['pga'])
    station = station_summary.pgms[station_summary.pgms.IMT == 'PGA']
    greater = station[station.IMC == 'GREATER_OF_TWO_HORIZONTALS'].Result.iloc[0]
    np.testing.assert_almost_equal(greater, 99.3173469387755, decimal=1)
Example #41
def test_gmrotd():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]

    stream_v2 = read_geonet(datafile_v2)[0]
    station_summary = StationSummary.from_stream(stream_v2,
                                                 ['gmrotd0', 'gmrotd50',
                                                  'gmrotd100'], ['pga'])
    pgms = station_summary.pgms
    assert 'GMROTD(50.0)' in pgms.IMC.tolist()
Example #42
def test_get_nga_record_sequence_no():
    datafiles, _ = read_data_dir('usc', 'ci3144585', '017m30cc.y0a')
    st = read_data(datafiles[0])[0]

    # Test when a single record is found
    assert get_nga_record_sequence_no(st, 'Northridge-01') == 960

    # Test when no records are found
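    # (the third argument is presumably a distance tolerance in km: 1 km
    # matches no records, while 10000 km matches more than one)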
    assert np.isnan(get_nga_record_sequence_no(st, 'Northridge-01', 1))

    # Test when multiple records are found
    assert np.isnan(get_nga_record_sequence_no(st, 'Northridge-01', 10000))
Example #43
def test_travel_time():
    datafiles, origin = read_data_dir('geonet', 'us1000778i', '*.V1A')
    streams = []
    for datafile in datafiles:
        streams += read_data(datafile)

    cmps = {'NZ.HSES.HN': 42.118045132851641,
            'NZ.WTMC.HN': 40.77244584723671,
            'NZ.THZ.HN': 42.025007954412246}
    for stream in streams:
        minloc, mean_snr = pick_travel(stream, origin)
        np.testing.assert_almost_equal(minloc, cmps[stream.get_id()])
Example #44
def test_exceptions():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    stream1 = stream_v2.select(channel="HN1")
    pgms = StationSummary.from_stream(stream1, ['gmrotd50'], ['pga']).pgms
    assert np.isnan(pgms.Result.iloc[0])

    for trace in stream_v2:
        stream1.append(trace)
    pgms = StationSummary.from_stream(stream1, ['gmrotd50'], ['pga']).pgms
    assert np.isnan(pgms.Result.iloc[0])
Example #45
def test():
    homedir = os.path.dirname(os.path.abspath(
        __file__))  # where is this script?

    # Test for channel grouping with three unique channels
    streams = []
    # datadir = os.path.join(homedir, '..', 'data', 'knet', 'us2000cnnl')
    datafiles, origin = read_data_dir('knet', 'us2000cnnl',
                                      'AOM0031801241951*')
    for datafile in datafiles:
        streams += read_knet(datafile)
    grouped_streams = StreamCollection(streams)
    assert len(grouped_streams) == 1
    assert grouped_streams[0].count() == 3

    # Test for channel grouping with more file types
    datafiles, origin = read_data_dir('geonet',
                                      'us1000778i',
                                      '20161113_110313_THZ_20.V2A')
    datafile = datafiles[0]
    streams += read_geonet(datafile)
    grouped_streams = StreamCollection(streams)
    assert len(grouped_streams) == 2
    assert grouped_streams[0].count() == 3
    assert grouped_streams[1].count() == 3

    # Test the warning for single-channel streams
    datafiles, origin = read_data_dir(
        'knet', 'us2000cnnl', 'AOM0071801241951.UD')
    datafile = datafiles[0]
    streams += read_knet(datafile)

    grouped_streams = StreamCollection(streams)
#    assert "One channel stream:" in logstream.getvalue()

    assert len(grouped_streams) == 3
    assert grouped_streams[0].count() == 3
    assert grouped_streams[1].count() == 3
    assert grouped_streams[2].count() == 1
Example #46
def test_dmg_non_spec():
    file1, _ = read_data_dir('dmg', 'ci3031425', files=[
                             'ce23583r_HESPERIA.RAW'])
    file1 = file1[0]
    assert is_dmg(file1)
    stream = read_dmg(file1)[0]
    trace1 = stream[0]
    # Data is in g not gal so it must be scaled by 980.665
    np.testing.assert_almost_equal(
        trace1.data[0], -0.000116 * UNIT_CONVERSIONS['g'])
    np.testing.assert_almost_equal(
        trace1.data[-8], -0.003018 * UNIT_CONVERSIONS['g'])
Example #47
def test_channels():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    station_summary = StationSummary.from_stream(stream_v2,
                                                 ['channels'], ['pga'])
    channel = station_summary.pgms[station_summary.pgms.IMT == 'PGA']
    np.testing.assert_almost_equal(
        channel[channel.IMC == 'HN2'].Result.iloc[0], 81.28979591836733, decimal=1)
    np.testing.assert_almost_equal(
        channel[channel.IMC == 'HN1'].Result.iloc[0], 99.3173469387755, decimal=1)
    np.testing.assert_almost_equal(
        channel[channel.IMC == 'HNZ'].Result.iloc[0], 183.89693877551022, decimal=1)
Example #48
def test_metrics():
    eventid = 'usb000syza'
    datafiles, event = read_data_dir('knet',
                                     eventid,
                                     '*')
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config = get_config()
    # turn off sta/lta check and snr checks
    newconfig = drop_processing(config, ['check_sta_lta', 'compute_snr'])
    processed_streams = process_streams(raw_streams, event, config=newconfig)

    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, processed_streams, label='processed')
        stream1 = processed_streams[0]
        stream2 = processed_streams[1]
        summary1 = StationSummary.from_config(stream1)
        summary2 = StationSummary.from_config(stream2)
        workspace.setStreamMetrics(event.id, 'processed', summary1)
        workspace.setStreamMetrics(event.id, 'processed', summary2)
        summary1_a = workspace.getStreamMetrics(event.id,
                                                stream1[0].stats.station,
                                                'processed')
        s1_df_in = summary1.pgms.sort_values(['IMT', 'IMC'])
        s1_df_out = summary1_a.pgms.sort_values(['IMT', 'IMC'])
        # DataFrame.as_matrix() was removed in modern pandas; to_numpy()
        # is the equivalent call
        array1 = s1_df_in['Result'].to_numpy()
        array2 = s1_df_out['Result'].to_numpy()
        np.testing.assert_almost_equal(array1, array2, decimal=4)

        df = workspace.getMetricsTable(event.id)
        cmp_series = {'GREATER_OF_TWO_HORIZONTALS': 0.6787,
                      'HN1': 0.3869,
                      'HN2': 0.6787,
                      'HNZ': 0.7663}
        pga_dict = df.iloc[0]['PGA'].to_dict()
        for key, value in pga_dict.items():
            value2 = cmp_series[key]
            np.testing.assert_almost_equal(value, value2, decimal=4)

        workspace.close()
    except Exception:
        raise
    finally:
        shutil.rmtree(tdir)
Example #49
def test_dmg_v1():
    file1, _ = read_data_dir('dmg', 'ci3144585', files=[
                             'LA116TH.RAW'])
    file1 = file1[0]
    assert is_dmg(file1)

    stream1 = read_dmg(file1)[0]
    assert stream1.count() == 3

    # test that the traces are acceleration
    for trace in stream1:
        assert trace.stats['standard']['units'] == 'acc'

    # test metadata
    for trace in stream1:
        stats = trace.stats
        assert stats['station'] == '14403'
        assert stats['delta'] == .005
        assert stats['location'] == '--'
        assert stats['network'] == 'ZZ'
        dt = '%Y-%m-%dT%H:%M:%SZ'
        assert stats['starttime'].strftime(dt) == '1994-01-17T12:31:04Z'
        assert stats.coordinates['latitude'] == 33.929
        assert stats.coordinates['longitude'] == -118.26
        assert stats.standard['station_name'] == 'LOS ANGELES - 116TH ST. SCHOOL'
        assert stats.standard['instrument'] == 'SMA-1'
        assert stats.standard['sensor_serial_number'] == '3492'
        if stats['channel'] == 'HN1':
            assert stats.format_specific['sensor_sensitivity'] == 1.915
            assert stats.standard['horizontal_orientation'] == 360
            assert stats.standard['instrument_period'] == .038
            assert stats.standard['instrument_damping'] == .59
            assert stats.format_specific['time_sd'] == 0.115
        if stats['channel'] == 'HN2':
            assert stats.standard['horizontal_orientation'] == 90
            assert stats.standard['instrument_period'] == 0.04
            assert stats.standard['instrument_damping'] == 0.592
            assert stats.format_specific['time_sd'] == 0.12
        if stats['channel'] == 'HNZ':
            assert stats.standard['horizontal_orientation'] == 0.0
            assert stats.standard['instrument_period'] == 0.039
            assert stats.standard['instrument_damping'] == 0.556
            assert stats.format_specific['time_sd'] == 0.114
        assert stats.standard['process_level'] == PROCESS_LEVELS['V2']
        assert stats.standard['source_format'] == 'dmg'
        assert stats.standard['source'] == 'unknown'
Example #50
def test_read():
    cosmos_files, _ = read_data_dir('cosmos',
                                    'ci14155260',
                                    'Cosmos12TimeSeriesTest.v1')
    cwb_files, _ = read_data_dir('cwb',
                                 'us1000chhc',
                                 '1-EAS.dat')
    dmg_files, _ = read_data_dir('dmg',
                                 'nc71734741',
                                 'CE89146.V2')
    geonet_files, _ = read_data_dir('geonet',
                                    'us1000778i',
                                    '20161113_110259_WTMC_20.V1A')
    knet_files, _ = read_data_dir('knet',
                                  'us2000cnnl',
                                  'AOM0011801241951.EW')
    smc_files, _ = read_data_dir('smc',
                                 'nc216859',
                                 '0111a.smc')

    file_dict = {}
    file_dict['cosmos'] = cosmos_files[0]
    file_dict['cwb'] = cwb_files[0]
    file_dict['dmg'] = dmg_files[0]
    file_dict['geonet'] = geonet_files[0]
    file_dict['knet'] = knet_files[0]
    file_dict['smc'] = smc_files[0]

    for file_format in file_dict:
        file_path = file_dict[file_format]
        assert _get_format(file_path) == file_format
        assert _validate_format(file_path, file_format) == file_format

    assert _validate_format(file_dict['knet'], 'smc') == 'knet'
    assert _validate_format(file_dict['dmg'], 'cosmos') == 'dmg'
    assert _validate_format(file_dict['cosmos'], 'invalid') == 'cosmos'

    for file_format in file_dict:
        stream = read_data(file_dict[file_format], file_format)[0]
        assert stream[0].stats.standard['source_format'] == file_format
        stream = read_data(file_dict[file_format])[0]
        assert stream[0].stats.standard['source_format'] == file_format
    # test exception
    try:
        file_path = smc_files[0].replace('0111a.smc', 'not_a_file.smc')
        read_data(file_path)[0]
        success = True
    except GMProcessException:
        success = False
    assert not success
Example #51
def test_end_to_end():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile = datafiles[0]

    target_imcs = np.sort(np.asarray(['GREATER_OF_TWO_HORIZONTALS',
                                      'HN1', 'HN2', 'HNZ', 'ROTD50.0',
                                      'ROTD100.0']))
    target_imts = np.sort(np.asarray(['SA(1.0)', 'PGA', 'PGV']))
    stream = read_geonet(datafile)[0]
    input_imcs = ['greater_of_two_horizontals', 'channels', 'rotd50',
                  'rotd100', 'invalid']
    input_imts = ['sa1.0', 'PGA', 'pgv', 'invalid']
    m = MetricsController(input_imts, input_imcs, stream)
    test_pgms = [
        ('PGV', 'ROTD(100.0)', 114.24894584734818),
        ('PGV', 'ROTD(50.0)', 81.55436750525355),
        ('PGV', 'HNZ', 37.47740000000001),
        ('PGV', 'HN1', 100.81460000000004),
        ('PGV', 'HN2', 68.4354),
        ('PGV', 'GREATER_OF_TWO_HORIZONTALS', 100.81460000000004),
        ('PGA', 'ROTD(100.0)', 100.73875535385548),
        ('PGA', 'ROTD(50.0)', 91.40178541935455),
        ('PGA', 'HNZ', 183.7722361866693),
        ('PGA', 'HN1', 99.24999872535474),
        ('PGA', 'HN2', 81.23467239067368),
        ('PGA', 'GREATER_OF_TWO_HORIZONTALS', 99.24999872535474),
        ('SA(1.0)', 'ROTD(100.0)', 146.9023350124098),
        ('SA(1.0)', 'ROTD(50.0)', 106.03202302692158),
        ('SA(1.0)', 'HNZ', 27.74118995438756),
        ('SA(1.0)', 'HN1', 136.25041187387063),
        ('SA(1.0)', 'HN2', 84.69296738413021),
        ('SA(1.0)', 'GREATER_OF_TWO_HORIZONTALS', 136.25041187387063)
    ]
    pgms = m.pgms
    assert len(pgms['IMT'].tolist()) == len(test_pgms)
    for target in test_pgms:
        target_imt = target[0]
        target_imc = target[1]
        value = target[2]
        sub_imt = pgms.loc[pgms['IMT'] == target_imt]
        df = sub_imt.loc[sub_imt['IMC'] == target_imc]
        assert len(df['IMT'].tolist()) == 1
        np.testing.assert_array_almost_equal(df['Result'].tolist()[0], value,
                                             decimal=10)
Example #52
def test_to_dataframe():
    cwb_files, event = read_data_dir('geonet', 'nz2018p115908')
    st = read_data(cwb_files[0])[0]
    df1 = streams_to_dataframe([st, st], event=event)
    np.testing.assert_array_equal(df1.STATION.tolist(), ['WPWS', 'WPWS'])
    np.testing.assert_array_equal(df1.NAME.tolist(),
                                  ['Waipawa_District_Council', 'Waipawa_District_Council'])
    target_levels = ['ELEVATION', 'EPICENTRAL_DISTANCE',
                     'GREATER_OF_TWO_HORIZONTALS', 'HN1', 'HN2', 'HNZ',
                     'HYPOCENTRAL_DISTANCE', 'LAT', 'LON', 'NAME', 'NETID', 'SOURCE',
                     'STATION', '', 'PGA', 'PGV', 'SA(0.3)', 'SA(1.0)', 'SA(3.0)']
    idx = 0
    for s in df1.columns.levels:
        for col in s:
            assert col == target_levels[idx]
            idx += 1

    # This was previously not being tested
    """imts = ['PGA', 'PGV', 'SA(0.3)', 'SA(1.0)', 'SA(3.0)']
Example #53
def test_plot():
    # read in data
    datafiles, _ = read_data_dir('cwb', 'us1000chhc')
    streams = []
    for filename in datafiles:
        streams += read_data(filename)
    # One plot arias
    axes = plot_arias(streams[3])
    assert len(axes) == 3

    # Multiplot arias
    axs = matplotlib.pyplot.subplots(len(streams), 3, figsize=(15, 10))[1]
    axs = axs.flatten()
    idx = 0
    for stream in streams:
        axs = plot_arias(
            stream, axes=axs, axis_index=idx, minfontsize=15,
            show_maximum=False, title="18km NNE of Hualian, Taiwan")
        idx += 3

    # One plot durations
    durations = [(0.05, 0.75),
                 (0.2, 0.8),
                 (0.05, .95)]
    axes = plot_durations(streams[3], durations)
    assert len(axes) == 3

    # Multiplot durations
    axs = matplotlib.pyplot.subplots(len(streams), 3, figsize=(15, 10))[1]
    axs = axs.flatten()
    idx = 0
    for stream in streams:
        axs = plot_durations(
            stream, durations, axes=axs, axis_index=idx,
            minfontsize=15, title="18km NNE of Hualian, Taiwan")
        idx += 3

    # Moveout plots
    epicenter_lat = 24.14
    epicenter_lon = 121.69
    plot_moveout(streams, epicenter_lat, epicenter_lon, 'BN1',
                 cmap='nipy_spectral_r', figsize=(15, 10), minfontsize=16,
                 normalize=True, scale=10)
Example #54
def test_acceleration():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    acc_file = datafiles[0]
    acc = read_data(acc_file)[0]
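    # GAL_TO_PCTG converts gal (cm/s^2) to percent-g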
    target_g = acc[0].data * GAL_TO_PCTG
    target_m = acc[0].data / 100
    target_cm = acc[0].data

    acc_g = get_acceleration(acc, units='%%g')
    assert acc_g[0].stats['units'] == '%%g'
    np.testing.assert_allclose(acc_g[0], target_g)

    acc_m = get_acceleration(acc, units='m/s/s')
    assert acc_m[0].stats['units'] == 'm/s/s'
    np.testing.assert_allclose(acc_m[0], target_m)

    acc_cm = get_acceleration(acc, units='cm/s/s')
    assert acc_cm[0].stats['units'] == 'cm/s/s'
    np.testing.assert_allclose(acc_cm[0], target_cm)
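
# The three targets above differ only by unit conversion. A sketch of the
# arithmetic, assuming GeoNet V2A data arrive in cm/s/s (gal) and standard
# gravity g = 9.80665 m/s/s; the constant mirrors GAL_TO_PCTG in spirit,
# but its value here is this sketch's assumption, not necessarily the
# library's:
GAL_TO_PCTG_SKETCH = 100.0 / 980.665  # (cm/s/s) -> %g, roughly 0.10197
CMSS_TO_MSS = 1.0 / 100.0             # (cm/s/s) -> m/s/s
assert abs(500.0 * GAL_TO_PCTG_SKETCH - 50.99) < 0.01  # 500 gal is ~0.51 g
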
def test_stream_params():
    eventid = 'us1000778i'
    datafiles, event = read_data_dir('geonet',
                                     eventid,
                                     '20161113_110259_WTMC_20.V1A')
    tdir = tempfile.mkdtemp()
    streams = []
    try:
        streams += read_data(datafiles[0])
        statsdict = {'name': 'Fred', 'age': 34}
        streams[0].setStreamParam('stats', statsdict)
        tfile = os.path.join(tdir, 'test.hdf')
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, streams, label='stats')
        outstreams = workspace.getStreams(event.id, labels=['stats'])
        cmpdict = outstreams[0].getStreamParam('stats')
        assert cmpdict == statsdict
        workspace.close()
    finally:
        shutil.rmtree(tdir)
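
# Sketch: the mkdtemp/try/finally dance above can be written more compactly
# with a context manager; same round-trip, automatic cleanup (the function
# name is hypothetical, the workspace calls are the ones used above):
def roundtrip_stream_param(event, streams):
    with tempfile.TemporaryDirectory() as tdir:
        workspace = StreamWorkspace(os.path.join(tdir, 'test.hdf'))
        workspace.addEvent(event)
        workspace.addStreams(event, streams, label='stats')
        outstreams = workspace.getStreams(event.id, labels=['stats'])
        workspace.close()
        return outstreams[0].getStreamParam('stats')
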
def test_pga():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        station_summary = StationSummary.from_stream(
            stream_v2,
            ['channels', 'greater_of_two_horizontals', 'gmrotd50',
             'gmrotd100', 'gmrotd0'],
            ['pga', 'sa1.0', 'saincorrect'])
    pga_df = station_summary.pgms[station_summary.pgms.IMT == 'PGA']
    HN1 = pga_df[pga_df.IMC == 'HN1'].Result.iloc[0]
    HN2 = pga_df[pga_df.IMC == 'HN2'].Result.iloc[0]
    HNZ = pga_df[pga_df.IMC == 'HNZ'].Result.iloc[0]
    greater = pga_df[pga_df.IMC == 'GREATER_OF_TWO_HORIZONTALS'].Result.iloc[0]
    np.testing.assert_almost_equal(
        HN2, 81.28979591836733, decimal=1)
    np.testing.assert_almost_equal(
        HN1, 99.3173469387755, decimal=1)
    np.testing.assert_almost_equal(
        HNZ, 183.89693877551022, decimal=1)
    np.testing.assert_almost_equal(greater, 99.3173469387755, decimal=1)
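
# GREATER_OF_TWO_HORIZONTALS is by definition the larger of the two
# horizontal-channel peaks (the vertical HNZ never participates). A
# one-line sketch of that invariant, given peaks pulled from a pgms frame:
def check_greater_of_two(hn1, hn2, greater):
    np.testing.assert_almost_equal(greater, max(hn1, hn2))
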
def test_pgv():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    pgv_target = {}
    for trace in stream_v2:
        vtrace = trace.copy()
        vtrace.integrate()
        pgv_target[vtrace.stats['channel']] = np.abs(vtrace.max())
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        station_summary = StationSummary.from_stream(
            stream_v2,
            ['channels', 'greater_of_two_horizontals', 'gmrotd50'],
            ['pgv', 'sa1.0', 'saincorrect'])
    pgv_df = station_summary.pgms[station_summary.pgms.IMT == 'PGV']
    HN1 = pgv_df[pgv_df.IMC == 'HN1'].Result.iloc[0]
    HN2 = pgv_df[pgv_df.IMC == 'HN2'].Result.iloc[0]
    HNZ = pgv_df[pgv_df.IMC == 'HNZ'].Result.iloc[0]
    np.testing.assert_almost_equal(HN2, pgv_target['HN2'])
    np.testing.assert_almost_equal(HN1, pgv_target['HN1'])
    np.testing.assert_almost_equal(HNZ, pgv_target['HNZ'])
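
# The loop above defines the expected PGV per channel directly: integrate
# the acceleration trace once to velocity, then take the absolute peak. As
# a standalone sketch (ObsPy's default integration scheme is assumed):
def pgv_of(trace):
    vtrace = trace.copy()  # don't mutate the input acceleration trace
    vtrace.integrate()     # acceleration -> velocity
    return np.abs(vtrace.max())
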
def test_free_field():
    data_files, origin = read_data_dir('kiknet', 'usp000hzq8')
    raw_streams = []
    for dfile in data_files:
        raw_streams += read_data(dfile)

    sc = StreamCollection(raw_streams)

    processed_streams = process_streams(sc, origin)

    # all of these streams should have failed for different reasons
    npassed = np.sum([pstream.passed for pstream in processed_streams])
    assert npassed == 0
    for pstream in processed_streams:
        is_free = pstream[0].free_field
        reason = ''
        for trace in pstream:
            if trace.hasParameter('failure'):
                reason = trace.getParameter('failure')['reason']
                break
        if is_free:
            assert reason.startswith('Failed')
        else:
            assert reason == 'Failed free field sensor check.'
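
# A small helper sketch summarizing why each stream failed, using only the
# trace methods exercised above (hasParameter/getParameter); the function
# name is hypothetical:
def failure_reasons(streams):
    reasons = {}
    for st in streams:
        for tr in st:
            if tr.hasParameter('failure'):
                reasons[st.get_id()] = tr.getParameter('failure')['reason']
                break  # one reason per stream is enough here
    return reasons
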
def test_corner_frequencies():
    # The default config uses the 'constant' corner-frequency method, so
    # here we force the 'snr' method instead.
    data_files, origin = read_data_dir('geonet', 'us1000778i', '*.V1A')
    streams = []
    for f in data_files:
        streams += read_data(f)

    sc = StreamCollection(streams)

    config = get_config()

    window_conf = config['windows']

    processed_streams = sc.copy()
    for st in processed_streams:
        if st.passed:
            # Estimate noise/signal split time
            event_time = origin.time
            event_lon = origin.longitude
            event_lat = origin.latitude
            st = signal_split(st, origin)

            # Estimate end of signal
            end_conf = window_conf['signal_end']
            event_mag = origin.magnitude
            print(st)
            st = signal_end(
                st,
                event_time=event_time,
                event_lon=event_lon,
                event_lat=event_lat,
                event_mag=event_mag,
                **end_conf
            )
            wcheck_conf = window_conf['window_checks']
            st = window_checks(
                st,
                min_noise_duration=wcheck_conf['min_noise_duration'],
                min_signal_duration=wcheck_conf['min_signal_duration']
            )

    pconfig = config['processing']

    # Run the SNR check, pulling its arguments out of the processing config.
    test = [
        d for d in pconfig if list(d.keys())[0] == 'compute_snr'
    ]
    snr_config = test[0]['compute_snr']
    for stream in processed_streams:
        stream = compute_snr(
            stream,
            **snr_config
        )

    # Run get_corner_frequencies
    test = [
        d for d in pconfig if list(d.keys())[0] == 'get_corner_frequencies'
    ]
    cf_config = test[0]['get_corner_frequencies']
    snr_config = cf_config['snr']

    lp = []
    hp = []
    for stream in processed_streams:
        if not stream.passed:
            continue
        stream = get_corner_frequencies(
            stream,
            method="snr",
            snr=snr_config
        )
        if stream[0].hasParameter('corner_frequencies'):
            cfdict = stream[0].getParameter('corner_frequencies')
            lp.append(cfdict['lowpass'])
            hp.append(cfdict['highpass'])
    np.testing.assert_allclose(
        np.sort(hp),
        [0.00751431, 0.01354455, 0.04250735],
        atol=1e-6
    )
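
# Sketch of what the picked corners are ultimately for: band-limiting each
# trace. gmprocess's own pipeline does more (tapering, detrending, etc.),
# so this is only an illustration using plain ObsPy Trace.filter calls:
def bandlimit(streams):
    for st in streams:
        if not st[0].hasParameter('corner_frequencies'):
            continue
        freqs = st[0].getParameter('corner_frequencies')
        for tr in st:
            tr.filter('highpass', freq=freqs['highpass'])
            tr.filter('lowpass', freq=freqs['lowpass'])
    return streams
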
def test_controller():
    datafiles, event = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile = datafiles[0]
    input_imts = ['pgv', 'pga', 'sa2', 'sa1.0', 'sa0.3',
                  'fas2', 'fas1.0', 'fas0.3', 'arias', 'invalid']
    input_imcs = ['rotd50', 'rotd100.0', 'gmrotd50', 'gmrotd100.0',
                  'radial_transverse', 'geometric_mean', 'arithmetic_mean', 'channels',
                  'greater_of_two_horizontals', 'invalid', 'quadratic_mean']
    stream_v2 = read_geonet(datafile)[0]

    # Testing for acceleration --------------------------
    m1 = MetricsController(input_imts, input_imcs, stream_v2, event=event)
    pgms = m1.pgms

    # testing for pga, pgv, sa
    target_imcs = ['ROTD(50.0)', 'ROTD(100.0)', 'GMROTD(50.0)',
                   'GMROTD(100.0)', 'HNR', 'HNT', 'GEOMETRIC_MEAN', 'ARITHMETIC_MEAN', 'HN1', 'HN2',
                   'HNZ', 'GREATER_OF_TWO_HORIZONTALS', 'QUADRATIC_MEAN']
    for col in ['PGA', 'PGV', 'SA(1.0)', 'SA(2.0)', 'SA(0.3)']:
        imt = pgms.loc[pgms['IMT'] == col]
        imcs = imt['IMC'].tolist()
        assert len(imcs) == len(target_imcs)
        np.testing.assert_array_equal(np.sort(imcs), np.sort(target_imcs))

    # testing for fas
    for col in ['FAS(1.0)', 'FAS(2.0)', 'FAS(0.3)']:
        imt = pgms.loc[pgms['IMT'] == col]
        imcs = imt['IMC'].tolist()
        assert len(imcs) == 3
        np.testing.assert_array_equal(np.sort(imcs), ['ARITHMETIC_MEAN',
                                                      'GEOMETRIC_MEAN', 'QUADRATIC_MEAN'])

    # testing for arias
    imt = pgms.loc[pgms['IMT'] == 'ARIAS']
    imcs = imt['IMC'].tolist()
    assert len(imcs) == 1
    np.testing.assert_array_equal(np.sort(imcs), ['ARITHMETIC_MEAN'])
    _validate_steps(m1.step_sets, 'acc')

    # Testing for Velocity --------------------------
    for trace in stream_v2:
        trace.stats.standard.units = 'vel'
    m = MetricsController(input_imts, input_imcs, stream_v2, event=event)
    pgms = m.pgms

    # testing for pga, pgv, sa
    target_imcs = ['ROTD(50.0)', 'ROTD(100.0)', 'GMROTD(50.0)',
                   'GMROTD(100.0)', 'HNR', 'HNT', 'GEOMETRIC_MEAN', 'ARITHMETIC_MEAN',
                   'QUADRATIC_MEAN', 'HN1', 'HN2',
                   'HNZ', 'GREATER_OF_TWO_HORIZONTALS']
    for col in ['PGA', 'PGV', 'SA(1.0)', 'SA(2.0)', 'SA(0.3)']:
        imt = pgms.loc[pgms['IMT'] == col]
        imcs = imt['IMC'].tolist()
        assert len(imcs) == len(target_imcs)
        np.testing.assert_array_equal(np.sort(imcs), np.sort(target_imcs))

    # testing for fas
    for col in ['FAS(1.0)', 'FAS(2.0)', 'FAS(0.3)']:
        imt = pgms.loc[pgms['IMT'] == col]
        imcs = imt['IMC'].tolist()
        assert len(imcs) == 3
        np.testing.assert_array_equal(np.sort(imcs), ['ARITHMETIC_MEAN',
                                                      'GEOMETRIC_MEAN', 'QUADRATIC_MEAN'])

    # testing for arias
    imt = pgms.loc[pgms['IMT'] == 'ARIAS']
    imcs = imt['IMC'].tolist()
    assert len(imcs) == 1
    np.testing.assert_array_equal(np.sort(imcs), ['ARITHMETIC_MEAN'])
    _validate_steps(m.step_sets, 'vel')
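
# End-of-section sketch: the long IMT/IMC membership checks above are
# easier to eyeball if the long-format pgms frame is pivoted into an
# IMT-by-IMC grid with pandas; combinations that were not computed show
# up as NaN:
def pgms_table(pgms):
    return pgms.pivot_table(index='IMT', columns='IMC', values='Result')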