Example #1
def test_arias():
    ddir = os.path.join('data', 'testdata')
    datadir = pkg_resources.resource_filename('gmprocess', ddir)
    data_file = os.path.join(datadir, 'arias_data.json')
    with open(data_file, 'rt') as f:
        jdict = json.load(f)

    time = np.array(jdict['time'])
    # input and output are in m/s/s
    acc = np.array(jdict['acc']) / 100
    target_IA = jdict['ia']
    delta = time[2] - time[1]
    sr = 1 / delta
    header = {
        'delta': delta,
        'sampling_rate': sr,
        'npts': len(acc),
        'units': 'm/s/s',
        'channel': 'HN1',
        'standard': {
            'corner_frequency': np.nan,
            'station_name': '',
            'source': 'json',
            'source_file': '',
            'instrument': '',
            'instrument_period': np.nan,
            'source_format': 'json',
            'comments': '',
            'structure_type': '',
            'sensor_serial_number': '',
            'process_level': 'raw counts',
            'process_time': '',
            'horizontal_orientation': np.nan,
            'units': 'acc',
            'instrument_damping': np.nan
        }
    }
    # input is cm/s/s output is m/s/s
    trace = StationTrace(data=acc * 100, header=header)
    trace2 = trace.copy()
    trace2.stats.channel = 'HN2'
    stream = StationStream([trace, trace2])
    station = StationSummary.from_stream(stream, ['ARITHMETIC_MEAN'],
                                         ['arias'])
    pgms = station.pgms
    Ia = pgms[(pgms.IMT == 'ARIAS')
              & (pgms.IMC == 'ARITHMETIC_MEAN')].Result.tolist()[0]
    # the target has only one decimal place and is in cm/s/s
    Ia = Ia * 100
    np.testing.assert_almost_equal(Ia, target_IA, decimal=1)

    # Test other components
    data_files, _ = read_data_dir('cwb', 'us1000chhc', '2-ECU.dat')
    stream = read_data(data_files[0])[0]
    station = StationSummary.from_stream(stream, [
        'channels', 'gmrotd', 'rotd50', 'greater_of_two_horizontals',
        'ARITHMETIC_MEAN'
    ], ['arias'])
    stream = StationSummary.from_stream(stream, ['gmrotd50'], ['arias'])
    assert stream.pgms.Result.tolist() == []
def test_allow_nans():
    dpath = os.path.join("data", "testdata", "fdsn", "uu60363602")
    datadir = pkg_resources.resource_filename("gmprocess", dpath)
    sc = StreamCollection.from_directory(datadir)
    origin = read_event_json_files([os.path.join(datadir, "event.json")])[0]
    psc = process_streams(sc, origin)
    st = psc[0]

    ss = StationSummary.from_stream(
        st,
        components=["quadratic_mean"],
        imts=["FAS(4.0)"],
        bandwidth=300,
        allow_nans=True,
    )
    assert np.isnan(ss.pgms.Result).all()

    ss = StationSummary.from_stream(
        st,
        components=["quadratic_mean"],
        imts=["FAS(4.0)"],
        bandwidth=189,
        allow_nans=False,
    )
    assert ~np.isnan(ss.pgms.Result).all()
Example #3
def test_exceptions():
    ddir = os.path.join('data', 'testdata')
    datadir = pkg_resources.resource_filename('gmprocess', ddir)
    data_file = os.path.join(datadir, 'arias_data.json')
    with open(data_file, 'rt') as f:
        jdict = json.load(f)

    time = np.array(jdict['time'])
    # input and output are in m/s/s
    acc = np.array(jdict['acc']) / 100
    delta = time[2] - time[1]
    sr = 1 / delta
    header = {
        'delta': delta,
        'sampling_rate': sr,
        'npts': len(acc),
        'units': 'm/s/s',
        'channel': 'H1'
    }
    trace = Trace(data=acc, header=header)
    stream = Stream([trace])
    try:
        StationSummary.from_stream(stream, ['gmrotd50'], ['arias'])
        success = True
    except Exception:
        success = False
    assert not success

    try:
        StationSummary.from_stream(stream, ['rotd50'], ['arias'])
        success = True
    except Exception:
        success = False
    assert not success
def test_arias():
    ddir = os.path.join('data', 'testdata')
    datadir = pkg_resources.resource_filename('gmprocess', ddir)
    data_file = os.path.join(datadir, 'arias_data.json')
    with open(data_file, 'rt') as f:
        jdict = json.load(f)

    time = np.array(jdict['time'])
    # input and output are in m/s/s
    acc = np.array(jdict['acc']) / 100
    target_IA = jdict['ia']
    delta = time[2] - time[1]
    sr = 1 / delta
    header = {
        'delta': delta,
        'sampling_rate': sr,
        'npts': len(acc),
        'units': 'm/s/s',
        'channel': 'HN1',
        'standard': {'corner_frequency': np.nan,
            'station_name': '',
            'source': 'json',
            'source_file': '',
            'instrument': '',
            'instrument_period': np.nan,
            'source_format': 'json',
            'comments': '',
            'structure_type': '',
            'sensor_serial_number': '',
            'process_level': 'raw counts',
            'process_time': '',
            'horizontal_orientation': np.nan,
            'units': 'acc',
            'instrument_damping': np.nan}
    }
    # input is cm/s/s output is m/s/s
    trace = StationTrace(data=acc * 100, header=header)
    trace2 = trace.copy()
    trace2.stats.channel = 'HN2'
    stream = StationStream([trace, trace2])
    station = StationSummary.from_stream(stream, ['ARITHMETIC_MEAN'], ['arias'])
    pgms = station.pgms
    Ia = pgms[(pgms.IMT == 'ARIAS') & (pgms.IMC == 'ARITHMETIC_MEAN')].Result.tolist()[0]
    # the target has only one decimal place and is in cm/s/s
    Ia = Ia * 100
    np.testing.assert_almost_equal(Ia, target_IA, decimal=1)

    # Test other components
    data_files, _ = read_data_dir('cwb', 'us1000chhc', '2-ECU.dat')
    stream = read_data(data_files[0])[0]
    station = StationSummary.from_stream(stream,
                                         ['channels', 'gmrotd', 'rotd50',
                                             'greater_of_two_horizontals', 'ARITHMETIC_MEAN'],
                                         ['arias'])
    stream = StationSummary.from_stream(stream, ['gmrotd50'], ['arias'])
    assert stream.pgms.Result.tolist() == []
def test_exceptions():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    stream1 = stream_v2.select(channel="HN1")
    pgms = StationSummary.from_stream(stream1, ['gmrotd50'], ['pga']).pgms
    assert np.isnan(pgms.Result.iloc[0])

    for trace in stream_v2:
        stream1.append(trace)
    pgms = StationSummary.from_stream(stream1, ['gmrotd50'], ['pga']).pgms
    assert np.isnan(pgms.Result.iloc[0])
Example #6
def test_exceptions():
    datafiles, _ = read_data_dir('geonet', 'us1000778i',
                                 '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    stream1 = stream_v2.select(channel="HN1")
    pgms = StationSummary.from_stream(stream1, ['gmrotd50'], ['pga']).pgms
    assert np.isnan(pgms.Result.iloc[0])

    for trace in stream_v2:
        stream1.append(trace)
    pgms = StationSummary.from_stream(stream1, ['gmrotd50'], ['pga']).pgms
    assert np.isnan(pgms.Result.iloc[0])
Example #7
def test_pgv():
    datafiles, _ = read_data_dir("geonet", "us1000778i",
                                 "20161113_110259_WTMC_20.V2A")
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    pgv_target = {}
    for trace in stream_v2:
        vtrace = trace.copy()
        vtrace.integrate()
        pgv_target[vtrace.stats["channel"]] = np.abs(vtrace.max())

    # we've replaced HN1 etc. with H1 so channel names are not the same as
    # the original trace
    pgv_target["H1"] = pgv_target["HN1"]
    pgv_target["H2"] = pgv_target["HN2"]
    pgv_target["Z"] = pgv_target["HNZ"]

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        station_summary = StationSummary.from_stream(
            stream_v2,
            ["channels", "greater_of_two_horizontals", "gmrotd50"],
            ["pgv", "sa1.0", "saincorrect"],
        )
    pgv_df = station_summary.pgms.loc["PGV"]
    HN1 = pgv_df.loc["H1"].Result
    HN2 = pgv_df.loc["H2"].Result
    HNZ = pgv_df.loc["Z"].Result
    np.testing.assert_almost_equal(HN2, pgv_target["H2"])
    np.testing.assert_almost_equal(HN1, pgv_target["H1"])
    np.testing.assert_almost_equal(HNZ, pgv_target["Z"])
Example #8
def test_pga():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        station_summary = StationSummary.from_stream(
            stream_v2,
            ['channels', 'greater_of_two_horizontals', 'gmrotd50',
             'gmrotd100', 'gmrotd0', 'rotd50', 'geometric_mean',
             'arithmetic_mean'],
            ['pga', 'sa1.0', 'saincorrect'])
    pga_df = station_summary.pgms[station_summary.pgms.IMT == 'PGA']
    AM = pga_df[pga_df.IMC == 'ARITHMETIC_MEAN'].Result.iloc[0]
    GM = pga_df[pga_df.IMC == 'GEOMETRIC_MEAN'].Result.iloc[0]
    HN1 = pga_df[pga_df.IMC == 'H1'].Result.iloc[0]
    HN2 = pga_df[pga_df.IMC == 'H2'].Result.iloc[0]
    HNZ = pga_df[pga_df.IMC == 'Z'].Result.iloc[0]
    greater = pga_df[pga_df.IMC == 'GREATER_OF_TWO_HORIZONTALS'].Result.iloc[0]
    np.testing.assert_allclose(
        AM, 90.242335558014219, rtol=1e-3)
    np.testing.assert_allclose(
        GM, 89.791654017670112, rtol=1e-3)
    np.testing.assert_allclose(
        HN2, 81.234672390673683, rtol=1e-3)
    np.testing.assert_allclose(
        HN1, 99.249998725354743, rtol=1e-3)
    np.testing.assert_almost_equal(
        HNZ, 183.77223618666929, decimal=1)
    np.testing.assert_allclose(
        greater, 99.249998725354743, rtol=1e-3)
Example #9
def test_rotd():
    ddir = os.path.join('data', 'testdata', 'process')
    datadir = pkg_resources.resource_filename('gmprocess', ddir)
    # Create a stream and station summary, convert from m/s^2 to cm/s^2 (GAL)
    osc1_data = np.genfromtxt(datadir + '/ALCTENE.UW..sac.acc.final.txt')
    osc2_data = np.genfromtxt(datadir + '/ALCTENN.UW..sac.acc.final.txt')
    osc1_data = osc1_data.T[1] * 100
    osc2_data = osc2_data.T[1] * 100
    tr1 = Trace(data=osc1_data, header={'channel': 'H1', 'delta': 0.01,
                                        'npts': 10400})
    tr2 = Trace(data=osc2_data, header={'channel': 'H2', 'delta': 0.01,
                                        'npts': 10400})
    st = Stream([tr1, tr2])

    target_pga50 = 4.12528265306
    target_sa1050 = 10.7362857143
    target_pgv50 = 6.239364
    target_sa0350 = 10.1434159021
    target_sa3050 = 1.12614169215
    station = StationSummary.from_stream(st, ['rotd50'],
                                         ['pga', 'pgv', 'sa0.3', 'sa1.0', 'sa3.0'])
    pgms = station.pgms
    np.testing.assert_allclose(pgms['PGA']['ROTD50.0'], target_pga50, atol=0.1)
    np.testing.assert_allclose(
        pgms['SA(1.0)']['ROTD50.0'], target_sa1050, atol=0.1)
    np.testing.assert_allclose(pgms['PGV']['ROTD50.0'], target_pgv50, atol=0.1)
    np.testing.assert_allclose(
        pgms['SA(0.3)']['ROTD50.0'], target_sa0350, atol=0.1)
    np.testing.assert_allclose(
        pgms['SA(3.0)']['ROTD50.0'], target_sa3050, atol=0.1)
def test_pgv():
    datafiles, _ = read_data_dir('geonet', 'us1000778i',
                                 '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    pgv_target = {}
    for trace in stream_v2:
        vtrace = trace.copy()
        vtrace.integrate()
        pgv_target[vtrace.stats['channel']] = np.abs(vtrace.max())

    # we've replaced HN1 etc. with H1 so channel names are not the same as
    # the original trace
    pgv_target['H1'] = pgv_target['HN1']
    pgv_target['H2'] = pgv_target['HN2']
    pgv_target['Z'] = pgv_target['HNZ']

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        station_summary = StationSummary.from_stream(
            stream_v2, ['channels', 'greater_of_two_horizontals', 'gmrotd50'],
            ['pgv', 'sa1.0', 'saincorrect'])
    pgv_df = station_summary.pgms.loc['PGV']
    HN1 = pgv_df.loc['H1'].Result
    HN2 = pgv_df.loc['H2'].Result
    HNZ = pgv_df.loc['Z'].Result
    np.testing.assert_almost_equal(HN2, pgv_target['H2'])
    np.testing.assert_almost_equal(HN1, pgv_target['H1'])
    np.testing.assert_almost_equal(HNZ, pgv_target['Z'])
def test_get_peak_time():
    datafiles, _ = read_data_dir("geonet", "us1000778i",
                                 "20161113_110259_WTMC_20.V2A")
    datafile = datafiles[0]
    stream1 = read_geonet(datafile)[0]
    max_cls = Max(stream1).result
    assert len(max_cls) == 2

    max_cls = Max({"chan": [0, 1, 2, 3]}).result
    assert len(max_cls) == 1

    stream2 = read_geonet(datafile)[0]
    origin = Origin(latitude=42.6925, longitude=173.021944)
    stream_summary = StationSummary.from_stream(stream2, ["channels"],
                                                ["pgv", "pga"], origin)
    assert stream2[0].stats.pga_time == UTCDateTime(
        "2016-11-13T11:03:08.880001Z")
    assert stream2[0].stats.pgv_time == UTCDateTime(
        "2016-11-13T11:03:10.580001Z")

    assert stream2[1].stats.pga_time == UTCDateTime(
        "2016-11-13T11:03:09.960001Z")
    assert stream2[1].stats.pgv_time == UTCDateTime(
        "2016-11-13T11:03:08.860001Z")

    assert stream2[2].stats.pga_time == UTCDateTime(
        "2016-11-13T11:03:08.140001Z")
    assert stream2[2].stats.pgv_time == UTCDateTime(
        "2016-11-13T11:03:09.560001Z")
Example #12
def test_sa():
    datafiles, _ = read_data_dir('geonet', 'us1000778i',
                                 '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    sa_target = {}
    for trace in stream_v2:
        vtrace = trace.copy()
        vtrace.integrate()
        sa_target[vtrace.stats['channel']] = np.abs(vtrace.max())
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        station_summary = StationSummary.from_stream(stream_v2, [
            'greater_of_two_horizontals', 'geometric_mean', 'rotd50',
            'arithmetic_mean', 'rotd100', 'gmrotd50', 'channels'
        ], ['sa1.0', 'saincorrect'])
    pgms = station_summary.pgms
    assert 'SA(1.000)' in pgms.IMT.tolist()
    np.testing.assert_allclose(
        pgms[pgms['IMC'] == 'ARITHMETIC_MEAN'].Result.iloc[0],
        110.47168962900042)
    np.testing.assert_allclose(
        pgms[pgms['IMC'] == 'GEOMETRIC_MEAN'].Result.iloc[0],
        107.42183990654802)
    np.testing.assert_allclose(
        pgms[pgms['IMC'] == 'ROTD(50.0)'].Result.iloc[0], 106.03202302692158)
    np.testing.assert_allclose(
        pgms[pgms['IMC'] == 'ROTD(100.0)'].Result.iloc[0], 146.90233501240979)
def test_exceptions():
    datafiles, _ = read_data_dir("geonet", "us1000778i",
                                 "20161113_110259_WTMC_20.V2A")
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    stream1 = stream_v2.select(channel="HN1")
    pgms = StationSummary.from_stream(stream1, ["rotd50"], ["pga"]).pgms
    assert np.isnan(pgms.loc["PGA", "ROTD(50.0)"].Result)
def test_exceptions():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    stream1 = stream_v2.select(channel="HN1")
    pgms = StationSummary.from_stream(stream1, ['rotd50'], ['pga']).pgms
    assert np.isnan(pgms[(pgms.IMT == 'PGA') & (pgms.IMC == 'ROTD(50.0)')].Result.iloc[0])
Example #15
def test_exceptions():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    stream1 = stream_v2.select(channel="HN1")
    pgms = StationSummary.from_stream(stream1, ['rotd50'], ['pga']).pgms
    assert np.isnan(pgms.loc['PGA', 'ROTD(50.0)'].Result)
Example #16
def test_allow_nans():
    dpath = os.path.join('data', 'testdata', 'fdsn', 'uu60363602')
    datadir = pkg_resources.resource_filename('gmprocess', dpath)
    sc = StreamCollection.from_directory(datadir)
    origin = read_event_json_files([os.path.join(datadir, 'event.json')])[0]
    psc = process_streams(sc, origin)
    st = psc[0]

    ss = StationSummary.from_stream(
        st, components=['quadratic_mean'], imts=['FAS(4.0)'], bandwidth=189,
        allow_nans=True)
    assert np.isnan(ss.pgms.Result).all()

    ss = StationSummary.from_stream(
        st, components=['quadratic_mean'], imts=['FAS(4.0)'], bandwidth=189,
        allow_nans=False)
    assert ~np.isnan(ss.pgms.Result).all()
Example #17
def test_gmrotd():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]

    stream_v2 = read_geonet(datafile_v2)[0]
    station_summary = StationSummary.from_stream(stream_v2,
                                                 ['gmrotd0', 'gmrotd50',
                                                  'gmrotd100'], ['pga'])
def test_greater_of_two_horizontals():
    datafiles, _ = read_data_dir('geonet', 'us1000778i',
                                 '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    station_summary = StationSummary.from_stream(
        stream_v2, ['greater_of_two_horizontals'], ['pga'])
    pgms = station_summary.pgms
    greater = pgms.loc['PGA', 'GREATER_OF_TWO_HORIZONTALS'].Result
    np.testing.assert_almost_equal(greater, 99.3173469387755, decimal=1)
def test_greater_of_two_horizontals():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    station_summary = StationSummary.from_stream(stream_v2,
                                                 ['greater_of_two_horizontals'], ['pga'])
    station = station_summary.pgms[station_summary.pgms.IMT == 'PGA']
    greater = station[station.IMC == 'GREATER_OF_TWO_HORIZONTALS'].Result.iloc[0]
    np.testing.assert_almost_equal(greater, 99.3173469387755, decimal=1)
def test_greater_of_two_horizontals():
    datafiles, _ = read_data_dir("geonet", "us1000778i",
                                 "20161113_110259_WTMC_20.V2A")
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    station_summary = StationSummary.from_stream(
        stream_v2, ["greater_of_two_horizontals"], ["pga"])
    pgms = station_summary.pgms
    greater = pgms.loc["PGA", "GREATER_OF_TWO_HORIZONTALS"].Result
    np.testing.assert_almost_equal(greater, 99.3173469387755, decimal=1)
def test_gmrotd():
    datafiles, _ = read_data_dir("geonet", "us1000778i",
                                 "20161113_110259_WTMC_20.V2A")
    datafile_v2 = datafiles[0]

    stream_v2 = read_geonet(datafile_v2)[0]
    station_summary = StationSummary.from_stream(
        stream_v2, ["gmrotd0", "gmrotd50", "gmrotd100"], ["pga"])
    pgms = station_summary.pgms
    assert "GMROTD(50.0)" in pgms.index.get_level_values(1)
Example #22
def test_gmrotd():
    datafiles, _ = read_data_dir('geonet', 'us1000778i',
                                 '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]

    stream_v2 = read_geonet(datafile_v2)[0]
    station_summary = StationSummary.from_stream(
        stream_v2, ['gmrotd0', 'gmrotd50', 'gmrotd100'], ['pga'])
    pgms = station_summary.pgms
    assert 'GMROTD(50.0)' in pgms.IMC.tolist()
Example #23
def test_arias():
    ddir = os.path.join('data', 'testdata')
    datadir = pkg_resources.resource_filename('gmprocess', ddir)
    data_file = os.path.join(datadir, 'arias_data.json')
    with open(data_file, 'rt') as f:
        jdict = json.load(f)

    time = np.array(jdict['time'])
    # input and output are in m/s/s
    acc = np.array(jdict['acc']) / 100
    target_IA = jdict['ia']
    delta = time[2] - time[1]
    sr = 1 / delta
    header = {
        'delta': delta,
        'sampling_rate': sr,
        'npts': len(acc),
        'units': 'm/s/s',
        'channel': 'H1'
    }
    trace = Trace(data=acc, header=header)
    stream = Stream([trace])
    Ia = calculate_arias(stream, ['channels'])['H1']
    # the target has only one decimal place and is in cm/s/s
    Ia = Ia * 100
    np.testing.assert_almost_equal(Ia, target_IA, decimal=1)

    # input is cm/s/s output is m/s/s
    trace = Trace(data=acc * 100, header=header)
    stream = Stream([trace])
    station = StationSummary.from_stream(stream, ['channels'], ['arias'])
    Ia = station.pgms['ARIAS']['H1']
    # the target has only one decimal place and is in cm/s/s
    Ia = Ia * 100
    np.testing.assert_almost_equal(Ia, target_IA, decimal=1)

    # Test other components
    data_files, _ = read_data_dir('cwb', 'us1000chhc', '2-ECU.dat')
    stream = read_data(data_files[0])[0]
    station = StationSummary.from_stream(
        stream, ['channels', 'gmrotd', 'rotd50', 'greater_of_two_horizontals'],
        ['arias'])
def test_gmrotd():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]

    stream_v2 = read_geonet(datafile_v2)[0]
    station_summary = StationSummary.from_stream(stream_v2,
                                                 ['gmrotd0', 'gmrotd50',
                                                  'gmrotd100'], ['pga'])
    pgms = station_summary.pgms
    assert 'GMROTD(50.0)' in pgms.IMC.tolist()
Example #25
def test_exceptions():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    stream1 = stream_v2.select(channel="HN1")
    try:
        StationSummary.from_stream(stream1, ['rotd50'], ['pga'])
        success = True
    except PGMException:
        success = False
    assert not success

    stream2 = Stream(
        [stream_v2.select(channel="HN1")[0],
            Trace(data=np.asarray([]), header={"channel": "HN2"})])
    try:
        StationSummary.from_stream(stream2, ['rotd50'], ['pga'])
        success = True
    except PGMException:
        success = False
    assert not success

    for trace in stream_v2:
        stream1.append(trace)
    try:
        StationSummary.from_stream(stream1, ['rotd50'], ['pga'])
        success = True
    except PGMException:
        success = False
    assert not success
def test_sorted_duration():
    ddir = os.path.join("data", "testdata", "cosmos", "us1000hyfh")
    datadir = pkg_resources.resource_filename("gmprocess", ddir)
    data_file = os.path.join(
        datadir, "us1000hyfh_akbmrp_AKBMR--n.1000hyfh.BNZ.--.acc.V2c")
    stream = read_data(data_file)[0]

    station = StationSummary.from_stream(stream, ["channels"],
                                         ["sorted_duration"])
    pgms = station.pgms
    sorted_duration = pgms.loc["SORTED_DURATION", "CHANNELS"].Result

    np.testing.assert_allclose(sorted_duration, 36.805, atol=1e-4, rtol=1e-4)
Example #27
def test_sorted_duration():
    ddir = os.path.join('data', 'testdata', 'cosmos', 'us1000hyfh')
    datadir = pkg_resources.resource_filename('gmprocess', ddir)
    data_file = os.path.join(
        datadir, 'us1000hyfh_akbmrp_AKBMR--n.1000hyfh.BNZ.--.acc.V2c')
    stream = read_data(data_file)[0]

    station = StationSummary.from_stream(stream, ['channels'],
                                         ['sorted_duration'])
    pgms = station.pgms
    sorted_duration = pgms.loc['SORTED_DURATION', 'CHANNELS'].Result

    np.testing.assert_allclose(sorted_duration, 36.805, atol=1e-4, rtol=1e-4)
Example #28
def test_channels():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    station_summary = StationSummary.from_stream(stream_v2,
                                                 ['channels'], ['pga'])
    pgms = station_summary.pgms
    np.testing.assert_almost_equal(
        pgms.loc['PGA', 'H2'].Result, 81.28979591836733, decimal=1)
    np.testing.assert_almost_equal(
        pgms.loc['PGA', 'H1'].Result, 99.3173469387755, decimal=1)
    np.testing.assert_almost_equal(
        pgms.loc['PGA', 'Z'].Result, 183.89693877551022, decimal=1)
Example #29
def test_channels():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    station_summary = StationSummary.from_stream(stream_v2,
                                                 ['channels'], ['pga'])
    channel = station_summary.pgms[station_summary.pgms.IMT == 'PGA']
    np.testing.assert_almost_equal(
        channel[channel.IMC == 'HN2'].Result.iloc[0], 81.28979591836733, decimal=1)
    np.testing.assert_almost_equal(
        channel[channel.IMC == 'HN1'].Result.iloc[0], 99.3173469387755, decimal=1)
    np.testing.assert_almost_equal(
        channel[channel.IMC == 'HNZ'].Result.iloc[0], 183.89693877551022, decimal=1)
Example #30
    def setStreamMetrics(self,
                         eventid,
                         stations=None,
                         labels=None,
                         imclist=None,
                         imtlist=None,
                         origin=None):
        """Create station metrics for specified event/streams.

        Args:
            eventid (str):
                ID of event to search for in ASDF file.
            stations (list):
                List of stations to create metrics for.
            labels (list):
                List of processing labels to create metrics for.
            imclist (list):
                List of valid component names.
            imtlist (list):
                List of valid IMT names.
            origin (obspy event origin object):
                Origin object for the event.
        """
        if not self.hasEvent(eventid):
            fmt = 'No event matching %s found in workspace.'
            raise KeyError(fmt % eventid)

        streams = self.getStreams(eventid, stations=stations, labels=labels)

        for stream in streams:
            tag = stream.tag
            station, label = tag.split('_')
            summary = StationSummary.from_stream(stream,
                                                 components=imclist,
                                                 imts=imtlist,
                                                 origin=origin)
            xmlstr = summary.getMetricXML()

            path = '%s_%s_%s' % (eventid, summary.station_code.lower(), label)

            # This seems like a lot of effort just to store a string in HDF,
            # but other approaches failed. Suggestions are welcome.
            jsonarray = np.frombuffer(xmlstr, dtype=np.uint8)
            dtype = 'WaveFormMetrics'
            self.dataset.add_auxiliary_data(jsonarray,
                                            data_type=dtype,
                                            path=path,
                                            parameters={})
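
A minimal usage sketch for the method above; the `workspace` instance, event id, labels, and IMC/IMT lists are illustrative assumptions rather than values taken from these examples.

from obspy.core.event import Origin

# Hypothetical call; `workspace` is assumed to be an instance of the class
# that defines setStreamMetrics(), and the arguments are placeholders.
origin = Origin(latitude=42.6925, longitude=173.021944)
workspace.setStreamMetrics(
    'us1000778i',
    labels=['default'],
    imclist=['channels', 'greater_of_two_horizontals', 'rotd50'],
    imtlist=['pga', 'pgv', 'sa1.0'],
    origin=origin)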
Example #31
def test_channels():
    datafiles, _ = read_data_dir("geonet", "us1000778i", "20161113_110259_WTMC_20.V2A")
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    station_summary = StationSummary.from_stream(stream_v2, ["channels"], ["pga"])
    pgms = station_summary.pgms
    np.testing.assert_almost_equal(
        pgms.loc["PGA", "H2"].Result, 81.28979591836733, decimal=1
    )
    np.testing.assert_almost_equal(
        pgms.loc["PGA", "H1"].Result, 99.3173469387755, decimal=1
    )
    np.testing.assert_almost_equal(
        pgms.loc["PGA", "Z"].Result, 183.89693877551022, decimal=1
    )
    def calcStreamMetrics(self, eventid, stations=None,
                          labels=None, imclist=None, imtlist=None):
        """Create station metrics for specified event/streams.

        Args:
            eventid (str):
                ID of event to search for in ASDF file.
            stations (list):
                List of stations to create metrics for.
            labels (list):
                List of processing labels to create metrics for.
            imclist (list):
                List of valid component names.
            imtlist (list):
                List of valid IMT names.
        """
        if not self.hasEvent(eventid):
            fmt = 'No event matching %s found in workspace.'
            raise KeyError(fmt % eventid)

        streams = self.getStreams(eventid, stations=stations, labels=labels)
        event = self.getEvent(eventid)
        for stream in streams:
            tag = stream.tag
            station, label = tag.split('_')
            if imclist is None and imtlist is None:
                summary = StationSummary.from_config(stream,
                                                     event=event)
            else:
                summary = StationSummary.from_stream(stream,
                                                     components=imclist,
                                                     imts=imtlist,
                                                     event=event)
            xmlstr = summary.getMetricXML()

            path = '%s_%s_%s' % (eventid, summary.station_code.lower(), label)

            # This seems like a lot of effort just to store a string in HDF,
            # but other approaches failed. Suggestions are welcome.
            jsonarray = np.frombuffer(xmlstr, dtype=np.uint8)
            dtype = 'WaveFormMetrics'
            self.dataset.add_auxiliary_data(jsonarray,
                                            data_type=dtype,
                                            path=path,
                                            parameters={})
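
As the branch above shows, calcStreamMetrics falls back to StationSummary.from_config when no IMC/IMT lists are given. A sketch, with the `workspace` instance and event id assumed:

# Hypothetical calls; `workspace` and the event id are placeholders.
workspace.calcStreamMetrics('us1000778i')  # IMCs/IMTs taken from the config
workspace.calcStreamMetrics('us1000778i',
                            imclist=['rotd50', 'channels'],
                            imtlist=['pga', 'sa1.0'])  # explicit lists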
Example #33
def test_sa():
    datafiles, _ = read_data_dir('geonet', 'us1000778i',
                                 '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    sa_target = {}
    for trace in stream_v2:
        vtrace = trace.copy()
        vtrace.integrate()
        sa_target[vtrace.stats['channel']] = np.abs(vtrace.max())
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        station_summary = StationSummary.from_stream(
            stream_v2, ['greater_of_two_horizontals', 'gmrotd50', 'channels'],
            ['sa1.0', 'saincorrect'])
    assert 'SA(1.0)' in station_summary.pgms
def test_sa():
    datafiles, _ = read_data_dir("geonet", "us1000778i", "20161113_110259_WTMC_20.V2A")
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    sa_target = {}
    for trace in stream_v2:
        vtrace = trace.copy()
        vtrace.integrate()
        sa_target[vtrace.stats["channel"]] = np.abs(vtrace.max())
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        station_summary = StationSummary.from_stream(
            stream_v2,
            [
                "greater_of_two_horizontals",
                "geometric_mean",
                "rotd50",
                "arithmetic_mean",
                "rotd100",
                "gmrotd50",
                "channels",
            ],
            ["sa1.0", "sa0.01", "saincorrect"],
        )
    pgms = station_summary.pgms
    assert "SA(1.000)" in pgms.index.get_level_values(0)
    np.testing.assert_allclose(
        pgms.loc["SA(1.000)", "ARITHMETIC_MEAN"].Result, 110.47168962900042
    )
    np.testing.assert_allclose(
        pgms.loc["SA(1.000)", "GEOMETRIC_MEAN"].Result, 107.42183990654802
    )
    np.testing.assert_allclose(
        pgms.loc["SA(1.000)", "ROTD(50.0)"].Result, 106.03202302692158
    )
    np.testing.assert_allclose(
        pgms.loc["SA(1.000)", "ROTD(100.0)"].Result, 146.90233501240979
    )
    # Check high frequency SA
    np.testing.assert_allclose(pgms.loc["SA(0.010)", "ROTD(100.0)"].Result, 120.187153)
    np.testing.assert_allclose(pgms.loc["SA(0.010)", "GMROTD(50.0)"].Result, 95.355300)
    np.testing.assert_allclose(pgms.loc["SA(0.010)", "H1"].Result, 106.716122)
    np.testing.assert_allclose(pgms.loc["SA(0.010)", "H2"].Result, 90.497883)
    np.testing.assert_allclose(pgms.loc["SA(0.010)", "GMROTD(50.0)"].Result, 95.355300)
Example #35
def test_pgv():
    datafiles, _ = read_data_dir('geonet', 'us1000778i',
                                 '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    pgv_target = {}
    for trace in stream_v2:
        vtrace = trace.copy()
        vtrace.integrate()
        pgv_target[vtrace.stats['channel']] = np.abs(vtrace.max())
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        station_summary = StationSummary.from_stream(
            stream_v2, ['channels', 'greater_of_two_horizontals', 'gmrotd50'],
            ['pgv', 'sa1.0', 'saincorrect'])
    station_dict = station_summary.pgms['PGV']
    np.testing.assert_almost_equal(station_dict['HN2'], pgv_target['HN2'])
    np.testing.assert_almost_equal(station_dict['HN1'], pgv_target['HN1'])
    np.testing.assert_almost_equal(station_dict['HNZ'], pgv_target['HNZ'])
Example #36
def test_pga():
    datafiles, _ = read_data_dir("geonet", "us1000778i", "20161113_110259_WTMC_20.V2A")
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        station_summary = StationSummary.from_stream(
            stream_v2,
            [
                "channels",
                "greater_of_two_horizontals",
                "gmrotd0",
                "gmrotd50",
                "gmrotd100",
                "rotd50",
                "geometric_mean",
                "arithmetic_mean",
            ],
            ["pga", "sa1.0", "saincorrect"],
        )
    pga_df = station_summary.pgms.loc["PGA"]
    AM = pga_df.loc["ARITHMETIC_MEAN"].Result
    GM = pga_df.loc["GEOMETRIC_MEAN"].Result
    HN1 = pga_df.loc["H1"].Result
    HN2 = pga_df.loc["H2"].Result
    HNZ = pga_df.loc["Z"].Result
    gmrotd0 = pga_df.loc["GMROTD(0.0)"].Result
    gmrotd50 = pga_df.loc["GMROTD(50.0)"].Result
    gmrotd100 = pga_df.loc["GMROTD(100.0)"].Result
    rotd50 = pga_df.loc["ROTD(50.0)"].Result
    greater = pga_df.loc["GREATER_OF_TWO_HORIZONTALS"].Result
    np.testing.assert_allclose(AM, 90.242335558014219)
    np.testing.assert_allclose(GM, 89.791654017670112)
    np.testing.assert_allclose(HN2, 81.234672390673683)
    np.testing.assert_allclose(HN1, 99.249998725354743)
    np.testing.assert_almost_equal(HNZ, 183.77223618666929)
    np.testing.assert_allclose(greater, 99.249998725354743)

    np.testing.assert_allclose(gmrotd0, 83.487703753812113)
    np.testing.assert_allclose(gmrotd50, 86.758642638162982)
    np.testing.assert_allclose(gmrotd100, 89.791654017670112)
    np.testing.assert_allclose(rotd50, 91.401785419354567)
def test_pgv():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    pgv_target = {}
    for trace in stream_v2:
        vtrace = trace.copy()
        vtrace.integrate()
        pgv_target[vtrace.stats['channel']] = np.abs(vtrace.max())
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        station_summary = StationSummary.from_stream(stream_v2,
                                                     ['channels', 'greater_of_two_horizontals',
                                                         'gmrotd50'],
                                                     ['pgv', 'sa1.0', 'saincorrect'])
    pgv_df = station_summary.pgms[station_summary.pgms.IMT == 'PGV']
    HN1 = pgv_df[pgv_df.IMC == 'HN1'].Result.iloc[0]
    HN2 = pgv_df[pgv_df.IMC == 'HN2'].Result.iloc[0]
    HNZ = pgv_df[pgv_df.IMC == 'HNZ'].Result.iloc[0]
    np.testing.assert_almost_equal(HN2, pgv_target['HN2'])
    np.testing.assert_almost_equal(HN1, pgv_target['HN1'])
    np.testing.assert_almost_equal(HNZ, pgv_target['HNZ'])
def test_pga():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        station_summary = StationSummary.from_stream(stream_v2,
                                                     ['channels', 'greater_of_two_horizontals', 'gmrotd50',
                                                      'gmrotd100', 'gmrotd0'],
                                                     ['pga', 'sa1.0', 'saincorrect'])
    pga_df = station_summary.pgms[station_summary.pgms.IMT == 'PGA']
    HN1 = pga_df[pga_df.IMC == 'HN1'].Result.iloc[0]
    HN2 = pga_df[pga_df.IMC == 'HN2'].Result.iloc[0]
    HNZ = pga_df[pga_df.IMC == 'HNZ'].Result.iloc[0]
    greater = pga_df[pga_df.IMC == 'GREATER_OF_TWO_HORIZONTALS'].Result.iloc[0]
    np.testing.assert_almost_equal(
        HN2, 81.28979591836733, decimal=1)
    np.testing.assert_almost_equal(
        HN1, 99.3173469387755, decimal=1)
    np.testing.assert_almost_equal(
        HNZ, 183.89693877551022, decimal=1)
    np.testing.assert_almost_equal(greater, 99.3173469387755, decimal=1)
def test_rotd():
    ddir = os.path.join('data', 'testdata', 'process')
    datadir = pkg_resources.resource_filename('gmprocess', ddir)
    # Create a stream and station summary, convert from m/s^2 to cm/s^2 (GAL)
    osc1_data = np.genfromtxt(datadir + '/ALCTENE.UW..sac.acc.final.txt')
    osc2_data = np.genfromtxt(datadir + '/ALCTENN.UW..sac.acc.final.txt')
    osc1_data = osc1_data.T[1] * 100
    osc2_data = osc2_data.T[1] * 100
    standard = {
        'corner_frequency': np.nan,
        'station_name': '',
        'source': 'json',
        'instrument': '',
        'instrument_period': np.nan,
        'source_format': 'json',
        'comments': '',
        'source_file': '',
        'structure_type': '',
        'horizontal_orientation': np.nan,
        'sensor_serial_number': '',
        'process_level': 'raw counts',
        'process_time': '',
        'units': 'acc',
        'instrument_damping': np.nan
    }
    tr1 = StationTrace(data=osc1_data, header={
        'channel': 'HN1', 'delta': 0.01, 'npts': 24001,
        'standard': standard.copy()})
    tr2 = StationTrace(data=osc2_data, header={
        'channel': 'HN2', 'delta': 0.01, 'npts': 24001,
        'standard': standard.copy()})
    st = StationStream([tr1, tr2])

    target_pga50 = 4.12528265306
    target_sa1050 = 10.7362857143
    target_pgv50 = 6.239364
    target_sa0350 = 10.1434159021
    target_sa3050 = 1.12614169215
    station = StationSummary.from_stream(st, ['rotd50'],
                                         ['pga', 'pgv', 'sa0.3', 'sa1.0', 'sa3.0'])

    pgms = station.pgms
    ROTD50 = pgms[pgms.IMC == 'ROTD(50.0)']
    pga = ROTD50[ROTD50.IMT == 'PGA'].Result.iloc[0]
    pgv = ROTD50[ROTD50.IMT == 'PGV'].Result.iloc[0]
    SA10 = ROTD50[ROTD50.IMT == 'SA(1.0)'].Result.iloc[0]
    SA03 = ROTD50[ROTD50.IMT == 'SA(0.3)'].Result.iloc[0]
    SA30 = ROTD50[ROTD50.IMT == 'SA(3.0)'].Result.iloc[0]
    np.testing.assert_allclose(pga, target_pga50, atol=0.1)
    np.testing.assert_allclose(SA10, target_sa1050, atol=0.1)
    np.testing.assert_allclose(pgv, target_pgv50, atol=0.1)
    np.testing.assert_allclose(SA03, target_sa0350, atol=0.1)
    np.testing.assert_allclose(SA30, target_sa3050, atol=0.1)
def streams_to_dataframe(streams, imcs=None, imts=None, event=None):
    """Extract peak ground motions from list of processed StationStream objects.

    Note: The PGM columns underneath each channel will vary depending on
    the units of the Stream being passed in (velocity sensors can only
    generate PGV) and on the imts list passed in by the user. Spectral
    acceleration columns will be formatted as SA(0.3) for 0.3 second
    spectral acceleration, for example.

    Args:
        streams (list):
            List of processed StationStream objects.
        imcs (list):
            Strings designating desired components to create in table.
        imts (list):
            Strings designating desired PGMs to create in table.
        event (ScalarEvent):
            Defines the focal time, geographic location, and magnitude of
            an earthquake hypocenter. Default is None.

    Returns:
        DataFrame: Pandas dataframe containing columns:
            - STATION Station code.
            - NAME Text description of station.
            - LOCATION Two character location code.
            - SOURCE Long form string containing source network.
            - NETWORK Short network code.
            - LAT Station latitude
            - LON Station longitude
            - DISTANCE Epicentral distance (km) (if epicentral
              lat/lon provided)
            - HN1 East-west channel (or H1) (multi-index with pgm columns):
                - PGA Peak ground acceleration (%g).
                - PGV Peak ground velocity (cm/s).
                - SA(0.3) Pseudo-spectral acceleration at 0.3 seconds (%g).
                - SA(1.0) Pseudo-spectral acceleration at 1.0 seconds (%g).
                - SA(3.0) Pseudo-spectral acceleration at 3.0 seconds (%g).
            - HN2 North-south channel (or H2) (multi-index with pgm columns):
                - PGA Peak ground acceleration (%g).
                - PGV Peak ground velocity (cm/s).
                - SA(0.3) Pseudo-spectral acceleration at 0.3 seconds (%g).
                - SA(1.0) Pseudo-spectral acceleration at 1.0 seconds (%g).
                - SA(3.0) Pseudo-spectral acceleration at 3.0 seconds (%g).
            - HNZ Vertical channel (or HZ) (multi-index with pgm columns):
                - PGA Peak ground acceleration (%g).
                - PGV Peak ground velocity (cm/s).
                - SA(0.3) Pseudo-spectral acceleration at 0.3 seconds (%g).
                - SA(1.0) Pseudo-spectral acceleration at 1.0 seconds (%g).
                - SA(3.0) Pseudo-spectral acceleration at 3.0 seconds (%g).
            - GREATER_OF_TWO_HORIZONTALS (multi-index with pgm columns):
                - PGA Peak ground acceleration (%g).
                - PGV Peak ground velocity (cm/s).
                - SA(0.3) Pseudo-spectral acceleration at 0.3 seconds (%g).
                - SA(1.0) Pseudo-spectral acceleration at 1.0 seconds (%g).
                - SA(3.0) Pseudo-spectral acceleration at 3.0 seconds (%g).
    """

    if imcs is None:
        station_summary_imcs = DEFAULT_IMCS
    else:
        station_summary_imcs = imcs
    if imts is None:
        station_summary_imts = DEFAULT_IMTS
    else:
        station_summary_imts = imts

    subdfs = []
    for stream in streams:
        if not stream.passed:
            continue
        if len(stream) < 3:
            continue
        stream_summary = StationSummary.from_stream(
            stream, station_summary_imcs, station_summary_imts, event)
        summary = stream_summary.summary
        subdfs += [summary]
    dataframe = pd.concat(subdfs, axis=0).reset_index(drop=True)

    return dataframe
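
A usage sketch for streams_to_dataframe; `processed_streams` and `event` are assumed to already exist (for example, the output of process_streams and a ScalarEvent), and the column access follows the multi-index layout described in the docstring.

# Hypothetical usage; `processed_streams` and `event` are assumptions here.
df = streams_to_dataframe(processed_streams,
                          imcs=['channels', 'greater_of_two_horizontals'],
                          imts=['pga', 'pgv', 'sa1.0'],
                          event=event)
# Channel-level PGMs live under a two-level column index, e.g. ('HN1', 'PGA');
# the actual channel names depend on the input streams.
print(df['HN1']['PGA'])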
def test_stationsummary():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile = datafiles[0]
    origin = Origin(latitude=42.6925, longitude=173.021944)

    target_imcs = np.sort(np.asarray(['GREATER_OF_TWO_HORIZONTALS',
                                      'HN1', 'HN2', 'HNZ', 'ROTD(50.0)',
                                      'ROTD(100.0)']))
    target_imts = np.sort(np.asarray(['SA(1.0)', 'PGA', 'PGV']))
    stream = read_geonet(datafile)[0]
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        stream_summary = StationSummary.from_stream(
            stream,
            ['greater_of_two_horizontals',
             'channels',
             'rotd50',
             'rotd100',
             'invalid'],
            ['sa1.0', 'PGA', 'pgv', 'invalid'], origin)
        original_stream = stream_summary.stream
        stream_summary.stream = []
        final_stream = stream_summary.stream
        assert original_stream == final_stream
        original_code = stream_summary.station_code
        np.testing.assert_array_equal(np.sort(stream_summary.components),
                                      target_imcs)
        np.testing.assert_array_equal(np.sort(stream_summary.imts),
                                      target_imts)
        np.testing.assert_almost_equal(stream_summary.get_pgm('PGA', 'HN1'),
                                       99.3173469387755, decimal=1)
        target_available = np.sort(np.asarray([
            'greater_of_two_horizontals', 'geometric_mean', 'arithmetic_mean',
            'channels', 'gmrotd', 'rotd', 'quadratic_mean',
            'radial_transverse']))
        imcs = stream_summary.available_imcs
        np.testing.assert_array_equal(np.sort(imcs), target_available)
        target_available = np.sort(np.asarray(['pga',
                                               'pgv',
                                               'sa',
                                               'arias',
                                               'fas']))
        imts = stream_summary.available_imts
        np.testing.assert_array_equal(np.sort(imts), target_available)
    test_pgms = {
        'PGV': {
            'ROTD(100.0)': 114.24894584734818,
            'ROTD(50.0)': 81.55436750525355,
            'HNZ': 37.47740000000001,
            'HN1': 100.81460000000004,
            'HN2': 68.4354,
            'GREATER_OF_TWO_HORIZONTALS': 100.81460000000004},
        'PGA': {
            'ROTD(100.0)': 100.73875535385548,
            'ROTD(50.0)': 91.40178541935455,
            'HNZ': 183.7722361866693,
            'HN1': 99.24999872535474,
            'HN2': 81.23467239067368,
            'GREATER_OF_TWO_HORIZONTALS': 99.24999872535474},
        'SA(1.0)': {
            'ROTD(100.0)': 146.9023350124098,
            'ROTD(50.0)': 106.03202302692158,
            'HNZ': 27.74118995438756,
            'HN1': 136.25041187387063,
            'HN2': 84.69296738413021,
            'GREATER_OF_TWO_HORIZONTALS': 136.25041187387063}
    }
    pgms = stream_summary.pgms
    for imt_str in test_pgms:
        for imc_str in test_pgms[imt_str]:
            imt = pgms.loc[pgms['IMT'] == imt_str]
            imc = imt.loc[imt['IMC'] == imc_str]
            results = imc.Result.tolist()
            assert len(results) == 1
            np.testing.assert_almost_equal(
                results[0], test_pgms[imt_str][imc_str], decimal=10)

    # Test with fas
    stream = read_geonet(datafile)[0]
    stream_summary = StationSummary.from_stream(
        stream,
        ['greater_of_two_horizontals',
         'channels',
         'geometric_mean'],
        ['sa1.0', 'PGA', 'pgv', 'fas2.0'])
    target_imcs = np.sort(np.asarray(['GREATER_OF_TWO_HORIZONTALS',
                                      'HN1', 'HN2', 'HNZ',
                                      'GEOMETRIC_MEAN']))
    target_imts = np.sort(np.asarray(['SA(1.0)',
                                      'PGA', 'PGV', 'FAS(2.0)']))
    np.testing.assert_array_equal(np.sort(stream_summary.components),
                                  target_imcs)
    np.testing.assert_array_equal(np.sort(stream_summary.imts),
                                  target_imts)

    # Test config use
    stream = read_geonet(datafile)[0]
    stream_summary = StationSummary.from_config(stream)
    target_imcs = np.sort(np.asarray(['GREATER_OF_TWO_HORIZONTALS',
                                      'HN1', 'HN2', 'HNZ']))
    target_imts = np.sort(np.asarray(['SA(1.0)', 'SA(2.0)', 'SA(3.0)',
                                      'SA(0.3)', 'PGA', 'PGV', 'FAS(1.0)', 'FAS(2.0)',
                                      'FAS(3.0)', 'FAS(0.3)']))
    assert stream_summary.smoothing == 'konno_ohmachi'
    assert stream_summary.bandwidth == 20.0
    assert stream_summary.damping == 0.05

    # test XML output
    stream = read_geonet(datafile)[0]
    imclist = ['greater_of_two_horizontals',
               'channels',
               'rotd50.0',
               'rotd100.0']
    imtlist = ['sa1.0', 'PGA', 'pgv', 'fas2.0', 'arias']
    stream_summary = StationSummary.from_stream(stream, imclist, imtlist)
    xmlstr = stream_summary.getMetricXML()
    print(xmlstr.decode('utf-8'))

    stream2 = StationSummary.fromMetricXML(xmlstr)
    cmp1 = np.sort(['GREATER_OF_TWO_HORIZONTALS', 'HN1', 'HN2', 'HNZ',
            'ROTD100.0', 'ROTD50.0'])
    cmp2 = np.sort(stream2.components)
    np.testing.assert_array_equal(cmp1, cmp2)
    imt1 = np.sort(stream_summary.imts)
    imt2 = np.sort(stream2.imts)
    np.testing.assert_array_equal(imt1, imt2)