def test_get_peak_time():
    """Check peak-time (pga_time/pgv_time) computation on the WTMC record."""
    datafiles, _ = read_data_dir("geonet", "us1000778i", "20161113_110259_WTMC_20.V2A")
    datafile = datafiles[0]

    # Max of a real stream yields a two-entry result.
    stream1 = read_geonet(datafile)[0]
    max_cls = Max(stream1).result
    assert len(max_cls) == 2

    # A plain dict input yields a single-entry result.
    max_cls = Max({"chan": [0, 1, 2, 3]}).result
    assert len(max_cls) == 1

    # Computing channel PGMs annotates each trace with peak times.
    stream2 = read_geonet(datafile)[0]
    origin = Origin(latitude=42.6925, longitude=173.021944)
    # from_stream() is called for its side effect of setting pga_time /
    # pgv_time on the trace stats; the returned summary itself is unused.
    StationSummary.from_stream(stream2, ["channels"], ["pgv", "pga"], origin)
    assert stream2[0].stats.pga_time == UTCDateTime("2016-11-13T11:03:08.880001Z")
    assert stream2[0].stats.pgv_time == UTCDateTime("2016-11-13T11:03:10.580001Z")
    assert stream2[1].stats.pga_time == UTCDateTime("2016-11-13T11:03:09.960001Z")
    assert stream2[1].stats.pgv_time == UTCDateTime("2016-11-13T11:03:08.860001Z")
    assert stream2[2].stats.pga_time == UTCDateTime("2016-11-13T11:03:08.140001Z")
    assert stream2[2].stats.pgv_time == UTCDateTime("2016-11-13T11:03:09.560001Z")
def test_sa():
    """Spectral-acceleration IMC results for the WTMC record."""
    datafiles, _ = read_data_dir('geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    stream = read_geonet(datafiles[0])[0]
    # Build per-channel velocity peaks (kept to mirror the sibling PGV test).
    sa_target = {}
    for tr in stream:
        vel = tr.copy()
        vel.integrate()
        sa_target[vel.stats['channel']] = np.abs(vel.max())
    imcs = ['greater_of_two_horizontals', 'geometric_mean', 'rotd50',
            'arithmetic_mean', 'rotd100', 'gmrotd50', 'channels']
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        summary = StationSummary.from_stream(stream, imcs, ['sa1.0', 'saincorrect'])
    pgms = summary.pgms
    assert 'SA(1.000)' in pgms.IMT.tolist()
    expected = {
        'ARITHMETIC_MEAN': 110.47168962900042,
        'GEOMETRIC_MEAN': 107.42183990654802,
        'ROTD(50.0)': 106.03202302692158,
        'ROTD(100.0)': 146.90233501240979,
    }
    for imc, value in expected.items():
        np.testing.assert_allclose(pgms[pgms['IMC'] == imc].Result.iloc[0], value)
def test_pgv():
    """PGV per channel should match the peak of each integrated trace."""
    datafiles, _ = read_data_dir('geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    stream = read_geonet(datafiles[0])[0]
    # Target PGV: absolute peak of the velocity (integrated) trace.
    targets = {}
    for tr in stream:
        vel = tr.copy()
        vel.integrate()
        targets[vel.stats['channel']] = np.abs(vel.max())
    # we've replaced HN1 etc. with H1 so channel names are not the same as
    # the original trace
    targets['H1'] = targets['HN1']
    targets['H2'] = targets['HN2']
    targets['Z'] = targets['HNZ']
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        summary = StationSummary.from_stream(
            stream,
            ['channels', 'greater_of_two_horizontals', 'gmrotd50'],
            ['pgv', 'sa1.0', 'saincorrect'])
    pgv = summary.pgms.loc['PGV']
    for chan in ('H2', 'H1', 'Z'):
        np.testing.assert_almost_equal(pgv.loc[chan].Result, targets[chan])
def test_exceptions():
    """StationSummary.from_stream must raise PGMException for bad streams."""
    datafiles, _ = read_data_dir('geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    stream_v2 = read_geonet(datafiles[0])[0]

    # A single horizontal channel is expected to raise for rotd50.
    # (Original spelled the flag 'sucess' and asserted '== False'.)
    success = True
    try:
        StationSummary.from_stream(stream_v2.select(channel="HN1"), ['rotd50'], ['pga'])
    except PGMException:
        success = False
    assert not success

    # A stream containing an empty trace is expected to raise.
    stream2 = Stream([
        stream_v2.select(channel="HN1")[0],
        Trace(data=np.asarray([]), header={"channel": "HN2"}),
    ])
    success = True
    try:
        StationSummary.from_stream(stream2, ['rotd50'], ['pga'])
    except PGMException:
        success = False
    assert not success

    # Appending duplicate channels is expected to raise as well.
    stream1 = stream_v2.select(channel="HN1")
    for trace in stream_v2:
        stream1.append(trace)
    success = True
    try:
        StationSummary.from_stream(stream1, ['rotd50'], ['pga'])
    except PGMException:
        success = False
    assert not success
def test_pga():
    """PGA values for several IMCs on the WTMC record."""
    datafiles, _ = read_data_dir('geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    stream = read_geonet(datafiles[0])[0]
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        summary = StationSummary.from_stream(
            stream,
            ['channels', 'greater_of_two_horizontals', 'gmrotd50',
             'gmrotd100', 'gmrotd0', 'rotd50', 'geometric_mean',
             'arithmetic_mean'],
            ['pga', 'sa1.0', 'saincorrect'])
    pga = summary.pgms[summary.pgms.IMT == 'PGA']

    def result(imc):
        # First (only) PGA row for the requested IMC.
        return pga[pga.IMC == imc].Result.iloc[0]

    np.testing.assert_allclose(result('ARITHMETIC_MEAN'), 90.242335558014219, rtol=1e-3)
    np.testing.assert_allclose(result('GEOMETRIC_MEAN'), 89.791654017670112, rtol=1e-3)
    np.testing.assert_allclose(result('H2'), 81.234672390673683, rtol=1e-3)
    np.testing.assert_allclose(result('H1'), 99.249998725354743, rtol=1e-3)
    np.testing.assert_almost_equal(result('Z'), 183.77223618666929, decimal=1)
    np.testing.assert_allclose(
        result('GREATER_OF_TWO_HORIZONTALS'), 99.249998725354743, rtol=1e-3)
def test():
    """StreamCollection channel grouping across knet and geonet data."""
    streams = []

    # Three unique channels should group into a single 3-trace stream.
    datafiles, origin = read_data_dir("knet", "us2000cnnl", "AOM0031801241951*")
    for datafile in datafiles:
        streams += read_knet(datafile)
    grouped = StreamCollection(streams)
    assert len(grouped) == 1
    assert grouped[0].count() == 3

    # Adding a geonet file produces a second grouped stream.
    datafiles, origin = read_data_dir(
        "geonet", "us1000778i", "20161113_110313_THZ_20.V2A"
    )
    streams += read_geonet(datafiles[0])
    grouped = StreamCollection(streams)
    assert len(grouped) == 2
    assert grouped[0].count() == 3
    assert grouped[1].count() == 3

    # A single-channel file becomes its own 1-trace group.
    datafiles, origin = read_data_dir("knet", "us2000cnnl", "AOM0071801241951.UD")
    streams += read_knet(datafiles[0])
    grouped = StreamCollection(streams)
    # assert "One channel stream:" in logstream.getvalue()
    assert len(grouped) == 3
    assert grouped[0].count() == 3
    assert grouped[1].count() == 3
    assert grouped[2].count() == 1
def test_pgv():
    """PGV per channel should match the peak of each integrated trace."""
    datafiles, _ = read_data_dir("geonet", "us1000778i", "20161113_110259_WTMC_20.V2A")
    stream = read_geonet(datafiles[0])[0]
    targets = {}
    for tr in stream:
        vel = tr.copy()
        vel.integrate()
        targets[vel.stats["channel"]] = np.abs(vel.max())
    # we've replaced HN1 etc. with H1 so channel names are not the same as
    # the original trace
    targets["H1"] = targets["HN1"]
    targets["H2"] = targets["HN2"]
    targets["Z"] = targets["HNZ"]
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        summary = StationSummary.from_stream(
            stream,
            ["channels", "greater_of_two_horizontals", "gmrotd50"],
            ["pgv", "sa1.0", "saincorrect"],
        )
    pgv = summary.pgms.loc["PGV"]
    for chan in ("H2", "H1", "Z"):
        np.testing.assert_almost_equal(pgv.loc[chan].Result, targets[chan])
def test_exceptions():
    """rotd50 on a single horizontal channel yields a NaN result."""
    datafiles, _ = read_data_dir('geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    stream = read_geonet(datafiles[0])[0]
    single = stream.select(channel="HN1")
    pgms = StationSummary.from_stream(single, ['rotd50'], ['pga']).pgms
    row = pgms[(pgms.IMT == 'PGA') & (pgms.IMC == 'ROTD(50.0)')]
    assert np.isnan(row.Result.iloc[0])
def test_exceptions():
    """rotd50 on a single horizontal channel yields a NaN result."""
    datafiles, _ = read_data_dir('geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    stream = read_geonet(datafiles[0])[0]
    single = stream.select(channel="HN1")
    summary = StationSummary.from_stream(single, ['rotd50'], ['pga'])
    # (IMT, IMC) MultiIndex lookup.
    assert np.isnan(summary.pgms.loc['PGA', 'ROTD(50.0)'].Result)
def test_exceptions():
    """rotd50 on a single horizontal channel yields a NaN result."""
    datafiles, _ = read_data_dir("geonet", "us1000778i", "20161113_110259_WTMC_20.V2A")
    stream = read_geonet(datafiles[0])[0]
    single = stream.select(channel="HN1")
    summary = StationSummary.from_stream(single, ["rotd50"], ["pga"])
    assert np.isnan(summary.pgms.loc["PGA", "ROTD(50.0)"].Result)
def test_gmrotd():
    """GMRotD percentiles can be computed without raising."""
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile_v2 = datafiles[0]
    stream_v2 = read_geonet(datafile_v2)[0]
    station_summary = StationSummary.from_stream(
        stream_v2, ['gmrotd0', 'gmrotd50', 'gmrotd100'], ['pga'])
    # The original test bound the summary but asserted nothing; at minimum
    # confirm a summary object was actually produced.
    assert station_summary is not None
def test_greater_of_two_horizontals():
    """Greater-of-two-horizontals PGA for the WTMC record."""
    datafiles, _ = read_data_dir('geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    stream = read_geonet(datafiles[0])[0]
    summary = StationSummary.from_stream(
        stream, ['greater_of_two_horizontals'], ['pga'])
    pga_rows = summary.pgms[summary.pgms.IMT == 'PGA']
    greater = pga_rows[pga_rows.IMC == 'GREATER_OF_TWO_HORIZONTALS'].Result.iloc[0]
    np.testing.assert_almost_equal(greater, 99.3173469387755, decimal=1)
def test_greater_of_two_horizontals():
    """Greater-of-two-horizontals PGA via MultiIndex lookup."""
    datafiles, _ = read_data_dir("geonet", "us1000778i", "20161113_110259_WTMC_20.V2A")
    stream = read_geonet(datafiles[0])[0]
    summary = StationSummary.from_stream(
        stream, ["greater_of_two_horizontals"], ["pga"])
    value = summary.pgms.loc["PGA", "GREATER_OF_TWO_HORIZONTALS"].Result
    np.testing.assert_almost_equal(value, 99.3173469387755, decimal=1)
def test_gmrotd():
    """GMROTD(50.0) shows up among the computed IMCs."""
    datafiles, _ = read_data_dir('geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    stream = read_geonet(datafiles[0])[0]
    summary = StationSummary.from_stream(
        stream, ['gmrotd0', 'gmrotd50', 'gmrotd100'], ['pga'])
    assert 'GMROTD(50.0)' in summary.pgms.IMC.tolist()
def test_gmrotd():
    """GMROTD(50.0) shows up in the pgms MultiIndex."""
    datafiles, _ = read_data_dir("geonet", "us1000778i", "20161113_110259_WTMC_20.V2A")
    stream = read_geonet(datafiles[0])[0]
    summary = StationSummary.from_stream(
        stream, ["gmrotd0", "gmrotd50", "gmrotd100"], ["pga"])
    # IMC is the second level of the pgms index.
    assert "GMROTD(50.0)" in summary.pgms.index.get_level_values(1)
def test_greater_of_two_horizontals():
    """Greater-of-two-horizontals PGA via MultiIndex lookup."""
    datafiles, _ = read_data_dir('geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    stream = read_geonet(datafiles[0])[0]
    summary = StationSummary.from_stream(
        stream, ['greater_of_two_horizontals'], ['pga'])
    value = summary.pgms.loc['PGA', 'GREATER_OF_TWO_HORIZONTALS'].Result
    np.testing.assert_almost_equal(value, 99.3173469387755, decimal=1)
def test_gmrotd():
    """GMROTD(50.0) appears among the computed IMCs."""
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    stream = read_geonet(datafiles[0])[0]
    summary = StationSummary.from_stream(
        stream, ['gmrotd0', 'gmrotd50', 'gmrotd100'], ['pga'])
    assert 'GMROTD(50.0)' in summary.pgms.IMC.tolist()
def test_exceptions():
    """gmrotd50 yields NaN for unusable channel combinations."""
    datafiles, _ = read_data_dir('geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    full_stream = read_geonet(datafiles[0])[0]
    # Only one horizontal channel -> NaN result.
    single = full_stream.select(channel="HN1")
    pgms = StationSummary.from_stream(single, ['gmrotd50'], ['pga']).pgms
    assert np.isnan(pgms.Result.iloc[0])
    # Appending duplicate channels -> still NaN.
    for trace in full_stream:
        single.append(trace)
    pgms = StationSummary.from_stream(single, ['gmrotd50'], ['pga']).pgms
    assert np.isnan(pgms.Result.iloc[0])
def test_exceptions():
    """gmrotd50 produces NaN when channels are unusable."""
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    source_stream = read_geonet(datafiles[0])[0]
    partial = source_stream.select(channel="HN1")
    # Single horizontal channel.
    summary = StationSummary.from_stream(partial, ['gmrotd50'], ['pga'])
    assert np.isnan(summary.pgms.Result.iloc[0])
    # Duplicate channels appended onto the stream.
    for trace in source_stream:
        partial.append(trace)
    summary = StationSummary.from_stream(partial, ['gmrotd50'], ['pga'])
    assert np.isnan(summary.pgms.Result.iloc[0])
def test_channels():
    """Per-channel PGA values (HN1/HN2/HNZ) for the WTMC record."""
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    stream = read_geonet(datafiles[0])[0]
    summary = StationSummary.from_stream(stream, ['channels'], ['pga'])
    pga = summary.pgms[summary.pgms.IMT == 'PGA']
    expected = [('HN2', 81.28979591836733),
                ('HN1', 99.3173469387755),
                ('HNZ', 183.89693877551022)]
    for imc, value in expected:
        np.testing.assert_almost_equal(
            pga[pga.IMC == imc].Result.iloc[0], value, decimal=1)
def test_channels():
    """Per-channel PGA via MultiIndex (H1/H2/Z) lookups."""
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    stream = read_geonet(datafiles[0])[0]
    summary = StationSummary.from_stream(stream, ['channels'], ['pga'])
    pgms = summary.pgms
    expected = {'H2': 81.28979591836733,
                'H1': 99.3173469387755,
                'Z': 183.89693877551022}
    for chan, value in expected.items():
        np.testing.assert_almost_equal(pgms.loc['PGA', chan].Result, value, decimal=1)
def test_channels():
    """Per-channel PGA via MultiIndex (H1/H2/Z) lookups."""
    datafiles, _ = read_data_dir("geonet", "us1000778i", "20161113_110259_WTMC_20.V2A")
    stream = read_geonet(datafiles[0])[0]
    summary = StationSummary.from_stream(stream, ["channels"], ["pga"])
    pgms = summary.pgms
    expected = [("H2", 81.28979591836733),
                ("H1", 99.3173469387755),
                ("Z", 183.89693877551022)]
    for chan, value in expected:
        np.testing.assert_almost_equal(pgms.loc["PGA", chan].Result, value, decimal=1)
def test_end_to_end():
    """MetricsController end-to-end: every valid IMT/IMC pair and its value.

    The original bound `target_imcs`/`target_imts` arrays that were never
    used; they have been removed.
    """
    datafiles, _ = read_data_dir('geonet', 'us1000778i',
                                 '20161113_110259_WTMC_20.V2A')
    datafile = datafiles[0]
    stream = read_geonet(datafile)[0]
    input_imcs = [
        'greater_of_two_horizontals', 'channels', 'rotd50', 'rotd100',
        'invalid'
    ]
    input_imts = ['sa1.0', 'PGA', 'pgv', 'invalid']
    m = MetricsController(input_imts, input_imcs, stream)
    test_pgms = [('PGV', 'ROTD(100.0)', 114.24894584734818),
                 ('PGV', 'ROTD(50.0)', 81.55436750525355),
                 ('PGV', 'Z', 37.47740000000001),
                 ('PGV', 'H1', 100.81460000000004),
                 ('PGV', 'H2', 68.4354),
                 ('PGV', 'GREATER_OF_TWO_HORIZONTALS', 100.81460000000004),
                 ('PGA', 'ROTD(100.0)', 100.73875535385548),
                 ('PGA', 'ROTD(50.0)', 91.40178541935455),
                 ('PGA', 'Z', 183.7722361866693),
                 ('PGA', 'H1', 99.24999872535474),
                 ('PGA', 'H2', 81.23467239067368),
                 ('PGA', 'GREATER_OF_TWO_HORIZONTALS', 99.24999872535474),
                 ('SA(1.0)', 'ROTD(100.0)', 146.9023350124098),
                 ('SA(1.0)', 'ROTD(50.0)', 106.03202302692158),
                 ('SA(1.0)', 'Z', 27.74118995438756),
                 ('SA(1.0)', 'H1', 136.25041187387063),
                 ('SA(1.0)', 'H2', 84.69296738413021),
                 ('SA(1.0)', 'GREATER_OF_TWO_HORIZONTALS', 136.25041187387063)]
    pgms = m.pgms
    # Invalid IMT/IMC inputs must be dropped, leaving exactly these rows.
    assert len(pgms['IMT'].tolist()) == len(test_pgms)
    for target_imt, target_imc, value in test_pgms:
        sub_imt = pgms.loc[pgms['IMT'] == target_imt]
        df = sub_imt.loc[sub_imt['IMC'] == target_imc]
        assert len(df['IMT'].tolist()) == 1
        np.testing.assert_array_almost_equal(df['Result'].tolist()[0], value,
                                             decimal=10)
def test_sa():
    """SA(1.0) appears in the station summary PGMs."""
    datafiles, _ = read_data_dir('geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    stream = read_geonet(datafiles[0])[0]
    # Build per-channel velocity peaks (kept to mirror the sibling PGV test).
    sa_target = {}
    for tr in stream:
        vel = tr.copy()
        vel.integrate()
        sa_target[vel.stats['channel']] = np.abs(vel.max())
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        summary = StationSummary.from_stream(
            stream,
            ['greater_of_two_horizontals', 'gmrotd50', 'channels'],
            ['sa1.0', 'saincorrect'])
    assert 'SA(1.0)' in summary.pgms
def test_end_to_end():
    """MetricsController end-to-end (HN channel names): all IMT/IMC values.

    The original bound `target_imcs`/`target_imts` arrays that were never
    used; they have been removed.
    """
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile = datafiles[0]
    stream = read_geonet(datafile)[0]
    input_imcs = ['greater_of_two_horizontals', 'channels', 'rotd50',
                  'rotd100', 'invalid']
    input_imts = ['sa1.0', 'PGA', 'pgv', 'invalid']
    m = MetricsController(input_imts, input_imcs, stream)
    test_pgms = [
        ('PGV', 'ROTD(100.0)', 114.24894584734818),
        ('PGV', 'ROTD(50.0)', 81.55436750525355),
        ('PGV', 'HNZ', 37.47740000000001),
        ('PGV', 'HN1', 100.81460000000004),
        ('PGV', 'HN2', 68.4354),
        ('PGV', 'GREATER_OF_TWO_HORIZONTALS', 100.81460000000004),
        ('PGA', 'ROTD(100.0)', 100.73875535385548),
        ('PGA', 'ROTD(50.0)', 91.40178541935455),
        ('PGA', 'HNZ', 183.7722361866693),
        ('PGA', 'HN1', 99.24999872535474),
        ('PGA', 'HN2', 81.23467239067368),
        ('PGA', 'GREATER_OF_TWO_HORIZONTALS', 99.24999872535474),
        ('SA(1.0)', 'ROTD(100.0)', 146.9023350124098),
        ('SA(1.0)', 'ROTD(50.0)', 106.03202302692158),
        ('SA(1.0)', 'HNZ', 27.74118995438756),
        ('SA(1.0)', 'HN1', 136.25041187387063),
        ('SA(1.0)', 'HN2', 84.69296738413021),
        ('SA(1.0)', 'GREATER_OF_TWO_HORIZONTALS', 136.25041187387063)
    ]
    pgms = m.pgms
    # Invalid IMT/IMC inputs must be dropped, leaving exactly these rows.
    assert len(pgms['IMT'].tolist()) == len(test_pgms)
    for target_imt, target_imc, value in test_pgms:
        sub_imt = pgms.loc[pgms['IMT'] == target_imt]
        df = sub_imt.loc[sub_imt['IMC'] == target_imc]
        assert len(df['IMT'].tolist()) == 1
        np.testing.assert_array_almost_equal(df['Result'].tolist()[0], value,
                                             decimal=10)
def test_end_to_end():
    """MetricsController end-to-end with config: every IMT/IMC value."""
    datafiles, _ = read_data_dir("geonet", "us1000778i", "20161113_110259_WTMC_20.V2A")
    stream = read_geonet(datafiles[0])[0]
    imcs = [
        "greater_of_two_horizontals",
        "channels",
        "rotd50",
        "rotd100",
        "invalid",
    ]
    imts = ["sa1.0", "PGA", "pgv", "invalid"]
    controller = MetricsController(imts, imcs, stream, config=config)
    expected = [
        ("PGV", "ROTD(100.0)", 114.24894584734818),
        ("PGV", "ROTD(50.0)", 81.55436750525355),
        ("PGV", "Z", 37.47740000000001),
        ("PGV", "H1", 100.81460000000004),
        ("PGV", "H2", 68.4354),
        ("PGV", "GREATER_OF_TWO_HORIZONTALS", 100.81460000000004),
        ("PGA", "ROTD(100.0)", 100.73875535385548),
        ("PGA", "ROTD(50.0)", 91.40178541935455),
        ("PGA", "Z", 183.7722361866693),
        ("PGA", "H1", 99.24999872535474),
        ("PGA", "H2", 81.23467239067368),
        ("PGA", "GREATER_OF_TWO_HORIZONTALS", 99.24999872535474),
        ("SA(1.000)", "ROTD(100.0)", 146.9023350124098),
        ("SA(1.000)", "ROTD(50.0)", 106.03202302692158),
        ("SA(1.000)", "Z", 27.74118995438756),
        ("SA(1.000)", "H1", 136.25041187387063),
        ("SA(1.000)", "H2", 84.69296738413021),
        ("SA(1.000)", "GREATER_OF_TWO_HORIZONTALS", 136.25041187387063),
    ]
    pgms = controller.pgms
    # Invalid IMT/IMC inputs are dropped, leaving exactly these rows.
    assert len(pgms) == len(expected)
    for imt, imc, value in expected:
        row = pgms.loc[imt, imc]
        assert len(row) == 1
        np.testing.assert_array_almost_equal(row["Result"], value, decimal=10)
def test_sa():
    """Spectral acceleration results, including high-frequency SA(0.010)."""
    datafiles, _ = read_data_dir("geonet", "us1000778i", "20161113_110259_WTMC_20.V2A")
    stream_v2 = read_geonet(datafiles[0])[0]
    # Per-channel velocity peaks (kept to mirror the sibling PGV test).
    sa_target = {}
    for trace in stream_v2:
        vtrace = trace.copy()
        vtrace.integrate()
        sa_target[vtrace.stats["channel"]] = np.abs(vtrace.max())
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        station_summary = StationSummary.from_stream(
            stream_v2,
            [
                "greater_of_two_horizontals",
                "geometric_mean",
                "rotd50",
                "arithmetic_mean",
                "rotd100",
                "gmrotd50",
                "channels",
            ],
            ["sa1.0", "sa0.01", "saincorrect"],
        )
    pgms = station_summary.pgms
    assert "SA(1.000)" in pgms.index.get_level_values(0)
    np.testing.assert_allclose(
        pgms.loc["SA(1.000)", "ARITHMETIC_MEAN"].Result, 110.47168962900042
    )
    np.testing.assert_allclose(
        pgms.loc["SA(1.000)", "GEOMETRIC_MEAN"].Result, 107.42183990654802
    )
    np.testing.assert_allclose(
        pgms.loc["SA(1.000)", "ROTD(50.0)"].Result, 106.03202302692158
    )
    np.testing.assert_allclose(
        pgms.loc["SA(1.000)", "ROTD(100.0)"].Result, 146.90233501240979
    )
    # Check high frequency SA.
    # NOTE: the original asserted the GMROTD(50.0) value twice; the exact
    # duplicate has been removed.
    np.testing.assert_allclose(pgms.loc["SA(0.010)", "ROTD(100.0)"].Result, 120.187153)
    np.testing.assert_allclose(pgms.loc["SA(0.010)", "GMROTD(50.0)"].Result, 95.355300)
    np.testing.assert_allclose(pgms.loc["SA(0.010)", "H1"].Result, 106.716122)
    np.testing.assert_allclose(pgms.loc["SA(0.010)", "H2"].Result, 90.497883)
def test_exceptions():
    """MetricsController error handling for unusable inputs.

    Replaces the original's misnamed 'passed == False' pattern with explicit
    'raised' flags, drops the unused exception bindings, and consolidates the
    three identical single-horizontal NaN checks into one loop.
    """
    ddir = os.path.join("data", "testdata", "geonet")
    homedir = pkg_resources.resource_filename("gmprocess", ddir)
    datafile_v2 = os.path.join(homedir, "us1000778i", "20161113_110259_WTMC_20.V2A")
    stream_v2 = read_geonet(datafile_v2)[0]

    # Check for origin Error: radial_transverse without an origin must raise.
    raised = False
    try:
        MetricsController("pga", "radial_transverse", stream_v2, config=config)
    except PGMException:
        raised = True
    assert raised

    # -------- Horizontal Channel Errors -----------
    st2 = stream_v2.select(component="[N1]")
    st3 = stream_v2.select(component="Z")
    st1 = StationStream([st2[0], st3[0]])

    # With only one horizontal channel each of these IMCs passes through
    # and produces a NaN result instead of raising.
    for imc in ("geometric_mean", "rotd50", "gmrotd50"):
        m = MetricsController("pga", imc, st1, config=config)
        assert np.isnan(m.pgms["Result"].tolist()[0])

    # No horizontal channels at all must raise.
    raised = False
    try:
        MetricsController("sa3.0", "channels", st3, config=config)
    except PGMException:
        raised = True
    assert raised
def test_pgv():
    """Per-channel PGV matches the integrated-trace peaks (dict access)."""
    datafiles, _ = read_data_dir('geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    stream = read_geonet(datafiles[0])[0]
    targets = {}
    for tr in stream:
        vel = tr.copy()
        vel.integrate()
        targets[vel.stats['channel']] = np.abs(vel.max())
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        summary = StationSummary.from_stream(
            stream,
            ['channels', 'greater_of_two_horizontals', 'gmrotd50'],
            ['pgv', 'sa1.0', 'saincorrect'])
    pgv = summary.pgms['PGV']
    for chan in ('HN2', 'HN1', 'HNZ'):
        np.testing.assert_almost_equal(pgv[chan], targets[chan])
def test_exceptions():
    """MetricsController error handling for unusable inputs.

    Replaces the original's 'passed == False' pattern with explicit 'raised'
    flags, drops the unused exception bindings, and consolidates the three
    identical single-horizontal NaN checks into one loop.
    """
    ddir = os.path.join('data', 'testdata', 'geonet')
    homedir = pkg_resources.resource_filename('gmprocess', ddir)
    datafile_v2 = os.path.join(homedir, 'us1000778i',
                               '20161113_110259_WTMC_20.V2A')
    stream_v2 = read_geonet(datafile_v2)[0]

    # Check for origin Error: radial_transverse without an origin must raise.
    raised = False
    try:
        MetricsController('pga', 'radial_transverse', stream_v2, config=config)
    except PGMException:
        raised = True
    assert raised

    # -------- Horizontal Channel Errors -----------
    st2 = stream_v2.select(component='[N1]')
    st3 = stream_v2.select(component='Z')
    st1 = StationStream([st2[0], st3[0]])

    # With only one horizontal channel each of these IMCs passes through
    # and produces a NaN result instead of raising.
    for imc in ('geometric_mean', 'rotd50', 'gmrotd50'):
        m = MetricsController('pga', imc, st1, config=config)
        assert np.isnan(m.pgms['Result'].tolist()[0])

    # No horizontal channels at all must raise.
    raised = False
    try:
        MetricsController('sa3.0', 'channels', st3, config=config)
    except PGMException:
        raised = True
    assert raised
def test_pga():
    """PGA values for every supported IMC on the WTMC record."""
    datafiles, _ = read_data_dir("geonet", "us1000778i", "20161113_110259_WTMC_20.V2A")
    stream = read_geonet(datafiles[0])[0]
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        summary = StationSummary.from_stream(
            stream,
            ["channels", "greater_of_two_horizontals", "gmrotd0", "gmrotd50",
             "gmrotd100", "rotd50", "geometric_mean", "arithmetic_mean"],
            ["pga", "sa1.0", "saincorrect"])
    pga = summary.pgms.loc["PGA"]
    np.testing.assert_allclose(pga.loc["ARITHMETIC_MEAN"].Result, 90.242335558014219)
    np.testing.assert_allclose(pga.loc["GEOMETRIC_MEAN"].Result, 89.791654017670112)
    np.testing.assert_allclose(pga.loc["H2"].Result, 81.234672390673683)
    np.testing.assert_allclose(pga.loc["H1"].Result, 99.249998725354743)
    np.testing.assert_almost_equal(pga.loc["Z"].Result, 183.77223618666929)
    np.testing.assert_allclose(
        pga.loc["GREATER_OF_TWO_HORIZONTALS"].Result, 99.249998725354743)
    np.testing.assert_allclose(pga.loc["GMROTD(0.0)"].Result, 83.487703753812113)
    np.testing.assert_allclose(pga.loc["GMROTD(50.0)"].Result, 86.758642638162982)
    np.testing.assert_allclose(pga.loc["GMROTD(100.0)"].Result, 89.791654017670112)
    np.testing.assert_allclose(pga.loc["ROTD(50.0)"].Result, 91.401785419354567)
def test_pga():
    """Per-channel and greater-of-two PGA values (H1/H2/Z names)."""
    datafiles, _ = read_data_dir('geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    stream = read_geonet(datafiles[0])[0]
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        summary = StationSummary.from_stream(
            stream,
            ['channels', 'greater_of_two_horizontals', 'gmrotd50',
             'gmrotd100', 'gmrotd0'],
            ['pga', 'sa1.0', 'saincorrect'])
    pga = summary.pgms[summary.pgms.IMT == 'PGA']
    expected = [('H2', 81.28979591836733),
                ('H1', 99.3173469387755),
                ('Z', 183.89693877551022),
                ('GREATER_OF_TWO_HORIZONTALS', 99.3173469387755)]
    for imc, value in expected:
        np.testing.assert_almost_equal(
            pga[pga.IMC == imc].Result.iloc[0], value, decimal=1)
def test_exceptions():
    """MetricsController error handling for unusable inputs (no config).

    Replaces the original's 'passed == False' pattern with explicit 'raised'
    flags, drops the unused exception bindings, and consolidates the three
    identical single-horizontal NaN checks into one loop.
    """
    ddir = os.path.join('data', 'testdata', 'geonet')
    homedir = pkg_resources.resource_filename('gmprocess', ddir)
    datafile_v2 = os.path.join(
        homedir, 'us1000778i', '20161113_110259_WTMC_20.V2A')
    stream_v2 = read_geonet(datafile_v2)[0]

    # Check for origin Error: radial_transverse without an origin must raise.
    raised = False
    try:
        MetricsController('pga', 'radial_transverse', stream_v2)
    except PGMException:
        raised = True
    assert raised

    # -------- Horizontal Channel Errors -----------
    st2 = stream_v2.select(component='[N1]')
    st3 = stream_v2.select(component='Z')
    st1 = StationStream([st2[0], st3[0]])

    # With only one horizontal channel each of these IMCs passes through
    # and produces a NaN result instead of raising.
    for imc in ('geometric_mean', 'rotd50', 'gmrotd50'):
        m = MetricsController('pga', imc, st1)
        assert np.isnan(m.pgms['Result'].tolist()[0])

    # No horizontal channels at all must raise.
    raised = False
    try:
        MetricsController('sa3.0', 'channels', st3)
    except PGMException:
        raised = True
    assert raised
def test():
    """StreamCollection channel grouping across knet and geonet data.

    The original computed an unused `homedir` (only referenced by dead
    commented-out code); both have been removed.
    """
    # Test for channel grouping with three unique channels.
    streams = []
    datafiles, origin = read_data_dir('knet', 'us2000cnnl',
                                      'AOM0031801241951*')
    for datafile in datafiles:
        streams += read_knet(datafile)
    grouped_streams = StreamCollection(streams)
    assert len(grouped_streams) == 1
    assert grouped_streams[0].count() == 3

    # Test for channel grouping with more file types.
    datafiles, origin = read_data_dir('geonet', 'us1000778i',
                                      '20161113_110313_THZ_20.V2A')
    datafile = datafiles[0]
    streams += read_geonet(datafile)
    grouped_streams = StreamCollection(streams)
    assert len(grouped_streams) == 2
    assert grouped_streams[0].count() == 3
    assert grouped_streams[1].count() == 3

    # Test for warning for one channel streams.
    datafiles, origin = read_data_dir(
        'knet', 'us2000cnnl', 'AOM0071801241951.UD')
    datafile = datafiles[0]
    streams += read_knet(datafile)
    grouped_streams = StreamCollection(streams)
    assert len(grouped_streams) == 3
    assert grouped_streams[0].count() == 3
    assert grouped_streams[1].count() == 3
    assert grouped_streams[2].count() == 1
def test_pgv():
    """Per-channel PGV (HN names) matches the integrated-trace peaks."""
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    stream = read_geonet(datafiles[0])[0]
    targets = {}
    for tr in stream:
        vel = tr.copy()
        vel.integrate()
        targets[vel.stats['channel']] = np.abs(vel.max())
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        summary = StationSummary.from_stream(
            stream,
            ['channels', 'greater_of_two_horizontals', 'gmrotd50'],
            ['pgv', 'sa1.0', 'saincorrect'])
    pgv = summary.pgms[summary.pgms.IMT == 'PGV']
    for chan in ('HN2', 'HN1', 'HNZ'):
        np.testing.assert_almost_equal(
            pgv[pgv.IMC == chan].Result.iloc[0], targets[chan])
def test_pga():
    """Per-channel and greater-of-two PGA values (HN names)."""
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    stream = read_geonet(datafiles[0])[0]
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        summary = StationSummary.from_stream(
            stream,
            ['channels', 'greater_of_two_horizontals', 'gmrotd50',
             'gmrotd100', 'gmrotd0'],
            ['pga', 'sa1.0', 'saincorrect'])
    pga = summary.pgms[summary.pgms.IMT == 'PGA']
    expected = [('HN2', 81.28979591836733),
                ('HN1', 99.3173469387755),
                ('HNZ', 183.89693877551022),
                ('GREATER_OF_TWO_HORIZONTALS', 99.3173469387755)]
    for imc, value in expected:
        np.testing.assert_almost_equal(
            pga[pga.IMC == imc].Result.iloc[0], value, decimal=1)
def retrieveData(self, event_dict):
    """Retrieve data from GeoNet FTP, turn into StreamCollection.

    Args:
        event_dict (dict):
            Best dictionary matching input event, fields as above
            in return of getMatchingEvents().

    Returns:
        StreamCollection: StreamCollection object.
    """
    rawdir = self.rawdir
    if self.rawdir is None:
        rawdir = tempfile.mkdtemp()
    elif not os.path.isdir(rawdir):
        os.makedirs(rawdir)

    # Build the FTP URL for the month of the event.
    etime = event_dict['time']
    neturl = GEOBASE.replace('[YEAR]', str(etime.year))
    monthstr = etime.strftime('%m_%b')
    neturl = neturl.replace('[MONTH]', monthstr)
    urlparts = urllib.parse.urlparse(neturl)
    ftp = ftplib.FTP(urlparts.netloc)
    ftp.login()  # anonymous
    dirparts = urlparts.path.strip('/').split('/')
    for d in dirparts:
        try:
            ftp.cwd(d)
        except ftplib.error_perm as msg:
            raise Exception(msg)
    # cd to the desired output folder
    # NOTE: this changes the process-wide working directory.
    os.chdir(rawdir)
    datafiles = []

    # we cannot depend on the time given to us by the GeoNet catalog to
    # match the directory name on the FTP site, so we must do a secondary
    # matching.
    dirlist = ftp.nlst()
    fname = _match_closest_time(etime, dirlist)

    try:
        ftp.cwd(fname)
    except ftplib.error_perm:
        msg = 'Could not find an FTP data folder called "%s". Returning.' % (
            urllib.parse.urljoin(neturl, fname))
        raise Exception(msg)

    dirlist = ftp.nlst()
    for volume in dirlist:
        if volume.startswith('Vol1'):
            ftp.cwd(volume)
            if 'data' not in ftp.nlst():
                ftp.cwd('..')
                continue
            ftp.cwd('data')
            flist = ftp.nlst()
            for ftpfile in flist:
                # Only V1A files are downloaded from Vol1 folders.
                if not ftpfile.endswith('V1A'):
                    continue
                localfile = os.path.join(os.getcwd(), ftpfile)
                if localfile in datafiles:
                    continue
                datafiles.append(localfile)
                # Context manager guarantees the file handle is closed
                # even if the FTP transfer raises (the original leaked
                # the handle on error).
                with open(localfile, 'wb') as f:
                    logging.info('Retrieving remote file %s...\n' % ftpfile)
                    ftp.retrbinary('RETR %s' % ftpfile, f.write)
            ftp.cwd('..')
            ftp.cwd('..')
    ftp.quit()

    streams = []
    for dfile in datafiles:
        logging.info('Reading GeoNet file %s...' % dfile)
        try:
            tstreams = read_geonet(dfile)
            streams += tstreams
        except Exception as e:
            fmt = 'Failed to read GeoNet file "%s" due to error "%s". Continuing.'
            tpl = (dfile, str(e))
            # logging.warn is deprecated; warning is the supported name.
            logging.warning(fmt % tpl)

    # Clean up the temporary download directory if we created one.
    if self.rawdir is None:
        shutil.rmtree(rawdir)

    stream_collection = StreamCollection(streams=streams,
                                         drop_non_free=self.drop_non_free)
    return stream_collection
def test_stationsummary():
    """Exercise StationSummary: components, IMTs, PGM values, config, XML."""
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile = datafiles[0]
    origin = Origin(latitude=42.6925, longitude=173.021944)
    # Expected component (IMC) and intensity-measure-type (IMT) labels.
    target_imcs = np.sort(np.asarray(['GREATER_OF_TWO_HORIZONTALS',
                                      'HN1', 'HN2', 'HNZ', 'ROTD(50.0)',
                                      'ROTD(100.0)']))
    target_imts = np.sort(np.asarray(['SA(1.0)', 'PGA', 'PGV']))
    stream = read_geonet(datafile)[0]
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        # 'invalid' entries should be silently dropped by from_stream.
        stream_summary = StationSummary.from_stream(
            stream,
            ['greater_of_two_horizontals', 'channels', 'rotd50',
             'rotd100', 'invalid'],
            ['sa1.0', 'PGA', 'pgv', 'invalid'], origin)
    # Assigning to .stream should not replace the stored stream.
    original_stream = stream_summary.stream
    stream_summary.stream = []
    final_stream = stream_summary.stream
    assert original_stream == final_stream
    original_code = stream_summary.station_code
    np.testing.assert_array_equal(np.sort(stream_summary.components),
                                  target_imcs)
    np.testing.assert_array_equal(np.sort(stream_summary.imts),
                                  target_imts)
    np.testing.assert_almost_equal(stream_summary.get_pgm('PGA', 'HN1'),
                                   99.3173469387755, decimal=1)
    # Available IMC/IMT catalogs exposed by the summary object.
    target_available = np.sort(np.asarray([
        'greater_of_two_horizontals', 'geometric_mean', 'arithmetic_mean',
        'channels', 'gmrotd', 'rotd', 'quadratic_mean',
        'radial_transverse']))
    imcs = stream_summary.available_imcs
    np.testing.assert_array_equal(np.sort(imcs), target_available)
    target_available = np.sort(np.asarray(['pga', 'pgv', 'sa', 'arias',
                                           'fas']))
    imts = stream_summary.available_imts
    np.testing.assert_array_equal(np.sort(imts), target_available)
    # Reference PGM values for every (IMT, IMC) pair requested above.
    test_pgms = {
        'PGV': {
            'ROTD(100.0)': 114.24894584734818,
            'ROTD(50.0)': 81.55436750525355,
            'HNZ': 37.47740000000001,
            'HN1': 100.81460000000004,
            'HN2': 68.4354,
            'GREATER_OF_TWO_HORIZONTALS': 100.81460000000004},
        'PGA': {
            'ROTD(100.0)': 100.73875535385548,
            'ROTD(50.0)': 91.40178541935455,
            'HNZ': 183.7722361866693,
            'HN1': 99.24999872535474,
            'HN2': 81.23467239067368,
            'GREATER_OF_TWO_HORIZONTALS': 99.24999872535474},
        'SA(1.0)': {
            'ROTD(100.0)': 146.9023350124098,
            'ROTD(50.0)': 106.03202302692158,
            'HNZ': 27.74118995438756,
            'HN1': 136.25041187387063,
            'HN2': 84.69296738413021,
            'GREATER_OF_TWO_HORIZONTALS': 136.25041187387063}
    }
    pgms = stream_summary.pgms
    # Each (IMT, IMC) pair must appear exactly once with the value above.
    for imt_str in test_pgms:
        for imc_str in test_pgms[imt_str]:
            imt = pgms.loc[pgms['IMT'] == imt_str]
            imc = imt.loc[imt['IMC'] == imc_str]
            results = imc.Result.tolist()
            assert len(results) == 1
            np.testing.assert_almost_equal(results[0],
                                           test_pgms[imt_str][imc_str],
                                           decimal=10)

    # Test with fas
    stream = read_geonet(datafile)[0]
    stream_summary = StationSummary.from_stream(
        stream,
        ['greater_of_two_horizontals', 'channels', 'geometric_mean'],
        ['sa1.0', 'PGA', 'pgv', 'fas2.0'])
    target_imcs = np.sort(np.asarray(['GREATER_OF_TWO_HORIZONTALS',
                                      'HN1', 'HN2', 'HNZ',
                                      'GEOMETRIC_MEAN']))
    target_imts = np.sort(np.asarray(['SA(1.0)', 'PGA', 'PGV',
                                      'FAS(2.0)']))
    np.testing.assert_array_equal(np.sort(stream_summary.components),
                                  target_imcs)
    np.testing.assert_array_equal(np.sort(stream_summary.imts),
                                  target_imts)

    # Test config use
    stream = read_geonet(datafile)[0]
    stream_summary = StationSummary.from_config(stream)
    target_imcs = np.sort(np.asarray(['GREATER_OF_TWO_HORIZONTALS',
                                      'HN1', 'HN2', 'HNZ']))
    target_imts = np.sort(np.asarray(['SA(1.0)', 'SA(2.0)', 'SA(3.0)',
                                      'SA(0.3)', 'PGA', 'PGV', 'FAS(1.0)',
                                      'FAS(2.0)', 'FAS(3.0)',
                                      'FAS(0.3)']))
    # Defaults expected from the packaged config.
    assert(stream_summary.smoothing == 'konno_ohmachi')
    assert(stream_summary.bandwidth == 20.0)
    assert(stream_summary.damping == 0.05)

    # test XML output
    stream = read_geonet(datafile)[0]
    imclist = ['greater_of_two_horizontals',
               'channels',
               'rotd50.0',
               'rotd100.0']
    imtlist = ['sa1.0', 'PGA', 'pgv', 'fas2.0', 'arias']
    stream_summary = StationSummary.from_stream(stream, imclist, imtlist)
    xmlstr = stream_summary.getMetricXML()
    print(xmlstr.decode('utf-8'))
    # XML round-trip must preserve components and IMTs.
    stream2 = StationSummary.fromMetricXML(xmlstr)
    cmp1 = np.sort(['GREATER_OF_TWO_HORIZONTALS', 'HN1', 'HN2', 'HNZ',
                    'ROTD100.0', 'ROTD50.0'])
    cmp2 = np.sort(stream2.components)
    np.testing.assert_array_equal(cmp1, cmp2)
    imt1 = np.sort(stream_summary.imts)
    imt2 = np.sort(stream2.imts)
    np.testing.assert_array_equal(imt1, imt2)
def test():
    """Test the GeoNet reader against known peak values from V1/V2 files."""
    dpath_2016 = os.path.join('data', 'testdata', 'geonet', 'us1000778i')
    datadir_2016 = pkg_resources.resource_filename('gmprocess', dpath_2016)
    dpath_2018 = os.path.join('data', 'testdata', 'geonet', 'nz2018p115908')
    datadir_2018 = pkg_resources.resource_filename('gmprocess', dpath_2018)

    # first test a non-geonet file
    # BUG FIX: the previous try/except-AssertionError construct passed
    # even when is_geonet() wrongly returned True; assert the negation.
    assert not is_geonet(os.path.abspath(__file__))

    # loop over some events that test different properties
    comps = [('20161113_110259_WTMC_20.V1A', 'V1 file w/ remainder row',
              -1102.6, 922.9, 3154.1),
             ('20161113_110259_WTMC_20.V2A', 'V2 file w/ remainder row',
              -973.31, 796.64, 1802.19),
             ('20161113_110313_THZ_20.V1A', 'V1 file w/out remainder row',
              39.97, 48.46, -24.91)]
    for fname, desc, *test_vals in comps:
        print('Testing %s, %s...' % (fname, desc))
        geonet_file = os.path.join(datadir_2016, fname)
        assert is_geonet(geonet_file)
        stream = read_geonet(geonet_file)[0]
        for trace, expected in zip(stream, test_vals):
            np.testing.assert_almost_equal(trace.max(), expected, decimal=1)

    comps = [('20180212_211557_WPWS_20.V2A', 'V2 file w/out remainder row',
              -4.16, -19.40, -2.73)]
    for fname, desc, *test_vals in comps:
        print('Testing %s, %s...' % (fname, desc))
        geonet_file = os.path.join(datadir_2018, fname)
        assert is_geonet(geonet_file)
        stream = read_geonet(geonet_file)[0]
        for trace, expected in zip(stream, test_vals):
            np.testing.assert_almost_equal(trace.max(), expected, decimal=1)

    # test the velocity values from one of the V2 files
    comps = [('20180212_211557_WPWS_20.V2A', 0.165, 0.509, -0.091)]
    for comp in comps:
        geonet_file = os.path.join(datadir_2018, comp[0])
        stream = read_geonet(geonet_file)[0]
        traces = []
        for trace in stream:
            # Detrend/taper/filter before integrating to velocity.
            vtrace = trace.copy()
            vtrace.detrend('linear')
            vtrace.detrend('demean')
            vtrace.taper(max_percentage=0.05, type='cosine')
            vtrace.filter('highpass', freq=FILTER_FREQ,
                          zerophase=True, corners=CORNERS)
            vtrace.detrend('linear')
            vtrace.detrend('demean')
            vtrace.integrate()
            traces.append(vtrace)
        # Integrated peaks should be within 5% of the reference values.
        assert traces[0].max() / comp[1] >= 0.95
        assert traces[1].max() / comp[2] >= 0.95
        assert traces[2].max() / comp[3] >= 0.95
def test_controller():
    """Exercise MetricsController over IMT/IMC combinations (acc then vel).

    NOTE(review): another ``test_controller`` defined later in this file
    rebinds the name at import time, so this definition is shadowed and
    never collected by pytest — confirm which version is intended. This
    one expects 'H1'/'H2'/'Z' channel labels and 'SA(1.000)'-style IMT
    names; the later one expects 'HN1'/'HN2'/'HNZ' and 'SA(1.0)'.
    """
    datafiles, event = read_data_dir('geonet', 'us1000778i',
                                     '20161113_110259_WTMC_20.V2A')
    datafile = datafiles[0]
    input_imts = [
        'pgv', 'pga', 'sa2', 'sa1.0', 'sa0.3', 'fas2', 'fas1.0', 'fas0.3',
        'arias', 'invalid'
    ]
    input_imcs = [
        'rotd50', 'rotd100.0', 'gmrotd50', 'gmrotd100.0',
        'radial_transverse', 'geometric_mean', 'arithmetic_mean',
        'channels', 'greater_of_two_horizontals', 'invalid',
        'quadratic_mean'
    ]
    stream_v2 = read_geonet(datafile)[0]

    # Testing for acceleration --------------------------
    m1 = MetricsController(input_imts, input_imcs, stream_v2, event=event)
    pgms = m1.pgms

    # testing for pga, pgv, sa
    # Every scalar IMT should have been computed for all of these IMCs;
    # 'invalid' entries are expected to be dropped.
    target_imcs = [
        'ROTD(50.0)', 'ROTD(100.0)', 'GMROTD(50.0)', 'GMROTD(100.0)',
        'HNR', 'HNT', 'GEOMETRIC_MEAN', 'ARITHMETIC_MEAN', 'H1', 'H2',
        'Z', 'GREATER_OF_TWO_HORIZONTALS', 'QUADRATIC_MEAN'
    ]
    for col in ['PGA', 'PGV', 'SA(1.000)', 'SA(2.000)', 'SA(0.300)']:
        imt = pgms.loc[pgms['IMT'] == col]
        imcs = imt['IMC'].tolist()
        assert len(imcs) == len(target_imcs)
        np.testing.assert_array_equal(np.sort(imcs), np.sort(target_imcs))

    # testing for fas
    # FAS is only computed for the three mean-type IMCs.
    for col in ['FAS(1.000)', 'FAS(2.000)', 'FAS(0.300)']:
        imt = pgms.loc[pgms['IMT'] == col]
        imcs = imt['IMC'].tolist()
        assert len(imcs) == 3
        np.testing.assert_array_equal(
            np.sort(imcs),
            ['ARITHMETIC_MEAN', 'GEOMETRIC_MEAN', 'QUADRATIC_MEAN'])

    # testing for arias
    imt = pgms.loc[pgms['IMT'] == 'ARIAS']
    imcs = imt['IMC'].tolist()
    assert len(imcs) == 1
    np.testing.assert_array_equal(np.sort(imcs), ['ARITHMETIC_MEAN'])
    _validate_steps(m1.step_sets, 'acc')

    # Testing for Velocity --------------------------
    # Relabel the traces as velocity and recompute all metrics.
    for trace in stream_v2:
        trace.stats.standard.units = 'vel'
    m = MetricsController(input_imts, input_imcs, stream_v2, event=event)
    pgms = m.pgms

    # testing for pga, pgv, sa
    target_imcs = [
        'ROTD(50.0)', 'ROTD(100.0)', 'GMROTD(50.0)', 'GMROTD(100.0)',
        'HNR', 'HNT', 'GEOMETRIC_MEAN', 'ARITHMETIC_MEAN',
        'QUADRATIC_MEAN', 'H1', 'H2', 'Z', 'GREATER_OF_TWO_HORIZONTALS'
    ]
    for col in ['PGA', 'PGV', 'SA(1.000)', 'SA(2.000)', 'SA(0.300)']:
        imt = pgms.loc[pgms['IMT'] == col]
        imcs = imt['IMC'].tolist()
        assert len(imcs) == len(target_imcs)
        np.testing.assert_array_equal(np.sort(imcs), np.sort(target_imcs))

    # testing for fas
    for col in ['FAS(1.000)', 'FAS(2.000)', 'FAS(0.300)']:
        imt = pgms.loc[pgms['IMT'] == col]
        imcs = imt['IMC'].tolist()
        assert len(imcs) == 3
        np.testing.assert_array_equal(
            np.sort(imcs),
            ['ARITHMETIC_MEAN', 'GEOMETRIC_MEAN', 'QUADRATIC_MEAN'])

    # testing for arias
    imt = pgms.loc[pgms['IMT'] == 'ARIAS']
    imcs = imt['IMC'].tolist()
    assert len(imcs) == 1
    np.testing.assert_array_equal(np.sort(imcs), ['ARITHMETIC_MEAN'])
    _validate_steps(m.step_sets, 'vel')
def test_controller():
    """Exercise MetricsController IMT/IMC combinations for acc and vel input."""
    datafiles, event = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    stream = read_geonet(datafiles[0])[0]

    input_imts = ['pgv', 'pga', 'sa2', 'sa1.0', 'sa0.3',
                  'fas2', 'fas1.0', 'fas0.3', 'arias', 'invalid']
    input_imcs = ['rotd50', 'rotd100.0', 'gmrotd50', 'gmrotd100.0',
                  'radial_transverse', 'geometric_mean', 'arithmetic_mean',
                  'channels', 'greater_of_two_horizontals', 'invalid',
                  'quadratic_mean']

    # IMCs expected for every scalar IMT ('invalid' must be dropped).
    scalar_imcs = ['ROTD(50.0)', 'ROTD(100.0)', 'GMROTD(50.0)',
                   'GMROTD(100.0)', 'HNR', 'HNT', 'GEOMETRIC_MEAN',
                   'ARITHMETIC_MEAN', 'HN1', 'HN2', 'HNZ',
                   'GREATER_OF_TWO_HORIZONTALS', 'QUADRATIC_MEAN']
    # FAS is only computed for the mean-type IMCs.
    fas_imcs = ['ARITHMETIC_MEAN', 'GEOMETRIC_MEAN', 'QUADRATIC_MEAN']

    def _check(controller, step_label):
        # Verify the full set of computed IMCs for every IMT category.
        pgms = controller.pgms
        for imt_name in ['PGA', 'PGV', 'SA(1.0)', 'SA(2.0)', 'SA(0.3)']:
            found = pgms.loc[pgms['IMT'] == imt_name]['IMC'].tolist()
            assert len(found) == len(scalar_imcs)
            np.testing.assert_array_equal(np.sort(found),
                                          np.sort(scalar_imcs))
        for imt_name in ['FAS(1.0)', 'FAS(2.0)', 'FAS(0.3)']:
            found = pgms.loc[pgms['IMT'] == imt_name]['IMC'].tolist()
            assert len(found) == 3
            np.testing.assert_array_equal(np.sort(found), fas_imcs)
        found = pgms.loc[pgms['IMT'] == 'ARIAS']['IMC'].tolist()
        assert len(found) == 1
        np.testing.assert_array_equal(np.sort(found), ['ARITHMETIC_MEAN'])
        _validate_steps(controller.step_sets, step_label)

    # Testing for acceleration --------------------------
    _check(MetricsController(input_imts, input_imcs, stream, event=event),
           'acc')

    # Testing for Velocity --------------------------
    # Relabel the traces as velocity and recompute all metrics.
    for trace in stream:
        trace.stats.standard.units = 'vel'
    _check(MetricsController(input_imts, input_imcs, stream, event=event),
           'vel')