def test_corner_frequencies():
    """Check SNR-based corner frequency selection on the GeoNet test data.

    The default config uses the 'constant' corner frequency method, so the
    need here is to force the 'snr' method.
    """
    data_files, origin = read_data_dir('geonet', 'us1000778i', '*.V1A')
    streams = []
    for f in data_files:
        streams += read_data(f)

    sc = StreamCollection(streams)

    config = get_config()
    window_conf = config['windows']

    processed_streams = sc.copy()
    for st in processed_streams:
        if st.passed:
            # Estimate noise/signal split time
            event_time = origin.time
            event_lon = origin.longitude
            event_lat = origin.latitude
            st = signal_split(st, origin)

            # Estimate end of signal
            end_conf = window_conf['signal_end']
            event_mag = origin.magnitude
            # NOTE: removed leftover debug print(st) here.
            st = signal_end(
                st,
                event_time=event_time,
                event_lon=event_lon,
                event_lat=event_lat,
                event_mag=event_mag,
                **end_conf
            )
            wcheck_conf = window_conf['window_checks']
            st = window_checks(
                st,
                min_noise_duration=wcheck_conf['min_noise_duration'],
                min_signal_duration=wcheck_conf['min_signal_duration']
            )

    pconfig = config['processing']

    # Run SNR check
    # I think we don't do this anymore.
    test = [
        d for d in pconfig if list(d.keys())[0] == 'compute_snr'
    ]
    snr_config = test[0]['compute_snr']
    for stream in processed_streams:
        stream = compute_snr(
            stream,
            **snr_config
        )

    # Run get_corner_frequencies
    test = [
        d for d in pconfig
        if list(d.keys())[0] == 'get_corner_frequencies'
    ]
    cf_config = test[0]['get_corner_frequencies']
    snr_config = cf_config['snr']

    lp = []
    hp = []
    for stream in processed_streams:
        if not stream.passed:
            continue
        stream = get_corner_frequencies(
            stream,
            method="snr",
            snr=snr_config
        )
        if stream[0].hasParameter('corner_frequencies'):
            cfdict = stream[0].getParameter('corner_frequencies')
            lp.append(cfdict['lowpass'])
            hp.append(cfdict['highpass'])
    np.testing.assert_allclose(
        np.sort(hp),
        [0.00751431, 0.01354455, 0.04250735],
        atol=1e-6
    )
def test_StreamCollection():
    """Exercise StreamCollection construction, combination, and export.

    Reads USC and DMG test data, checks the container protocol methods
    (len, bool, indexing), duplicate-station merging via `+` and `append`,
    `from_directory`, and `to_dataframe`.
    """
    # read usc data
    dpath = os.path.join('data', 'testdata', 'usc', 'ci3144585')
    directory = pkg_resources.resource_filename('gmprocess', dpath)
    usc_streams, unprocessed_files, unprocessed_file_errors = \
        directory_to_streams(directory)
    assert len(usc_streams) == 7

    usc_sc = StreamCollection(usc_streams)

    # Use print method
    print(usc_sc)

    # Use len method
    assert len(usc_sc) == 3

    # Use nonzero method
    assert bool(usc_sc)

    # Slice
    lengths = [
        len(usc_sc[0]),
        len(usc_sc[1]),
        len(usc_sc[2])
    ]
    sort_lengths = np.sort(lengths)
    assert sort_lengths[0] == 1
    assert sort_lengths[1] == 3
    assert sort_lengths[2] == 3

    # read dmg data
    dpath = os.path.join('data', 'testdata', 'dmg', 'ci3144585')
    directory = pkg_resources.resource_filename('gmprocess', dpath)
    dmg_streams, unprocessed_files, unprocessed_file_errors = \
        directory_to_streams(directory)
    assert len(dmg_streams) == 1

    dmg_sc = StreamCollection(dmg_streams)

    # Has one station
    assert len(dmg_sc) == 1
    # With 3 channels
    assert len(dmg_sc[0]) == 3

    # So this should have 4 stations
    test1 = dmg_sc + usc_sc
    assert len(test1) == 4

    # Overwrite the dmg station and network to force it to be
    # a duplicate of one of the stations in usc_sc to check if
    # validation works with these addition methods
    for tr in dmg_sc[0]:
        tr.stats['network'] = 'LA'
        tr.stats['station'] = '57'

    test3 = dmg_sc + usc_sc
    assert len(test3) == 3

    # usc_sc has 1 channel for station 57 and the modified
    # dmg_sc has 3 channels so the combined StreamCollection
    # should have 4
    assert len(test3[0]) == 4

    test_copy = dmg_sc.copy()
    assert test_copy[0][0].stats['standard']['process_level'] == \
        'corrected physical units'

    # Appending dmg should not add to length because of the
    # overwriting of the station/network above
    stream1 = test_copy[0]
    test_append = usc_sc.append(stream1)
    assert len(test_append) == 3

    # Change back to unique values for station/network
    for tr in dmg_sc[0]:
        tr.stats['network'] = 'LALALA'
        tr.stats['station'] = '575757'
    stream2 = dmg_sc[0]
    test_append = usc_sc.append(stream2)
    assert len(test_append) == 4

    # Check the from_directory method
    sc_test = StreamCollection.from_directory(directory)
    assert len(sc_test) == 1

    # Test to_dataframe
    jsonfile = os.path.join(directory, 'event.json')
    with open(jsonfile, 'rt') as f:
        origin = json.load(f)
    dmg_df = sc_test.to_dataframe(origin)
    # Fixed: atol was 1e5 (100000), which made this assertion vacuous for a
    # value of ~0.1456. The intended tolerance is 1e-5.
    np.testing.assert_allclose(
        dmg_df['HN1']['PGA'],
        0.145615,
        atol=1e-5)
def test_StreamCollection():
    """Exercise StreamCollection construction, combination, and export.

    Reads USC and DMG test data, checks the container protocol methods
    (len, bool, indexing), `+` and `append`, `from_directory`,
    `to_dataframe`, and `from_traces`.
    """
    # read usc data
    dpath = os.path.join('data', 'testdata', 'usc', 'ci3144585')
    directory = pkg_resources.resource_filename('gmprocess', dpath)
    usc_streams, unprocessed_files, unprocessed_file_errors = \
        directory_to_streams(directory)
    assert len(usc_streams) == 7

    usc_sc = StreamCollection(usc_streams)

    # Use print method
    print(usc_sc)

    # Use len method
    assert len(usc_sc) == 3

    # Use nonzero method
    assert bool(usc_sc)

    # Slice
    lengths = [len(usc_sc[0]), len(usc_sc[1]), len(usc_sc[2])]
    sort_lengths = np.sort(lengths)
    assert sort_lengths[0] == 1
    assert sort_lengths[1] == 3
    assert sort_lengths[2] == 3

    # read dmg data
    dpath = os.path.join('data', 'testdata', 'dmg', 'ci3144585')
    directory = pkg_resources.resource_filename('gmprocess', dpath)
    dmg_streams, unprocessed_files, unprocessed_file_errors = \
        directory_to_streams(directory)
    assert len(dmg_streams) == 1

    dmg_sc = StreamCollection(dmg_streams)

    # Has one station
    assert len(dmg_sc) == 1
    # With 3 channels
    assert len(dmg_sc[0]) == 3

    # So this should have 4 stations
    test1 = dmg_sc + usc_sc
    assert len(test1) == 4

    test_copy = dmg_sc.copy()
    assert test_copy[0][0].stats['standard']['process_level'] == \
        'uncorrected physical units'

    stream1 = test_copy[0]
    test_append = usc_sc.append(stream1)
    assert len(test_append) == 4

    # Change back to unique values for station/network
    for tr in dmg_sc[0]:
        tr.stats['network'] = 'LALALA'
        tr.stats['station'] = '575757'
    stream2 = dmg_sc[0]
    test_append = usc_sc.append(stream2)
    assert len(test_append) == 4

    # Check the from_directory method
    sc_test = StreamCollection.from_directory(directory)
    assert len(sc_test) == 1

    # Test to_dataframe
    jsonfile = os.path.join(directory, 'event.json')
    with open(jsonfile, 'rt') as f:
        origin = json.load(f)
    dmg_df = sc_test.to_dataframe(origin)
    # Fixed: atol was 1e5 (100000), which made this assertion vacuous for a
    # value of ~0.1456. The intended tolerance is 1e-5.
    np.testing.assert_allclose(dmg_df['H1']['PGA'], 0.145615, atol=1e-5)

    # Check the from_traces method
    traces = []
    for st in sc_test:
        for tr in st:
            traces.append(tr)
    sc_test = StreamCollection.from_traces(traces)
    assert len(sc_test) == 1
def test_corner_frequencies():
    """Check SNR-based corner frequency selection on the GeoNet test data.

    The default config uses the 'constant' corner frequency method, so the
    need here is to force the 'snr' method. Runs the selection twice, once
    with `same_horiz=False` and once with `same_horiz=True`, and verifies
    the resulting highpass/lowpass corners for station HSES.
    """
    data_files, origin = read_data_dir('geonet', 'us1000778i', '*.V1A')
    streams = []
    for f in data_files:
        streams += read_data(f)

    sc = StreamCollection(streams)

    config = get_config()
    window_conf = config['windows']

    processed_streams = sc.copy()
    for st in processed_streams:
        if st.passed:
            # Estimate noise/signal split time
            event_time = origin.time
            event_lon = origin.longitude
            event_lat = origin.latitude
            st = signal_split(st, origin)

            # Estimate end of signal
            end_conf = window_conf['signal_end']
            event_mag = origin.magnitude
            # NOTE: removed leftover debug print(st) here.
            st = signal_end(st,
                            event_time=event_time,
                            event_lon=event_lon,
                            event_lat=event_lat,
                            event_mag=event_mag,
                            **end_conf)
            wcheck_conf = window_conf['window_checks']
            st = window_checks(
                st,
                min_noise_duration=wcheck_conf['min_noise_duration'],
                min_signal_duration=wcheck_conf['min_signal_duration'])

    pconfig = config['processing']

    # Run SNR check
    # I think we don't do this anymore.
    test = [d for d in pconfig if list(d.keys())[0] == 'compute_snr']
    snr_config = test[0]['compute_snr']
    for stream in processed_streams:
        stream = compute_snr(stream, **snr_config)

    # Run get_corner_frequencies
    test = [
        d for d in pconfig
        if list(d.keys())[0] == 'get_corner_frequencies'
    ]
    cf_config = test[0]['get_corner_frequencies']
    snr_config = cf_config['snr']

    # With same_horiz False
    snr_config['same_horiz'] = False

    lp = []
    hp = []
    for stream in processed_streams:
        if not stream.passed:
            continue
        stream = get_corner_frequencies(stream, method="snr",
                                        snr=snr_config)
        if stream[0].hasParameter('corner_frequencies'):
            cfdict = stream[0].getParameter('corner_frequencies')
            lp.append(cfdict['lowpass'])
            hp.append(cfdict['highpass'])
    np.testing.assert_allclose(np.sort(hp),
                               [0.00751431, 0.01354455, 0.04250735],
                               atol=1e-6)

    st = processed_streams.select(station='HSES')[0]
    lps = [tr.getParameter('corner_frequencies')['lowpass'] for tr in st]
    hps = [tr.getParameter('corner_frequencies')['highpass'] for tr in st]
    np.testing.assert_allclose(np.sort(lps), [100., 100., 100.],
                               atol=1e-6)
    np.testing.assert_allclose(np.sort(hps),
                               [0.00305176, 0.00751431, 0.02527502],
                               atol=1e-6)

    # With same_horiz True
    snr_config['same_horiz'] = True

    lp = []
    hp = []
    for stream in processed_streams:
        if not stream.passed:
            continue
        stream = get_corner_frequencies(stream, method="snr",
                                        snr=snr_config)
        if stream[0].hasParameter('corner_frequencies'):
            cfdict = stream[0].getParameter('corner_frequencies')
            lp.append(cfdict['lowpass'])
            hp.append(cfdict['highpass'])
    np.testing.assert_allclose(np.sort(hp),
                               [0.00751431, 0.01354455, 0.04882812],
                               atol=1e-6)

    st = processed_streams.select(station='HSES')[0]
    lps = [tr.getParameter('corner_frequencies')['lowpass'] for tr in st]
    hps = [tr.getParameter('corner_frequencies')['highpass'] for tr in st]
    np.testing.assert_allclose(np.sort(lps), [100., 100., 100.],
                               atol=1e-6)
    np.testing.assert_allclose(np.sort(hps),
                               [0.00751431, 0.00751431, 0.02527502],
                               atol=1e-6)
def test_StreamCollection():
    """Exercise StreamCollection construction, combination, and export.

    Reads USC and DMG test data, checks the container protocol methods
    (len, bool, indexing), `+` and `append`, `from_directory`, and
    `to_dataframe`.
    """
    # read usc data
    dpath = os.path.join('data', 'testdata', 'usc', 'ci3144585')
    directory = pkg_resources.resource_filename('gmprocess', dpath)
    usc_streams, unprocessed_files, unprocessed_file_errors = \
        directory_to_streams(directory)
    assert len(usc_streams) == 7

    usc_sc = StreamCollection(usc_streams)

    # Use print method
    print(usc_sc)

    # Use len method
    assert len(usc_sc) == 3

    # Use nonzero method
    assert bool(usc_sc)

    # Slice
    lengths = [
        len(usc_sc[0]),
        len(usc_sc[1]),
        len(usc_sc[2])
    ]
    sort_lengths = np.sort(lengths)
    assert sort_lengths[0] == 1
    assert sort_lengths[1] == 3
    assert sort_lengths[2] == 3

    # read dmg data
    dpath = os.path.join('data', 'testdata', 'dmg', 'ci3144585')
    directory = pkg_resources.resource_filename('gmprocess', dpath)
    dmg_streams, unprocessed_files, unprocessed_file_errors = \
        directory_to_streams(directory)
    assert len(dmg_streams) == 1

    dmg_sc = StreamCollection(dmg_streams)

    # Has one station
    assert len(dmg_sc) == 1
    # With 3 channels
    assert len(dmg_sc[0]) == 3

    # So this should have 4 stations
    test1 = dmg_sc + usc_sc
    assert len(test1) == 4

    test_copy = dmg_sc.copy()
    assert test_copy[0][0].stats['standard']['process_level'] == \
        'corrected physical units'

    stream1 = test_copy[0]
    test_append = usc_sc.append(stream1)
    assert len(test_append) == 4

    # Change back to unique values for station/network
    for tr in dmg_sc[0]:
        tr.stats['network'] = 'LALALA'
        tr.stats['station'] = '575757'
    stream2 = dmg_sc[0]
    test_append = usc_sc.append(stream2)
    assert len(test_append) == 4

    # Check the from_directory method
    sc_test = StreamCollection.from_directory(directory)
    assert len(sc_test) == 1

    # Test to_dataframe
    jsonfile = os.path.join(directory, 'event.json')
    with open(jsonfile, 'rt') as f:
        origin = json.load(f)
    dmg_df = sc_test.to_dataframe(origin)
    # Fixed: atol was 1e5 (100000), which made this assertion vacuous for a
    # value of ~0.1456. The intended tolerance is 1e-5.
    np.testing.assert_allclose(
        dmg_df['HN1']['PGA'],
        0.145615,
        atol=1e-5)