def generate_workspace():
    """Generate a simple HDF5 workspace with ASDF layout for testing.

    Reads the raw GeoNet V1A records for event us1000778i, writes them to a
    temporary ASDF workspace file with the "unprocessed" label, reprocesses
    them with a minimal-frequency config, and computes stream metrics.

    Returns:
        str: Path to the temporary workspace HDF5 file.
    """
    EVENTID = "us1000778i"
    LABEL = "ptest"
    datafiles, event = read_data_dir("geonet", EVENTID, "*.V1A")
    tdir = tempfile.mkdtemp()
    tfilename = os.path.join(tdir, "workspace.h5")

    raw_data = []
    for dfile in datafiles:
        raw_data += read_data(dfile)
    write_asdf(tfilename, raw_data, event, label="unprocessed")
    # Free the raw stream list; everything below reads from the workspace file.
    del raw_data

    config = update_config(os.path.join(datadir, "config_min_freq_0p2.yml"))

    workspace = StreamWorkspace.open(tfilename)
    raw_streams = workspace.getStreams(EVENTID, labels=["unprocessed"], config=config)
    pstreams = process_streams(raw_streams, event, config=config)
    workspace.addStreams(event, pstreams, label=LABEL)
    workspace.calcMetrics(event.id, labels=[LABEL], config=config)
    return tfilename
def generate_workspace():
    """Generate a simple HDF5 workspace with ASDF layout for testing.

    Reads the raw GeoNet V1A records for event us1000778i, writes them to a
    temporary ASDF workspace file with the "unprocessed" label, reprocesses
    them with the default config, and computes stream metrics.

    Returns:
        str: Path to the temporary workspace HDF5 file.
    """
    EVENTID = 'us1000778i'
    LABEL = 'ptest'
    datafiles, event = read_data_dir('geonet', EVENTID, '*.V1A')
    tdir = tempfile.mkdtemp()
    tfilename = os.path.join(tdir, 'workspace.h5')

    raw_data = []
    for dfile in datafiles:
        raw_data += read_data(dfile)
    write_asdf(tfilename, raw_data, event, label="unprocessed")
    # Free the raw stream list; everything below reads from the workspace file.
    del raw_data

    config = get_config()

    workspace = StreamWorkspace.open(tfilename)
    raw_streams = workspace.getStreams(EVENTID, labels=['unprocessed'])
    pstreams = process_streams(raw_streams, event, config=config)
    workspace.addStreams(event, pstreams, label=LABEL)
    workspace.calcMetrics(event.id, labels=[LABEL], config=config)
    return tfilename
def test_asdf():
    """Round-trip raw GeoNet streams through an ASDF file.

    Verifies ASDF format detection, that the unlabeled and 'foo'-labeled
    writes both preserve the stream count on read-back, and that the
    temporary directory is always removed.
    """
    eventid = 'us1000778i'
    datafiles, origin = read_data_dir('geonet', eventid, '*.V1A')
    event = get_event_object(origin)
    tdir = tempfile.mkdtemp()
    # try/finally (no except) so a real failure surfaces with its own
    # traceback instead of being masked by an opaque `assert 1 == 2`.
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        raw_streams = []
        for dfile in datafiles:
            raw_streams += read_data(dfile)

        write_asdf(tfile, raw_streams, event)
        assert is_asdf(tfile)
        # A raw V1A data file must not be detected as ASDF.
        assert not is_asdf(datafiles[0])

        outstreams = read_asdf(tfile)
        assert len(outstreams) == len(raw_streams)

        write_asdf(tfile, raw_streams, event, label='foo')
        outstreams2 = read_asdf(tfile, label='foo')
        assert len(outstreams2) == len(raw_streams)
    finally:
        shutil.rmtree(tdir)
def test_fdsnfetch():
    """Fetch UW.ALCT waveforms from IRIS and write them to an ASDF file.

    Requests two minutes of data on either side of the 2001 Nisqually
    earthquake origin time from the single station ALCT and confirms the
    three EN* channels are returned, then exercises ASDF output.
    """
    streams, inv = request_raw_waveforms(
        'IRIS',
        '2001-02-28T18:54:32',
        47.149,
        -122.7266667,
        before_time=120,
        after_time=120,
        dist_max=1.0,
        stations=['ALCT'],
        networks=["UW"],
        channels=['EN*'],
    )
    # Confirm that we got the three ALCT files as expected.
    assert len(streams) == 3

    # Test writing out the streams in ASDF format; clean up the temp
    # directory even if the write fails (the original leaked it).
    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        event_dict = get_event_object('uw10530748')
        write_asdf(tfile, streams, event_dict, label='foo')
    finally:
        shutil.rmtree(tdir)
def test_asdf():
    """Round-trip raw GeoNet streams through an ASDF file.

    Verifies ASDF format detection and that both the unlabeled and the
    'foo'-labeled writes preserve the stream count on read-back.
    """
    eventid = 'us1000778i'
    datafiles, event = read_data_dir('geonet', eventid, '*.V1A')
    tdir = tempfile.mkdtemp()
    # try/finally only: `except Exception as e: raise(e)` was a no-op
    # re-raise that added noise without changing behavior.
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        raw_streams = []
        for dfile in datafiles:
            raw_streams += read_data(dfile)

        write_asdf(tfile, raw_streams, event)
        assert is_asdf(tfile)
        # A raw V1A data file must not be detected as ASDF.
        assert not is_asdf(datafiles[0])

        outstreams = read_asdf(tfile)
        assert len(outstreams) == len(raw_streams)

        write_asdf(tfile, raw_streams, event, label='foo')
        outstreams2 = read_asdf(tfile, label='foo')
        assert len(outstreams2) == len(raw_streams)
    finally:
        shutil.rmtree(tdir)
def test_asdf():
    """Round-trip raw GeoNet streams through an ASDF file.

    Verifies ASDF format detection, that the unlabeled and 'foo'-labeled
    writes both preserve the stream count on read-back, and that the
    temporary directory is always removed.
    """
    eventid = 'us1000778i'
    datafiles, event = read_data_dir('geonet', eventid, '*.V1A')
    tdir = tempfile.mkdtemp()
    # try/finally (no except) so a real failure surfaces with its own
    # traceback instead of being masked by an opaque `assert 1 == 2`.
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        raw_streams = []
        for dfile in datafiles:
            raw_streams += read_data(dfile)

        write_asdf(tfile, raw_streams, event)
        assert is_asdf(tfile)
        # A raw V1A data file must not be detected as ASDF.
        assert not is_asdf(datafiles[0])

        outstreams = read_asdf(tfile)
        assert len(outstreams) == len(raw_streams)

        write_asdf(tfile, raw_streams, event, label='foo')
        outstreams2 = read_asdf(tfile, label='foo')
        assert len(outstreams2) == len(raw_streams)
    finally:
        shutil.rmtree(tdir)