Example #1
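All of the examples on this page are test functions excerpted from the gmprocess workspace test suite, shown without their import block. A preamble along the following lines is needed to run them; the gmprocess module paths below follow the older pre-v1.x package layout and are an assumption that may need adjusting for your installed version. A few helpers used in the examples (get_event_dict, get_event_object, request_raw_waveforms, compare_streams) also come from gmprocess and its test utilities; their exact locations vary by version and are omitted here.

import os
import shutil
import tempfile
import time
import warnings

import numpy as np
import pandas as pd
import pytest

# Warning classes silenced in several tests (h5py < 3 and PyYAML >= 5.1).
from h5py.h5py_warnings import H5pyDeprecationWarning
from yaml import YAMLLoadWarning

# gmprocess imports -- paths assume the pre-v1.x layout (see note above).
from gmprocess.config import get_config, update_config
from gmprocess.io.asdf.stream_workspace import StreamWorkspace
from gmprocess.io.read import read_data
from gmprocess.io.test_utils import read_data_dir
from gmprocess.metrics.station_summary import StationSummary
from gmprocess.processing import process_streams
from gmprocess.streamcollection import StreamCollection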
def test_raw():
    msg = "dataset.value has been deprecated. Use dataset[()] instead."
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", category=H5pyDeprecationWarning)
        warnings.filterwarnings("ignore", category=YAMLLoadWarning)
        warnings.filterwarnings("ignore", category=FutureWarning)
        raw_streams, inv = request_raw_waveforms(
            fdsn_client='IRIS',
            org_time='2018-11-30T17:29:29.330Z',
            lat=61.3464,
            lon=-149.9552,
            before_time=120,
            after_time=120,
            dist_min=0,
            dist_max=0.135,
            networks='*',
            stations='*',
            channels=['?N?'],
            access_restricted=False)
        tdir = tempfile.mkdtemp()
        try:
            edict = get_event_dict('ak20419010')
            origin = get_event_object('ak20419010')
            tfile = os.path.join(tdir, 'test.hdf')
            sc1 = StreamCollection(raw_streams)
            workspace = StreamWorkspace(tfile)
            workspace.addStreams(origin, sc1, label='raw')
            # getStreams() without get_raw=True returns only processed data,
            # so a workspace holding just raw streams returns nothing yet.
            tstreams = workspace.getStreams(edict['id'])
            assert len(tstreams) == 0

            imclist = [
                'greater_of_two_horizontals', 'channels', 'rotd50', 'rotd100'
            ]
            imtlist = ['sa1.0', 'PGA', 'pgv', 'fas2.0', 'arias']
            # this shouldn't do anything, since no processed streams exist yet
            workspace.setStreamMetrics(edict['id'],
                                       imclist=imclist,
                                       imtlist=imtlist)

            processed_streams = process_streams(sc1, edict)
            workspace.addStreams(origin, processed_streams, 'processed')
            labels = workspace.getLabels()
            tags = workspace.getStreamTags(edict['id'])
            out_raw_streams = workspace.getStreams(edict['id'], get_raw=True)
            assert len(out_raw_streams) == len(sc1)

            # this should only work on processed data
            workspace.setStreamMetrics(edict['id'],
                                       imclist=imclist,
                                       imtlist=imtlist)

            df = workspace.summarizeLabels()
            workspace.close()

        finally:
            shutil.rmtree(tdir)
Example #2
def test_stream_params():
    eventid = 'us1000778i'
    datafiles, event = read_data_dir(
        'geonet',
        eventid,
        '20161113_110259_WTMC_20.V1A'
    )
    tdir = tempfile.mkdtemp()
    streams = []
    try:
        streams += read_data(datafiles[0])
        statsdict = {'name': 'Fred', 'age': 34}
        streams[0].setStreamParam('stats', statsdict)
        tfile = os.path.join(tdir, 'test.hdf')
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, streams, label='stats')
        outstreams = workspace.getStreams(event.id, labels=['stats'])
        cmpdict = outstreams[0].getStreamParam('stats')
        assert cmpdict == statsdict
        workspace.close()
    finally:
        shutil.rmtree(tdir)
Example #3
def test_metrics():
    eventid = 'usb000syza'
    datafiles, event = read_data_dir('knet', eventid, '*')
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config = update_config(os.path.join(datadir, 'config_min_freq_0p2.yml'))
    # turn off sta/lta check and snr checks
    # newconfig = drop_processing(config, ['check_sta_lta', 'compute_snr'])
    # processed_streams = process_streams(raw_streams, event, config=newconfig)
    newconfig = config.copy()
    newconfig['processing'].append(
        {'NNet_QA': {
            'acceptance_threshold': 0.5,
            'model_name': 'CantWell'
        }})
    processed_streams = process_streams(raw_streams.copy(),
                                        event,
                                        config=newconfig)

    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, raw_streams, label='raw')
        workspace.addStreams(event, processed_streams, label='processed')
        stream1 = raw_streams[0]
        summary1 = StationSummary.from_config(stream1)
        s1_df_in = summary1.pgms.sort_values(['IMT', 'IMC'])
        array1 = s1_df_in['Result'].to_numpy()
        workspace.calcMetrics(eventid, labels=['raw'])
        pstreams2 = workspace.getStreams(event.id, labels=['processed'])
        assert pstreams2[0].getStreamParamKeys() == ['nnet_qa']
        summary1_a = workspace.getStreamMetrics(event.id,
                                                stream1[0].stats.network,
                                                stream1[0].stats.station,
                                                'raw')
        s1_df_out = summary1_a.pgms.sort_values(['IMT', 'IMC'])
        array2 = s1_df_out['Result'].to_numpy()
        np.testing.assert_almost_equal(array1, array2, decimal=4)

        workspace.close()
    finally:
        shutil.rmtree(tdir)
Example #4
def _test_stream_params():
    eventid = "us1000778i"
    datafiles, event = read_data_dir("geonet", eventid,
                                     "20161113_110259_WTMC_20.V1A")
    tdir = tempfile.mkdtemp()
    streams = []
    try:
        streams += read_data(datafiles[0])
        statsdict = {"name": "Fred", "age": 34}
        streams[0].setStreamParam("stats", statsdict)
        tfile = os.path.join(tdir, "test.hdf")
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, streams, label="stats")
        outstreams = workspace.getStreams(event.id, labels=["stats"])
        cmpdict = outstreams[0].getStreamParam("stats")
        assert cmpdict == statsdict
        workspace.close()
    finally:
        shutil.rmtree(tdir)
Example #5
def test_stream_params():
    eventid = 'us1000778i'
    datafiles, event = read_data_dir('geonet',
                                     eventid,
                                     '20161113_110259_WTMC_20.V1A')
    tdir = tempfile.mkdtemp()
    streams = []
    try:
        streams += read_data(datafiles[0])
        statsdict = {'name': 'Fred', 'age': 34}
        streams[0].setStreamParam('stats', statsdict)
        tfile = os.path.join(tdir, 'test.hdf')
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, streams, label='stats')
        outstreams = workspace.getStreams(event.id, labels=['stats'])
        cmpdict = outstreams[0].getStreamParam('stats')
        assert cmpdict == statsdict
        workspace.close()
    finally:
        shutil.rmtree(tdir)
Example #6
def test_workspace():
    eventid = 'us1000778i'
    datafiles, event = read_data_dir('geonet', eventid, '*.V1A')
    tdir = tempfile.mkdtemp()
    try:
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", category=H5pyDeprecationWarning)
            warnings.filterwarnings("ignore", category=YAMLLoadWarning)
            warnings.filterwarnings("ignore", category=FutureWarning)
            config = get_config()
            tfile = os.path.join(tdir, 'test.hdf')
            raw_streams = []
            for dfile in datafiles:
                raw_streams += read_data(dfile)

            workspace = StreamWorkspace(tfile)
            t1 = time.time()
            workspace.addStreams(event, raw_streams, label='raw')
            t2 = time.time()
            print('Adding %i streams took %.2f seconds' %
                  (len(raw_streams), (t2 - t1)))

            str_repr = repr(workspace)
            assert str_repr == 'Events: 1 Stations: 3 Streams: 3'

            eventobj = workspace.getEvent(eventid)
            assert eventobj.origins[0].latitude == event.origins[0].latitude
            assert eventobj.magnitudes[0].mag == event.magnitudes[0].mag

            stations = workspace.getStations()
            assert sorted(stations) == ['hses', 'thz', 'wtmc']

            stations = workspace.getStations(eventid=eventid)
            assert sorted(stations) == ['hses', 'thz', 'wtmc']

            # test retrieving tags for an event that doesn't exist
            with pytest.raises(KeyError):
                workspace.getStreamTags('foo')

            # test retrieving event that doesn't exist
            with pytest.raises(KeyError):
                workspace.getEvent('foo')

            instream = None
            for stream in raw_streams:
                if stream[0].stats.station.lower() == 'hses':
                    instream = stream
                    break
            assert instream is not None
            outstream = workspace.getStreams(eventid,
                                             stations=['hses'],
                                             labels=['raw'])[0]
            compare_streams(instream, outstream)

            label_summary = workspace.summarizeLabels()
            assert label_summary.iloc[0]['Label'] == 'raw'
            assert label_summary.iloc[0]['Software'] == 'gmprocess'

            sc = StreamCollection(raw_streams)
            processed_streams = process_streams(sc, event, config=config)
            workspace.addStreams(event, processed_streams, 'processed')

            idlist = workspace.getEventIds()
            assert idlist[0] == eventid

            event_tags = workspace.getStreamTags(eventid)
            assert sorted(event_tags) == ['hses_processed', 'hses_raw',
                                          'thz_processed', 'thz_raw',
                                          'wtmc_processed', 'wtmc_raw']
            outstream = workspace.getStreams(eventid,
                                             stations=['hses'],
                                             labels=['processed'])[0]

            provenance = workspace.getProvenance(eventid, labels=['processed'])
            first_row = pd.Series({'Record': 'NZ.HSES.HN1',
                                   'Processing Step': 'Remove Response',
                                   'Step Attribute': 'input_units',
                                   'Attribute Value': 'counts'})

            last_row = pd.Series({'Record': 'NZ.WTMC.HNZ',
                                  'Processing Step': 'Lowpass Filter',
                                  'Step Attribute': 'number_of_passes',
                                  'Attribute Value': 2})
            assert provenance.iloc[0].equals(first_row)
            assert provenance.iloc[-1].equals(last_row)

            # compare the parameters from the input processed stream
            # to its output equivalent
            instream = None
            for stream in processed_streams:
                if stream[0].stats.station.lower() == 'hses':
                    instream = stream
                    break
            assert instream is not None
            compare_streams(instream, outstream)
            workspace.close()

            # read in data from a second event and stash it in the workspace
            eventid = 'nz2018p115908'
            datafiles, event = read_data_dir('geonet', eventid, '*.V2A')
            raw_streams = []
            for dfile in datafiles:
                raw_streams += read_data(dfile)

            workspace = StreamWorkspace.open(tfile)
            workspace.addStreams(event, raw_streams, label='foo')

            stations = workspace.getStations(eventid)

            eventids = workspace.getEventIds()
            assert eventids == ['us1000778i', 'nz2018p115908']
            instation = raw_streams[0][0].stats.station
            this_stream = workspace.getStreams(eventid,
                                               stations=[instation],
                                               labels=['foo'])[0]
            assert instation == this_stream[0].stats.station
            usid = 'us1000778i'
            inventory = workspace.getInventory(usid)
            codes = [station.code for station in inventory.networks[0].stations]
            assert sorted(codes) == ['HSES', 'THZ', 'WPWS', 'WTMC']

    finally:
        shutil.rmtree(tdir)
Example #7
def _test_workspace():
    eventid = "us1000778i"
    datafiles, event = read_data_dir("geonet", eventid, "*.V1A")
    tdir = tempfile.mkdtemp()
    try:
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", category=H5pyDeprecationWarning)
            warnings.filterwarnings("ignore", category=YAMLError)
            warnings.filterwarnings("ignore", category=FutureWarning)
            datadir = os.path.split(datafiles[0])[0]
            config = update_config(
                os.path.join(datadir, "config_min_freq_0p2.yml"))
            tfile = os.path.join(tdir, "test.hdf")
            raw_streams = []
            for dfile in datafiles:
                raw_streams += read_data(dfile)

            workspace = StreamWorkspace(tfile)
            t1 = time.time()
            workspace.addStreams(event, raw_streams, label="raw")
            t2 = time.time()
            print("Adding %i streams took %.2f seconds" % (len(raw_streams),
                                                           (t2 - t1)))

            str_repr = repr(workspace)
            assert str_repr == "Events: 1 Stations: 3 Streams: 3"

            eventobj = workspace.getEvent(eventid)
            assert eventobj.origins[0].latitude == event.origins[0].latitude
            assert eventobj.magnitudes[0].mag == event.magnitudes[0].mag

            stations = workspace.getStations()
            assert sorted(stations) == ["HSES", "THZ", "WTMC"]

            stations = workspace.getStations(eventid=eventid)
            assert sorted(stations) == ["HSES", "THZ", "WTMC"]

            # test retrieving event that doesn't exist
            with pytest.raises(KeyError):
                workspace.getEvent("foo")

            instream = None
            for stream in raw_streams:
                if stream[0].stats.station.lower() == "hses":
                    instream = stream
                    break
            if instream is None:
                raise ValueError("Instream should not be none.")
            outstream = workspace.getStreams(eventid,
                                             stations=["HSES"],
                                             labels=["raw"])[0]
            compare_streams(instream, outstream)

            label_summary = workspace.summarizeLabels()
            assert label_summary.iloc[0]["Label"] == "raw"
            assert label_summary.iloc[0]["Software"] == "gmprocess"

            sc = StreamCollection(raw_streams)
            processed_streams = process_streams(sc, event, config=config)
            workspace.addStreams(event, processed_streams, "processed")

            idlist = workspace.getEventIds()
            assert idlist[0] == eventid

            outstream = workspace.getStreams(eventid,
                                             stations=["HSES"],
                                             labels=["processed"])[0]

            provenance = workspace.getProvenance(eventid, labels=["processed"])
            first_row = pd.Series({
                "Record": "NZ.HSES.--.HN1_us1000778i_processed",
                "Processing Step": "Remove Response",
                "Step Attribute": "input_units",
                "Attribute Value": "counts",
            })

            last_row = pd.Series({
                "Record": "NZ.WTMC.--.HNZ_us1000778i_processed",
                "Processing Step": "Lowpass Filter",
                "Step Attribute": "number_of_passes",
                "Attribute Value": 2,
            })
            assert provenance.iloc[0].equals(first_row)
            assert provenance.iloc[-1].equals(last_row)

            # compare the parameters from the input processed stream
            # to its output equivalent
            instream = None
            for stream in processed_streams:
                if stream[0].stats.station.lower() == "hses":
                    instream = stream
                    break
            if instream is None:
                raise ValueError("Instream should not be none.")
            compare_streams(instream, outstream)
            workspace.close()

            # read in data from a second event and stash it in the workspace
            eventid = "nz2018p115908"
            datafiles, event = read_data_dir("geonet", eventid, "*.V2A")
            raw_streams = []
            for dfile in datafiles:
                raw_streams += read_data(dfile)

            workspace = StreamWorkspace.open(tfile)
            workspace.addStreams(event, raw_streams, label="foo")

            stations = workspace.getStations()

            eventids = workspace.getEventIds()
            assert eventids == ["us1000778i", "nz2018p115908"]
            instation = raw_streams[0][0].stats.station
            this_stream = workspace.getStreams(eventid,
                                               stations=[instation],
                                               labels=["foo"])[0]
            assert instation == this_stream[0].stats.station
            usid = "us1000778i"
            inventory = workspace.getInventory(usid)
            workspace.close()
            codes = [
                station.code for station in inventory.networks[0].stations
            ]
            assert sorted(set(codes)) == ["HSES", "THZ", "WPWS", "WTMC"]

    finally:
        shutil.rmtree(tdir)
Example #8
def _test_workspace():
    eventid = 'us1000778i'
    datafiles, event = read_data_dir('geonet', eventid, '*.V1A')
    tdir = tempfile.mkdtemp()
    try:
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", category=H5pyDeprecationWarning)
            warnings.filterwarnings("ignore", category=YAMLLoadWarning)
            warnings.filterwarnings("ignore", category=FutureWarning)
            datadir = os.path.split(datafiles[0])[0]
            config = update_config(
                os.path.join(datadir, 'config_min_freq_0p2.yml'))
            tfile = os.path.join(tdir, 'test.hdf')
            raw_streams = []
            for dfile in datafiles:
                raw_streams += read_data(dfile)

            workspace = StreamWorkspace(tfile)
            t1 = time.time()
            workspace.addStreams(event, raw_streams, label='raw')
            t2 = time.time()
            print('Adding %i streams took %.2f seconds' % (len(raw_streams),
                                                           (t2 - t1)))

            str_repr = repr(workspace)
            assert str_repr == 'Events: 1 Stations: 3 Streams: 3'

            eventobj = workspace.getEvent(eventid)
            assert eventobj.origins[0].latitude == event.origins[0].latitude
            assert eventobj.magnitudes[0].mag == event.magnitudes[0].mag

            stations = workspace.getStations()
            assert sorted(stations) == ['HSES', 'THZ', 'WTMC']

            stations = workspace.getStations(eventid=eventid)
            assert sorted(stations) == ['HSES', 'THZ', 'WTMC']

            # test retrieving event that doesn't exist
            with pytest.raises(KeyError):
                workspace.getEvent('foo')

            instream = None
            for stream in raw_streams:
                if stream[0].stats.station.lower() == 'hses':
                    instream = stream
                    break
            if instream is None:
                raise ValueError('Instream should not be none.')
            outstream = workspace.getStreams(eventid,
                                             stations=['HSES'],
                                             labels=['raw'])[0]
            compare_streams(instream, outstream)

            label_summary = workspace.summarizeLabels()
            assert label_summary.iloc[0]['Label'] == 'raw'
            assert label_summary.iloc[0]['Software'] == 'gmprocess'

            sc = StreamCollection(raw_streams)
            processed_streams = process_streams(sc, event, config=config)
            workspace.addStreams(event, processed_streams, 'processed')

            idlist = workspace.getEventIds()
            assert idlist[0] == eventid

            outstream = workspace.getStreams(eventid,
                                             stations=['HSES'],
                                             labels=['processed'])[0]

            provenance = workspace.getProvenance(eventid, labels=['processed'])
            first_row = pd.Series({
                'Record': 'NZ.HSES.--.HN1_us1000778i_processed',
                'Processing Step': 'Remove Response',
                'Step Attribute': 'input_units',
                'Attribute Value': 'counts'
            })

            last_row = pd.Series({
                'Record': 'NZ.WTMC.--.HNZ_us1000778i_processed',
                'Processing Step': 'Lowpass Filter',
                'Step Attribute': 'number_of_passes',
                'Attribute Value': 2
            })
            assert provenance.iloc[0].equals(first_row)
            assert provenance.iloc[-1].equals(last_row)

            # compare the parameters from the input processed stream
            # to its output equivalent
            instream = None
            for stream in processed_streams:
                if stream[0].stats.station.lower() == 'hses':
                    instream = stream
                    break
            if instream is None:
                raise ValueError('Instream should not be none.')
            compare_streams(instream, outstream)
            workspace.close()

            # read in data from a second event and stash it in the workspace
            eventid = 'nz2018p115908'
            datafiles, event = read_data_dir('geonet', eventid, '*.V2A')
            raw_streams = []
            for dfile in datafiles:
                raw_streams += read_data(dfile)

            workspace = StreamWorkspace.open(tfile)
            workspace.addStreams(event, raw_streams, label='foo')

            stations = workspace.getStations(eventid)

            eventids = workspace.getEventIds()
            assert eventids == ['us1000778i', 'nz2018p115908']
            instation = raw_streams[0][0].stats.station
            this_stream = workspace.getStreams(eventid,
                                               stations=[instation],
                                               labels=['foo'])[0]
            assert instation == this_stream[0].stats.station
            usid = 'us1000778i'
            inventory = workspace.getInventory(usid)
            workspace.close()
            codes = [
                station.code for station in inventory.networks[0].stations
            ]
            assert sorted(set(codes)) == ['HSES', 'THZ', 'WPWS', 'WTMC']

    finally:
        shutil.rmtree(tdir)
Example #9
def test_workspace():
    eventid = 'us1000778i'
    datafiles, origin = read_data_dir('geonet', eventid, '*.V1A')
    event = get_event_object(origin)
    tdir = tempfile.mkdtemp()
    try:
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", category=H5pyDeprecationWarning)
            warnings.filterwarnings("ignore", category=YAMLLoadWarning)
            warnings.filterwarnings("ignore", category=FutureWarning)
            config = get_config()
            tfile = os.path.join(tdir, 'test.hdf')
            raw_streams = []
            for dfile in datafiles:
                raw_streams += read_data(dfile)

            workspace = StreamWorkspace(tfile)
            t1 = time.time()
            workspace.addStreams(event, raw_streams, label='raw')
            t2 = time.time()
            print('Adding %i streams took %.2f seconds' % (len(raw_streams),
                                                           (t2 - t1)))

            str_repr = repr(workspace)
            assert str_repr == 'Events: 1 Stations: 3 Streams: 3'

            eventobj = workspace.getEvent(eventid)
            assert eventobj.origins[0].latitude == event.origins[0].latitude
            assert eventobj.magnitudes[0].mag == event.magnitudes[0].mag

            stations = workspace.getStations()
            assert sorted(stations) == ['hses', 'thz', 'wtmc']

            stations = workspace.getStations(eventid=eventid)
            assert sorted(stations) == ['hses', 'thz', 'wtmc']

            # test retrieving tags for an event that doesn't exist
            try:
                workspace.getStreamTags('foo')
            except KeyError:
                assert 1 == 1

            # test retrieving event that doesn't exist
            try:
                workspace.getEvent('foo')
            except KeyError:
                assert 1 == 1

            instream = None
            for stream in raw_streams:
                if stream[0].stats.station.lower() == 'hses':
                    instream = stream
                    break
            assert instream is not None
            outstream = workspace.getStreams(eventid,
                                             stations=['hses'],
                                             labels=['raw'])[0]
            compare_streams(instream, outstream)

            label_summary = workspace.summarizeLabels()
            assert label_summary.iloc[0]['Label'] == 'raw'
            assert label_summary.iloc[0]['Software'] == 'gmprocess'

            sc = StreamCollection(raw_streams)
            processed_streams = process_streams(sc, origin, config=config)
            workspace.addStreams(event, processed_streams, 'processed')

            idlist = workspace.getEventIds()
            assert idlist[0] == eventid

            event_tags = workspace.getStreamTags(eventid)
            assert sorted(event_tags) == [
                'hses_processed', 'hses_raw', 'thz_processed', 'thz_raw',
                'wtmc_processed', 'wtmc_raw'
            ]
            outstream = workspace.getStreams(eventid,
                                             stations=['hses'],
                                             labels=['processed'])[0]

            provenance = workspace.getProvenance(eventid, labels=['processed'])
            first_row = pd.Series({
                'Record': 'NZ.HSES.HN1',
                'Processing Step': 'Remove Response',
                'Step Attribute': 'input_units',
                'Attribute Value': 'counts'
            })

            last_row = pd.Series({
                'Record': 'NZ.WTMC.HNZ',
                'Processing Step': 'Detrend',
                'Step Attribute': 'detrending_method',
                'Attribute Value': 'baseline_sixth_order'
            })
            assert provenance.iloc[0].equals(first_row)
            assert provenance.iloc[-1].equals(last_row)

            # compare the parameters from the input processed stream
            # to its output equivalent
            instream = None
            for stream in processed_streams:
                if stream[0].stats.station.lower() == 'hses':
                    instream = stream
                    break
            assert instream is not None
            compare_streams(instream, outstream)
            workspace.close()

            # read in data from a second event and stash it in the workspace
            eventid = 'nz2018p115908'
            datafiles, origin = read_data_dir('geonet', eventid, '*.V2A')
            raw_streams = []
            for dfile in datafiles:
                raw_streams += read_data(dfile)

            event = get_event_object(origin)
            workspace = StreamWorkspace.open(tfile)
            workspace.addStreams(event, raw_streams, label='foo')

            stations = workspace.getStations(eventid)

            eventids = workspace.getEventIds()
            assert eventids == ['us1000778i', 'nz2018p115908']
            instation = raw_streams[0][0].stats.station
            this_stream = workspace.getStreams(eventid,
                                               stations=[instation],
                                               labels=['foo'])[0]
            assert instation == this_stream[0].stats.station

            # set and retrieve waveform metrics in the file
            imclist = [
                'greater_of_two_horizontals', 'channels', 'rotd50', 'rotd100'
            ]
            imtlist = ['sa1.0', 'PGA', 'pgv', 'fas2.0', 'arias']
            usid = 'us1000778i'
            tags = workspace.getStreamTags(usid)
            workspace.setStreamMetrics(eventid,
                                       labels=['foo'],
                                       imclist=imclist,
                                       imtlist=imtlist)
            summary = workspace.getStreamMetrics(eventid, instation, 'foo')
            summary_series = summary.toSeries()['ARIAS']
            cmpseries = pd.Series({
                'GEOMETRIC_MEAN': np.nan,
                'GREATER_OF_TWO_HORIZONTALS': 0.0005,
                'HN1': 0.0001,
                'HN2': 0.0005,
                'HNZ': 0.0000,
                'ROTD100.0': 0.0005,
                'ROTD50.0': 0.0003
            })
            assert cmpseries.equals(summary_series)

            workspace.setStreamMetrics(usid, labels=['processed'])
            df = workspace.getMetricsTable(usid, labels=['processed'])
            cmpdict = {
                'GREATER_OF_TWO_HORIZONTALS': [26.8906, 4.9415, 94.6646],
                'HN1': [24.5105, 4.9415, 94.6646],
                'HN2': [26.8906, 4.0758, 86.7877],
                'HNZ': [16.0941, 2.5401, 136.7054]
            }
            cmpframe = pd.DataFrame(cmpdict)
            assert df['PGA'].equals(cmpframe)

            inventory = workspace.getInventory(usid)
            codes = [
                station.code for station in inventory.networks[0].stations
            ]
            assert sorted(codes) == ['HSES', 'THZ', 'WPWS', 'WTMC']

    finally:
        shutil.rmtree(tdir)
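Distilled from the tests above, the core StreamWorkspace round trip is short: write streams under a label, read them back by event id and label, then clean up the temporary HDF file. The sketch below uses only calls that appear in the examples; the function name and its packaging as a helper are ours, not part of the suite.

def workspace_roundtrip(event, streams, label='raw'):
    # Minimal sketch of the pattern shared by the tests above.
    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, streams, label=label)
        outstreams = workspace.getStreams(event.id, labels=[label])
        workspace.close()
        return outstreams
    finally:
        # Mirror the tests' try/finally cleanup of the temp directory.
        shutil.rmtree(tdir)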