Example #1
def test_metrics():
    eventid = 'usb000syza'
    datafiles, event = read_data_dir('knet', eventid, '*')
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config = get_config()
    # turn off sta/lta check and snr checks
    newconfig = drop_processing(config, ['check_sta_lta', 'compute_snr'])
    processed_streams = process_streams(raw_streams, event, config=newconfig)

    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, processed_streams, label='processed')
        stream1 = processed_streams[0]
        stream2 = processed_streams[1]
        summary1 = StationSummary.from_config(stream1)
        summary2 = StationSummary.from_config(stream2)
        workspace.setStreamMetrics(event.id, 'processed', summary1)
        workspace.setStreamMetrics(event.id, 'processed', summary2)
        workspace.calcStationMetrics(event.id, labels=['processed'])
        summary1_a = workspace.getStreamMetrics(event.id,
                                                stream1[0].stats.station,
                                                'processed')
        s1_df_in = summary1.pgms.sort_values(['IMT', 'IMC'])
        s1_df_out = summary1_a.pgms.sort_values(['IMT', 'IMC'])
        array1 = s1_df_in['Result'].to_numpy()
        array2 = s1_df_out['Result'].to_numpy()
        np.testing.assert_almost_equal(array1, array2, decimal=4)

        df = workspace.getMetricsTable(event.id)
        cmp_series = {
            'GREATER_OF_TWO_HORIZONTALS': 0.6787,
            'H1': 0.3869,
            'H2': 0.6787,
            'Z': 0.7663
        }
        pga_dict = df.iloc[0]['PGA'].to_dict()
        for key, value in pga_dict.items():
            value2 = cmp_series[key]
            np.testing.assert_almost_equal(value, value2, decimal=4)

        workspace.close()
    except Exception:
        raise
    finally:
        shutil.rmtree(tdir)
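
Note: Examples #1 and #2 call a drop_processing helper that is not shown here. A minimal sketch of such a helper, assuming config['processing'] is a list of single-key step dictionaries (the same shape the NNet_QA examples below append to):

import copy

def drop_processing(config, drop_steps):
    # Copy the config and keep only the processing steps whose
    # name is not listed in drop_steps.
    newconfig = copy.deepcopy(config)
    newconfig['processing'] = [
        step for step in newconfig['processing']
        if list(step.keys())[0] not in drop_steps
    ]
    return newconfig
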
Example #2
def test_metrics():
    eventid = 'usb000syza'
    datafiles, event = read_data_dir('knet',
                                     eventid,
                                     '*')
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config = get_config()
    # turn off sta/lta check and snr checks
    newconfig = drop_processing(config, ['check_sta_lta', 'compute_snr'])
    processed_streams = process_streams(raw_streams, event, config=newconfig)

    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, processed_streams, label='processed')
        stream1 = processed_streams[0]
        stream2 = processed_streams[1]
        summary1 = StationSummary.from_config(stream1)
        summary2 = StationSummary.from_config(stream2)
        workspace.setStreamMetrics(event.id, 'processed', summary1)
        workspace.setStreamMetrics(event.id, 'processed', summary2)
        summary1_a = workspace.getStreamMetrics(event.id,
                                                stream1[0].stats.station,
                                                'processed')
        s1_df_in = summary1.pgms.sort_values(['IMT', 'IMC'])
        s1_df_out = summary1_a.pgms.sort_values(['IMT', 'IMC'])
        array1 = s1_df_in['Result'].to_numpy()
        array2 = s1_df_out['Result'].to_numpy()
        np.testing.assert_almost_equal(array1, array2, decimal=4)

        df = workspace.getMetricsTable(event.id)
        cmp_series = {'GREATER_OF_TWO_HORIZONTALS': 0.6787,
                      'HN1': 0.3869,
                      'HN2': 0.6787,
                      'HNZ': 0.7663}
        pga_dict = df.iloc[0]['PGA'].to_dict()
        for key, value in pga_dict.items():
            value2 = cmp_series[key]
            np.testing.assert_almost_equal(value, value2, decimal=4)

        workspace.close()
    except Exception:
        raise
    finally:
        shutil.rmtree(tdir)
Example #3
    def calcStreamMetrics(self,
                          eventid,
                          stations=None,
                          labels=None,
                          config=None):
        """Create station metrics for specified event/streams.

        Args:
            eventid (str):
                ID of event to search for in ASDF file.
            stations (list):
                List of stations to create metrics for.
            labels (list):
                List of processing labels to create metrics for.
            config (dict):
                Config dictionary.
        """
        if not self.hasEvent(eventid):
            fmt = 'No event matching %s found in workspace.'
            raise KeyError(fmt % eventid)

        streams = self.getStreams(eventid, stations=stations, labels=labels)
        event = self.getEvent(eventid)
        for stream in streams:
            tag = stream.tag
            eventid, station, label = tag.split('_')
            try:
                summary = StationSummary.from_config(stream,
                                                     event=event,
                                                     config=config)
            except Exception as pgme:
                fmt = 'Could not create stream metrics for event %s, station %s: "%s"'
                logging.warning(fmt % (eventid, station, str(pgme)))
                continue

            xmlstr = summary.get_metric_xml()

            path = '%s_%s_%s' % (eventid, summary.station_code.lower(), label)

            # This seems like a lot of effort just to store a string
            # in HDF, but other approaches failed. Suggestions are welcome.
            xmlbytes = xmlstr.encode('utf-8')
            jsonarray = np.frombuffer(xmlbytes, dtype=np.uint8)
            dtype = 'WaveFormMetrics'

            self.dataset.add_auxiliary_data(jsonarray,
                                            data_type=dtype,
                                            path=path,
                                            parameters={})
Example #4
def test_metrics():
    eventid = "usb000syza"
    datafiles, event = read_data_dir("knet", eventid, "*")
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config = update_config(os.path.join(datadir, "config_min_freq_0p2.yml"))
    # turn off sta/lta check and snr checks
    # newconfig = drop_processing(config, ['check_sta_lta', 'compute_snr'])
    # processed_streams = process_streams(raw_streams, event, config=newconfig)
    newconfig = config.copy()
    newconfig["processing"].append(
        {"NNet_QA": {
            "acceptance_threshold": 0.5,
            "model_name": "CantWell"
        }})
    processed_streams = process_streams(raw_streams.copy(),
                                        event,
                                        config=newconfig)

    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, "test.hdf")
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, raw_streams, label="raw")
        workspace.addStreams(event, processed_streams, label="processed")
        stream1 = raw_streams[0]

        # Get metrics from station summary for raw streams
        summary1 = StationSummary.from_config(stream1)
        s1_df_in = summary1.pgms.sort_values(["IMT", "IMC"])
        array1 = s1_df_in["Result"].to_numpy()

        # Compare to metrics from getStreamMetrics for raw streams
        workspace.calcMetrics(eventid, labels=["raw"])
        summary1_a = workspace.getStreamMetrics(event.id,
                                                stream1[0].stats.network,
                                                stream1[0].stats.station,
                                                "raw")
        s1_df_out = summary1_a.pgms.sort_values(["IMT", "IMC"])
        array2 = s1_df_out["Result"].to_numpy()

        np.testing.assert_allclose(array1, array2, atol=1e-6, rtol=1e-6)
        workspace.close()
    except Exception:
        raise
    finally:
        shutil.rmtree(tdir)
Example #5
    def _compute_event_waveforms(self, event):
        self.eventid = event.id
        logging.info(
            'Computing waveform metrics for event %s...' % self.eventid)
        event_dir = os.path.join(self.gmrecords.data_path, self.eventid)
        workname = os.path.join(event_dir, WORKSPACE_NAME)
        if not os.path.isfile(workname):
            logging.info(
                'No workspace file found for event %s. Please run '
                'subcommand \'assemble\' to generate workspace file.'
                % self.eventid)
            logging.info('Continuing to next event.')
            return event.id

        self.workspace = StreamWorkspace.open(workname)
        self._get_pstreams()

        if not hasattr(self, 'pstreams'):
            logging.info('No processed waveforms available. No waveform '
                         'metrics computed.')
            self.workspace.close()
            return event.id

        for stream in self.pstreams:
            if stream.passed:
                logging.info(
                    'Calculating waveform metrics for %s...'
                    % stream.get_id()
                )
                summary = StationSummary.from_config(
                    stream, event=event, config=self.gmrecords.conf,
                    calc_waveform_metrics=True,
                    calc_station_metrics=False
                )
                xmlstr = summary.get_metric_xml()
                tag = stream.tag
                metricpath = '/'.join([
                    format_netsta(stream[0].stats),
                    format_nslit(stream[0].stats, stream.get_inst(), tag)
                ])
                self.workspace.insert_aux(
                    xmlstr, 'WaveFormMetrics', metricpath,
                    overwrite=self.gmrecords.args.overwrite)
                logging.info('Added waveform metrics to workspace files '
                             'with tag \'%s\'.' % self.gmrecords.args.label)

        self.workspace.close()
        return event.id
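
The metric path above is assembled by two gmprocess helpers whose bodies are not shown. A hypothetical sketch of what they likely produce, inferred only from the path layout used in these examples:

def format_netsta(stats):
    # Hypothetical: first path component is network.station.
    return '%s.%s' % (stats.network, stats.station)

def format_nslit(stats, inst, tag):
    # Hypothetical: network.station.location.instrument, suffixed
    # with the stream tag.
    return '%s.%s.%s.%s_%s' % (stats.network, stats.station,
                               stats.location, inst, tag)
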
Example #6
    def calcStreamMetrics(self, eventid, stations=None,
                          labels=None, imclist=None, imtlist=None):
        """Create station metrics for specified event/streams.

        Args:
            eventid (str):
                ID of event to search for in ASDF file.
            stations (list):
                List of stations to create metrics for.
            labels (list):
                List of processing labels to create metrics for.
            imclist (list):
                List of valid component names.
            imtlist (list):
                List of valid IMT names.
        """
        if not self.hasEvent(eventid):
            fmt = 'No event matching %s found in workspace.'
            raise KeyError(fmt % eventid)

        streams = self.getStreams(eventid, stations=stations, labels=labels)
        event = self.getEvent(eventid)
        for stream in streams:
            tag = stream.tag
            station, label = tag.split('_')
            if imclist is None and imtlist is None:
                summary = StationSummary.from_config(stream,
                                                     event=event)
            else:
                summary = StationSummary.from_stream(stream,
                                                     components=imclist,
                                                     imts=imtlist,
                                                     event=event)
            xmlstr = summary.getMetricXML()

            path = '%s_%s_%s' % (eventid, summary.station_code.lower(), label)

            # This seems like a lot of effort just to store a string
            # in HDF, but other approaches failed. Suggestions are welcome.
            jsonarray = np.frombuffer(xmlstr, dtype=np.uint8)
            dtype = 'WaveFormMetrics'
            self.dataset.add_auxiliary_data(jsonarray,
                                            data_type=dtype,
                                            path=path,
                                            parameters={})
Example #7
def test_metrics():
    eventid = 'usb000syza'
    datafiles, event = read_data_dir('knet', eventid, '*')
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config = update_config(os.path.join(datadir, 'config_min_freq_0p2.yml'))
    # turn off sta/lta check and snr checks
    # newconfig = drop_processing(config, ['check_sta_lta', 'compute_snr'])
    # processed_streams = process_streams(raw_streams, event, config=newconfig)
    newconfig = config.copy()
    newconfig['processing'].append(
        {'NNet_QA': {
            'acceptance_threshold': 0.5,
            'model_name': 'CantWell'
        }})
    processed_streams = process_streams(raw_streams.copy(),
                                        event,
                                        config=newconfig)

    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, raw_streams, label='raw')
        workspace.addStreams(event, processed_streams, label='processed')
        stream1 = raw_streams[0]

        # Get metrics from station summary for raw streams
        summary1 = StationSummary.from_config(stream1)
        s1_df_in = summary1.pgms.sort_values(['IMT', 'IMC'])
        array1 = s1_df_in['Result'].to_numpy()

        # Compare to metrics from getStreamMetrics for raw streams
        workspace.calcMetrics(eventid, labels=['raw'])
        summary1_a = workspace.getStreamMetrics(event.id,
                                                stream1[0].stats.network,
                                                stream1[0].stats.station,
                                                'raw')
        s1_df_out = summary1_a.pgms.sort_values(['IMT', 'IMC'])
        array2 = s1_df_out['Result'].to_numpy()

        np.testing.assert_allclose(array1, array2, atol=1e-6, rtol=1e-6)
        workspace.close()
    except Exception:
        raise
    finally:
        shutil.rmtree(tdir)
Example #8
def test_metrics():
    eventid = 'usb000syza'
    datafiles, event = read_data_dir('knet', eventid, '*')
    datadir = os.path.split(datafiles[0])[0]
    raw_streams = StreamCollection.from_directory(datadir)
    config = get_config()
    # turn off sta/lta check and snr checks
    # newconfig = drop_processing(config, ['check_sta_lta', 'compute_snr'])
    # processed_streams = process_streams(raw_streams, event, config=newconfig)
    newconfig = config.copy()
    newconfig['processing'].append(
        {'NNet_QA': {
            'acceptance_threshold': 0.5,
            'model_name': 'CantWell'
        }})
    processed_streams = process_streams(raw_streams, event, config=newconfig)

    tdir = tempfile.mkdtemp()
    try:
        tfile = os.path.join(tdir, 'test.hdf')
        workspace = StreamWorkspace(tfile)
        workspace.addEvent(event)
        workspace.addStreams(event, raw_streams, label='raw')
        workspace.addStreams(event, processed_streams, label='processed')
        stream1 = raw_streams[0]
        summary1 = StationSummary.from_config(stream1)
        s1_df_in = summary1.pgms.sort_values(['IMT', 'IMC'])
        array1 = s1_df_in['Result'].to_numpy()
        workspace.calcStreamMetrics(eventid, labels=['raw'])
        workspace.calcStationMetrics(event.id, labels=['raw'])
        pstreams2 = workspace.getStreams(event.id, labels=['processed'])
        assert pstreams2[0].getStreamParamKeys() == ['nnet_qa']
        summary1_a = workspace.getStreamMetrics(event.id,
                                                stream1[0].stats.network,
                                                stream1[0].stats.station,
                                                'raw')
        s1_df_out = summary1_a.pgms.sort_values(['IMT', 'IMC'])
        array2 = s1_df_out['Result'].to_numpy()
        np.testing.assert_almost_equal(array1, array2, decimal=4)

        workspace.close()
    except Exception:
        raise
    finally:
        shutil.rmtree(tdir)
Example #9
def test_bhrc():
    datafiles, origin = read_data_dir("bhrc", "usp000jq5p")

    # make sure format checker works
    assert is_bhrc(datafiles[0])

    raw_streams = []
    for dfile in datafiles:
        raw_streams += read_bhrc(dfile)

    peaks = {
        "5528": 4.793910,
        "5529": 1.024440,
        "5522": 1.595120,
        "5523": 2.291470,
        "5520": 26.189800,
        "5526": 1.319720,
    }

    for stream in raw_streams:
        summary = StationSummary.from_config(stream)
        cmp_value = peaks[summary.station_code]
        pga = summary.pgms.loc["PGA", "GREATER_OF_TWO_HORIZONTALS"].tolist()[0]
        np.testing.assert_almost_equal(cmp_value, pga)
Example #10
def test_bhrc():
    datafiles, origin = read_data_dir('bhrc', 'usp000jq5p')

    # make sure format checker works
    assert is_bhrc(datafiles[0])

    raw_streams = []
    for dfile in datafiles:
        raw_streams += read_bhrc(dfile)

    peaks = {
        '5528': 4.793910,
        '5529': 1.024440,
        '5522': 1.595120,
        '5523': 2.291470,
        '5520': 26.189800,
        '5526': 1.319720
    }

    for stream in raw_streams:
        summary = StationSummary.from_config(stream)
        cmp_value = peaks[summary.station_code]
        pga = summary.pgms.loc['PGA', 'GREATER_OF_TWO_HORIZONTALS'].tolist()[0]
        np.testing.assert_almost_equal(cmp_value, pga)
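
The tests above read summary.pgms in two different ways, reflecting an API change: older gmprocess versions exposed plain IMT/IMC columns, newer versions an (IMT, IMC) row index. A self-contained pandas sketch of both lookups with stand-in values:

import pandas as pd

flat = pd.DataFrame({'IMT': ['PGA', 'PGA'],
                     'IMC': ['H1', 'H2'],
                     'Result': [99.25, 81.23]})
# Older column-based lookup:
value_flat = flat.loc[(flat['IMT'] == 'PGA') &
                      (flat['IMC'] == 'H1'), 'Result'].tolist()[0]

# Newer MultiIndex lookup:
indexed = flat.set_index(['IMT', 'IMC'])
value_indexed = indexed.loc[('PGA', 'H1'), 'Result']
assert value_flat == value_indexed
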
Example #11
    def calcMetrics(self, eventid, stations=None, labels=None, config=None,
                    streams=None, stream_label=None, rupture_file=None,
                    calc_station_metrics=True, calc_waveform_metrics=True):
        """
        Calculate waveform and/or station metrics for a set of waveforms.

        Args:
            eventid (str):
                ID of event to search for in ASDF file.
            stations (list):
                List of stations to create metrics for.
            labels (list):
                List of processing labels to create metrics for.
            config (dict):
                Configuration dictionary.
            streams (StreamCollection):
                Optional StreamCollection object to create metrics for.
            stream_label (str):
                Label to be used in the metrics path when providing a
                StreamCollection.
            rupture_file (str):
                Path pointing to the rupture file.
            calc_station_metrics (bool):
                Whether to calculate station metrics. Default is True.
            calc_waveform_metrics (bool):
                Whether to calculate waveform metrics. Default is True.
        """
        if not self.hasEvent(eventid):
            fmt = 'No event matching %s found in workspace.'
            raise KeyError(fmt % eventid)

        if streams is None:
            streams = self.getStreams(
                eventid, stations=stations, labels=labels)

        event = self.getEvent(eventid)

        # Load the rupture file
        origin = Origin({
            'id': event.id,
            'netid': '',
            'network': '',
            'lat': event.latitude,
            'lon': event.longitude,
            'depth': event.depth_km,
            'locstring': '',
            'mag': event.magnitude,
            'time': event.time
        })
        rupture = get_rupture(origin, rupture_file)

        vs30_grids = None
        if config is not None:
            if 'vs30' in config['metrics']:
                vs30_grids = config['metrics']['vs30']
                for vs30_name in vs30_grids:
                    vs30_grids[vs30_name]['grid_object'] = GMTGrid.load(
                        vs30_grids[vs30_name]['file'])

        for stream in streams:
            instrument = stream.get_id()
            logging.info('Calculating stream metrics for %s...' % instrument)

            try:
                summary = StationSummary.from_config(
                    stream, event=event, config=config,
                    calc_waveform_metrics=calc_waveform_metrics,
                    calc_station_metrics=calc_station_metrics,
                    rupture=rupture, vs30_grids=vs30_grids)
            except BaseException as pgme:
                fmt = ('Could not create stream metrics for event %s, '
                       'instrument %s: "%s"')
                logging.warning(fmt % (eventid, instrument, str(pgme)))
                continue

            if calc_waveform_metrics and stream.passed:
                xmlstr = summary.get_metric_xml()
                if stream_label is not None:
                    tag = '%s_%s' % (eventid, stream_label)
                else:
                    tag = stream.tag
                metricpath = '/'.join([
                    format_netsta(stream[0].stats),
                    format_nslit(stream[0].stats, stream.get_inst(), tag),
                ])
                self.insert_aux(xmlstr, 'WaveFormMetrics', metricpath)

            if calc_station_metrics:
                xmlstr = summary.get_station_xml()
                metricpath = '/'.join([
                    format_netsta(stream[0].stats),
                    format_nslit(stream[0].stats, stream.get_inst(), eventid)
                ])
                self.insert_aux(xmlstr, 'StationMetrics', metricpath)
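
The vs30 handling in calcMetrics above assumes a particular config layout. A sketch with illustrative names and paths; only the keys are taken from the code above and from Example #16:

config = {
    'metrics': {
        'vs30': {
            'global_vs30': {                      # illustrative grid name
                'file': '/data/global_vs30.grd',  # illustrative path
                'column_header': 'GlobalVs30',
                'readme_entry': 'Vs30 from a global grid',
                'units': 'm/s',
            }
        }
    }
}
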
Example #12
def test_stationsummary():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile = datafiles[0]
    origin = Origin(latitude=42.6925, longitude=173.021944)

    target_imcs = np.sort(np.asarray(['GREATER_OF_TWO_HORIZONTALS',
                                      'H1', 'H2', 'Z', 'ROTD(50.0)',
                                      'ROTD(100.0)']))
    target_imts = np.sort(np.asarray(['SA(1.0)', 'PGA', 'PGV']))
    stream = read_geonet(datafile)[0]
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        stream_summary = StationSummary.from_stream(
            stream,
            ['greater_of_two_horizontals',
             'channels',
             'rotd50',
             'rotd100',
             'invalid'],
            ['sa1.0', 'PGA', 'pgv', 'invalid'], origin)
        original_stream = stream_summary.stream
        stream_summary.stream = []
        final_stream = stream_summary.stream
        assert original_stream == final_stream
        original_code = stream_summary.station_code
        np.testing.assert_array_equal(np.sort(stream_summary.components),
                                      target_imcs)
        np.testing.assert_array_equal(np.sort(stream_summary.imts),
                                      target_imts)
        np.testing.assert_almost_equal(stream_summary.get_pgm('PGA', 'H1'),
                                       99.3173469387755, decimal=1)
        target_available = np.sort(np.asarray([
            'greater_of_two_horizontals', 'geometric_mean', 'arithmetic_mean',
            'channels', 'gmrotd', 'rotd', 'quadratic_mean',
            'radial_transverse']))
        imcs = stream_summary.available_imcs
        np.testing.assert_array_equal(np.sort(imcs), target_available)
        target_available = np.sort(np.asarray(['pga',
                                               'pgv',
                                               'sa',
                                               'arias',
                                               'fas']))
        imts = stream_summary.available_imts
        np.testing.assert_array_equal(np.sort(imts), target_available)
    test_pgms = {
        'PGV': {
            'ROTD(100.0)': 114.24894584734818,
            'ROTD(50.0)': 81.55436750525355,
            'Z': 37.47740000000001,
            'H1': 100.81460000000004,
            'H2': 68.4354,
            'GREATER_OF_TWO_HORIZONTALS': 100.81460000000004},
        'PGA': {
            'ROTD(100.0)': 100.73875535385548,
            'ROTD(50.0)': 91.40178541935455,
            'Z': 183.7722361866693,
            'H1': 99.24999872535474,
            'H2': 81.23467239067368,
            'GREATER_OF_TWO_HORIZONTALS': 99.24999872535474},
        'SA(1.0)': {
            'ROTD(100.0)': 146.9023350124098,
            'ROTD(50.0)': 106.03202302692158,
            'Z': 27.74118995438756,
            'H1': 136.25041187387063,
            'H2': 84.69296738413021,
            'GREATER_OF_TWO_HORIZONTALS': 136.25041187387063}
    }
    pgms = stream_summary.pgms
    for imt_str in test_pgms:
        for imc_str in test_pgms[imt_str]:
            imt = pgms.loc[pgms['IMT'] == imt_str]
            imc = imt.loc[imt['IMC'] == imc_str]
            results = imc.Result.tolist()
            assert len(results) == 1
            np.testing.assert_almost_equal(results[0], test_pgms[imt_str][imc_str],
                                           decimal=10)

    # Test with fas
    stream = read_geonet(datafile)[0]
    stream_summary = StationSummary.from_stream(
        stream,
        ['greater_of_two_horizontals',
         'channels',
         'geometric_mean'],
        ['sa1.0', 'PGA', 'pgv', 'fas2.0'])
    target_imcs = np.sort(np.asarray(['GREATER_OF_TWO_HORIZONTALS',
                                      'H1', 'H2', 'Z',
                                      'GEOMETRIC_MEAN']))
    target_imts = np.sort(np.asarray(['SA(1.0)',
                                      'PGA', 'PGV', 'FAS(2.0)']))
    np.testing.assert_array_equal(np.sort(stream_summary.components),
                                  target_imcs)
    np.testing.assert_array_equal(np.sort(stream_summary.imts),
                                  target_imts)

    # Test config use
    stream = read_geonet(datafile)[0]
    stream_summary = StationSummary.from_config(stream)
    target_imcs = np.sort(np.asarray(['GREATER_OF_TWO_HORIZONTALS',
                                      'H1', 'H2', 'Z']))
    target_imts = np.sort(np.asarray(['SA(1.0)', 'SA(2.0)', 'SA(3.0)',
                                      'SA(0.3)', 'PGA', 'PGV', 'FAS(1.0)', 'FAS(2.0)',
                                      'FAS(3.0)', 'FAS(0.3)']))
    assert stream_summary.smoothing == 'konno_ohmachi'
    assert stream_summary.bandwidth == 20.0
    assert stream_summary.damping == 0.05

    # test XML output
    stream = read_geonet(datafile)[0]
    imclist = ['greater_of_two_horizontals',
               'channels',
               'rotd50.0',
               'rotd100.0']
    imtlist = ['sa1.0', 'PGA', 'pgv', 'fas2.0', 'arias']
    stream_summary = StationSummary.from_stream(stream, imclist, imtlist)
    xmlstr = stream_summary.get_metric_xml()
    print(xmlstr)

    xml_station = stream_summary.get_station_xml()

    stream2 = StationSummary.from_xml(xmlstr, xml_station)
    cmp1 = np.sort(['GREATER_OF_TWO_HORIZONTALS', 'H1', 'H2', 'Z',
                    'ROTD100.0', 'ROTD50.0'])
    cmp2 = np.sort(stream2.components)
    np.testing.assert_array_equal(cmp1, cmp2)
    imt1 = np.sort(stream_summary.imts)
    imt2 = np.sort(stream2.imts)
    np.testing.assert_array_equal(imt1, imt2)
Example #13
    def calcStreamMetrics(self, eventid, stations=None,
                          labels=None, config=None):
        """Create station metrics for specified event/streams.

        Args:
            eventid (str):
                ID of event to search for in ASDF file.
            stations (list):
                List of stations to create metrics for.
            labels (list):
                List of processing labels to create metrics for.
            config (dict):
                Config dictionary.
        """
        if not self.hasEvent(eventid):
            fmt = 'No event matching %s found in workspace.'
            raise KeyError(fmt % eventid)

        streams = self.getStreams(eventid, stations=stations, labels=labels)
        event = self.getEvent(eventid)
        for stream in streams:
            tag = stream.tag
            instrument = stream.get_id()
            logging.info('Calculating stream metrics for %s...' % instrument)
            parts = tag.split('_')
            if len(parts) > 2:
                label = parts[-1]
                eventid = '_'.join(parts[0:-1])
            else:
                eventid, label = tag.split('_')
            if labels is not None and label not in labels:
                continue
            try:
                summary = StationSummary.from_config(
                    stream, event=event, config=config)
            except Exception as pgme:
                fmt = ('Could not create stream metrics for event %s, '
                       'instrument %s: "%s"')
                logging.warning(fmt % (eventid, instrument, str(pgme)))
                continue

            xmlstr = summary.get_metric_xml()

            metricpath = '/'.join([
                format_netsta(stream[0].stats),
                format_nslit(stream[0].stats, stream.get_inst(), tag),
            ])

            # This seems like a lot of effort just to store a string
            # in HDF, but other approaches failed. Suggestions are welcome.
            xmlbytes = xmlstr.encode('utf-8')
            jsonarray = np.frombuffer(xmlbytes, dtype=np.uint8)
            dtype = 'WaveFormMetrics'

            self.dataset.add_auxiliary_data(
                jsonarray,
                data_type=dtype,
                path=metricpath,
                parameters={}
            )
Example #14
def test_stationsummary():
    datafiles, _ = read_data_dir("geonet", "us1000778i",
                                 "20161113_110259_WTMC_20.V2A")
    datafile = datafiles[0]
    origin = Origin(latitude=42.6925, longitude=173.021944)

    target_imcs = np.sort(
        np.asarray([
            "GREATER_OF_TWO_HORIZONTALS", "H1", "H2", "Z", "ROTD(50.0)",
            "ROTD(100.0)"
        ]))
    target_imts = np.sort(np.asarray(["SA(1.000)", "PGA", "PGV"]))
    stream = read_geonet(datafile)[0]
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        stream_summary = StationSummary.from_stream(
            stream,
            [
                "greater_of_two_horizontals", "channels", "rotd50", "rotd100",
                "invalid"
            ],
            ["sa1.0", "PGA", "pgv", "invalid"],
            origin,
        )
        original_stream = stream_summary.stream
        stream_summary.stream = []
        final_stream = stream_summary.stream
        assert original_stream == final_stream
        original_code = stream_summary.station_code
        np.testing.assert_array_equal(np.sort(stream_summary.components),
                                      target_imcs)
        np.testing.assert_array_equal(np.sort(stream_summary.imts),
                                      target_imts)
        np.testing.assert_almost_equal(stream_summary.get_pgm("PGA", "H1"),
                                       99.3173469387755,
                                       decimal=1)
        target_available = np.sort(
            np.asarray([
                "greater_of_two_horizontals",
                "geometric_mean",
                "arithmetic_mean",
                "channels",
                "gmrotd",
                "rotd",
                "quadratic_mean",
                "radial_transverse",
            ]))
        imcs = stream_summary.available_imcs
        np.testing.assert_array_equal(np.sort(imcs), target_available)
        target_available = np.sort(
            np.asarray([
                "pga", "pgv", "sa", "arias", "fas", "duration",
                "sorted_duration"
            ]))
        imts = stream_summary.available_imts
        np.testing.assert_array_equal(np.sort(imts), target_available)
    test_pgms = {
        "PGV": {
            "ROTD(100.0)": 114.24894584734818,
            "ROTD(50.0)": 81.55436750525355,
            "Z": 37.47740000000001,
            "H1": 100.81460000000004,
            "H2": 68.4354,
            "GREATER_OF_TWO_HORIZONTALS": 100.81460000000004,
        },
        "PGA": {
            "ROTD(100.0)": 100.73875535385548,
            "ROTD(50.0)": 91.40178541935455,
            "Z": 183.7722361866693,
            "H1": 99.24999872535474,
            "H2": 81.23467239067368,
            "GREATER_OF_TWO_HORIZONTALS": 99.24999872535474,
        },
        "SA(1.000)": {
            "ROTD(100.0)": 146.9023350124098,
            "ROTD(50.0)": 106.03202302692158,
            "Z": 27.74118995438756,
            "H1": 136.25041187387063,
            "H2": 84.69296738413021,
            "GREATER_OF_TWO_HORIZONTALS": 136.25041187387063,
        },
    }
    pgms = stream_summary.pgms
    for imt_str in test_pgms:
        for imc_str in test_pgms[imt_str]:
            result = pgms.loc[imt_str, imc_str].Result
            np.testing.assert_almost_equal(result,
                                           test_pgms[imt_str][imc_str],
                                           decimal=10)

    # Test with fas
    stream = read_geonet(datafile)[0]
    stream_summary = StationSummary.from_stream(
        stream,
        ["greater_of_two_horizontals", "channels", "geometric_mean"],
        ["sa1.0", "PGA", "pgv", "fas2.0"],
    )
    target_imcs = np.sort(
        np.asarray(
            ["GEOMETRIC_MEAN", "GREATER_OF_TWO_HORIZONTALS", "H1", "H2", "Z"]))
    target_imts = np.sort(np.asarray(["SA(1.000)", "PGA", "PGV",
                                      "FAS(2.000)"]))
    np.testing.assert_array_equal(np.sort(stream_summary.components),
                                  target_imcs)
    np.testing.assert_array_equal(np.sort(stream_summary.imts), target_imts)

    # Test config use
    stream = read_geonet(datafile)[0]
    stream_summary = StationSummary.from_config(stream)
    target_imcs = np.sort(
        np.asarray(["GREATER_OF_TWO_HORIZONTALS", "H1", "H2", "Z"]))
    target_imts = np.sort(
        np.asarray([
            "SA(1.000)",
            "SA(2.000)",
            "SA(3.000)",
            "SA(0.300)",
            "PGA",
            "PGV",
            "FAS(1.000)",
            "FAS(2.000)",
            "FAS(3.000)",
            "FAS(0.300)",
        ]))
    assert stream_summary.smoothing == "konno_ohmachi"
    assert stream_summary.bandwidth == 20.0
    assert stream_summary.damping == 0.05

    # test XML output
    stream = read_geonet(datafile)[0]
    imclist = [
        "greater_of_two_horizontals", "channels", "rotd50.0", "rotd100.0"
    ]
    imtlist = ["sa1.0", "PGA", "pgv", "fas2.0", "arias"]
    stream_summary = StationSummary.from_stream(stream, imclist, imtlist)
    xmlstr = stream_summary.get_metric_xml()

    xml_station = stream_summary.get_station_xml()

    stream2 = StationSummary.from_xml(xmlstr, xml_station)
    cmp1 = np.sort([
        "GREATER_OF_TWO_HORIZONTALS", "H1", "H2", "ROTD100.0", "ROTD50.0", "Z"
    ])
    cmp2 = np.sort(stream2.components)
    np.testing.assert_array_equal(cmp1, cmp2)
    imt1 = np.sort(stream_summary.imts)
    imt2 = np.sort(stream2.imts)
    np.testing.assert_array_equal(imt1, imt2)
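
The expected IMT labels change between Example #12 ('SA(1.0)') and Example #14 ('SA(1.000)'). A hypothetical helper for comparing results across versions by normalizing the period formatting:

import re

def normalize_imt(imt):
    # Zero-pad the period so 'SA(1.0)' and 'SA(1.000)' compare equal.
    match = re.match(r'([A-Z]+)\(([\d.]+)\)$', imt)
    if match is None:
        return imt
    name, period = match.groups()
    return '%s(%.3f)' % (name, float(period))

assert normalize_imt('SA(1.0)') == normalize_imt('SA(1.000)')
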
Example #15
    def _event_station_metrics(self, event):
        self.eventid = event.id
        logging.info('Computing station metrics for event %s...' %
                     self.eventid)
        event_dir = os.path.join(self.gmrecords.data_path, self.eventid)
        workname = os.path.join(event_dir, WORKSPACE_NAME)
        if not os.path.isfile(workname):
            logging.info(
                'No workspace file found for event %s. Please run '
                'subcommand \'assemble\' to generate workspace file.' %
                self.eventid)
            logging.info('Continuing to next event.')
            return event.id

        self.workspace = StreamWorkspace.open(workname)
        self._get_pstreams()

        rupture_file = get_rupture_file(event_dir)
        origin = Origin({
            'id': self.eventid,
            'netid': '',
            'network': '',
            'lat': event.latitude,
            'lon': event.longitude,
            'depth': event.depth_km,
            'locstring': '',
            'mag': event.magnitude,
            'time': event.time
        })
        rupture = get_rupture(origin, rupture_file)

        if not hasattr(self, 'pstreams'):
            logging.info('No processed waveforms available. No station '
                         'metrics computed.')
            self.workspace.close()
            return event.id

        for stream in self.pstreams:
            logging.info('Calculating station metrics for %s...' %
                         stream.get_id())
            summary = StationSummary.from_config(stream,
                                                 event=event,
                                                 config=self.gmrecords.conf,
                                                 calc_waveform_metrics=False,
                                                 calc_station_metrics=True,
                                                 rupture=rupture,
                                                 vs30_grids=self.vs30_grids)
            xmlstr = summary.get_station_xml()
            metricpath = '/'.join([
                format_netsta(stream[0].stats),
                format_nslit(stream[0].stats, stream.get_inst(), self.eventid)
            ])
            self.workspace.insert_aux(xmlstr,
                                      'StationMetrics',
                                      metricpath,
                                      overwrite=self.gmrecords.args.overwrite)
            logging.info('Added station metrics to workspace files '
                         'with tag \'%s\'.' % self.gmrecords.args.label)

        self.workspace.close()
        return event.id
Example #16
    def _event_station_metrics(self, event):
        self.eventid = event.id
        logging.info('Computing station metrics for event %s...' %
                     self.eventid)
        event_dir = os.path.join(self.gmrecords.data_path, self.eventid)
        workname = os.path.join(event_dir, WORKSPACE_NAME)
        if not os.path.isfile(workname):
            logging.info(
                'No workspace file found for event %s. Please run '
                'subcommand \'assemble\' to generate workspace file.' %
                self.eventid)
            logging.info('Continuing to next event.')
            return event.id

        self.workspace = StreamWorkspace.open(workname)
        self._get_pstreams()

        if not (hasattr(self, 'pstreams') and len(self.pstreams) > 0):
            logging.info('No streams found. Nothing to do. Goodbye.')
            self.workspace.close()
            return event.id

        rupture_file = get_rupture_file(event_dir)
        origin = Origin({
            'id': self.eventid,
            'netid': '',
            'network': '',
            'lat': event.latitude,
            'lon': event.longitude,
            'depth': event.depth_km,
            'locstring': '',
            'mag': event.magnitude,
            'time': event.time
        })
        self.origin = origin
        rupture = get_rupture(origin, rupture_file)

        sta_lats = []
        sta_lons = []
        sta_elev = []
        self.sta_repi = []
        self.sta_rhyp = []
        self.sta_baz = []
        for st in self.pstreams:
            sta_lats.append(st[0].stats.coordinates.latitude)
            sta_lons.append(st[0].stats.coordinates.longitude)
            sta_elev.append(st[0].stats.coordinates.elevation)
            geo_tuple = gps2dist_azimuth(st[0].stats.coordinates.latitude,
                                         st[0].stats.coordinates.longitude,
                                         origin.lat, origin.lon)
            self.sta_repi.append(geo_tuple[0] / M_PER_KM)
            self.sta_baz.append(geo_tuple[1])
            self.sta_rhyp.append(
                distance(st[0].stats.coordinates.longitude,
                         st[0].stats.coordinates.latitude,
                         -st[0].stats.coordinates.elevation / M_PER_KM,
                         origin.lon, origin.lat, origin.depth))

        if isinstance(rupture, PointRupture):
            self._get_ps2ff_splines()
            rjb_hat = self.rjb_spline(self.sta_repi)
            rjb_mean = rjb_hat[0]
            rjb_var = rjb_hat[1]
            rrup_hat = self.rrup_spline(self.sta_repi)
            rrup_mean = rrup_hat[0]
            rrup_var = rrup_hat[1]
            gc2_rx = np.full_like(rjb_mean, np.nan)
            gc2_ry = np.full_like(rjb_mean, np.nan)
            gc2_ry0 = np.full_like(rjb_mean, np.nan)
            gc2_U = np.full_like(rjb_mean, np.nan)
            gc2_T = np.full_like(rjb_mean, np.nan)
        else:
            logging.info('******************************')
            logging.info('* Found rupture              *')
            logging.info('******************************')
            sta_lons = np.array(sta_lons)
            sta_lats = np.array(sta_lats)
            elev = np.full_like(sta_lons, ELEVATION_FOR_DISTANCE_CALCS)
            rrup_mean, rrup_var = rupture.computeRrup(sta_lons, sta_lats, elev)
            rjb_mean, rjb_var = rupture.computeRjb(sta_lons, sta_lats, elev)
            rrup_var = np.full_like(rrup_mean, np.nan)
            rjb_var = np.full_like(rjb_mean, np.nan)
            gc2_dict = rupture.computeGC2(sta_lons, sta_lats, elev)
            gc2_rx = gc2_dict['rx']
            gc2_ry = gc2_dict['ry']
            gc2_ry0 = gc2_dict['ry0']
            gc2_U = gc2_dict['U']
            gc2_T = gc2_dict['T']

            # If we don't have a point rupture, then back azimuth needs
            # to be calculated to the closest point on the rupture
            self.sta_baz = []
            for i in range(len(self.pstreams)):
                dists = []
                bazs = []
                for quad in rupture._quadrilaterals:
                    P0, P1, P2, P3 = quad
                    for point in [P0, P1]:
                        dist, az, baz = gps2dist_azimuth(
                            point.y, point.x, sta_lats[i], sta_lons[i])
                        dists.append(dist)
                        bazs.append(baz)
                self.sta_baz.append(bazs[np.argmin(dists)])

        for i, stream in enumerate(self.pstreams):
            logging.info('Calculating station metrics for %s...' %
                         stream.get_id())
            summary = StationSummary.from_config(stream,
                                                 event=event,
                                                 config=self.gmrecords.conf,
                                                 calc_waveform_metrics=False,
                                                 calc_station_metrics=False,
                                                 rupture=rupture,
                                                 vs30_grids=self.vs30_grids)

            summary._distances = {
                'epicentral': self.sta_repi[i],
                'hypocentral': self.sta_rhyp[i],
                'rupture': rrup_mean[i],
                'rupture_var': rrup_var[i],
                'joyner_boore': rjb_mean[i],
                'joyner_boore_var': rjb_var[i],
                'gc2_rx': gc2_rx[i],
                'gc2_ry': gc2_ry[i],
                'gc2_ry0': gc2_ry0[i],
                'gc2_U': gc2_U[i],
                'gc2_T': gc2_T[i]
            }
            summary._back_azimuth = self.sta_baz[i]
            if self.vs30_grids is not None:
                for vs30_name in self.vs30_grids.keys():
                    tmpgrid = self.vs30_grids[vs30_name]
                    summary._vs30[vs30_name] = {
                        'value':
                        tmpgrid['grid_object'].getValue(
                            float(sta_lats[i]), float(sta_lons[i])),
                        'column_header':
                        tmpgrid['column_header'],
                        'readme_entry':
                        tmpgrid['readme_entry'],
                        'units':
                        tmpgrid['units']
                    }

            xmlstr = summary.get_station_xml()
            metricpath = '/'.join([
                format_netsta(stream[0].stats),
                format_nslit(stream[0].stats, stream.get_inst(), self.eventid)
            ])
            self.workspace.insert_aux(xmlstr,
                                      'StationMetrics',
                                      metricpath,
                                      overwrite=self.gmrecords.args.overwrite)
            logging.info('Added station metrics to workspace files '
                         'with tag \'%s\'.' % self.gmrecords.args.label)

        self.workspace.close()
        return event.id
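
The distance bookkeeping above relies on ObsPy's gps2dist_azimuth, which returns a tuple of (distance in meters, azimuth A to B, azimuth B to A). A minimal sketch with illustrative coordinates:

from obspy.geodetics import gps2dist_azimuth

M_PER_KM = 1000.0

sta_lat, sta_lon = 35.0, 139.0        # illustrative station
origin_lat, origin_lon = 36.0, 140.0  # illustrative epicenter

# Distance from station to epicenter; the second element is the
# azimuth from the station toward the epicenter, used above as the
# station back azimuth.
dist_m, az, baz = gps2dist_azimuth(sta_lat, sta_lon,
                                   origin_lat, origin_lon)
repi_km = dist_m / M_PER_KM
print(repi_km, az)
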
Example #17
    def getTables(self, label, config=None):
        '''Retrieve dataframes containing event information and IMC/IMT metrics.

        Args:
            label (str): Calculate metrics only for the given label.
            config (dict): Config dictionary.

        Returns:
            tuple: Elements are:
                   - pandas DataFrame containing event information:
                     - id Event ID
                     - time Time of origin
                     - latitude Latitude of origin
                     - longitude Longitude of origin
                     - depth Depth of origin (km)
                     - magnitude Magnitude at origin
                   - dictionary of DataFrames, where keys are IMCs and
                     values are DataFrames with columns:
                     - EarthquakeId Earthquake id from event table
                     - Network Network code
                     - StationCode Station code
                     - StationDescription Long form description of station
                       location (may be blank)
                     - StationLatitude Station latitude
                     - StationLongitude Station longitude
                     - StationElevation Station elevation
                     - SamplingRate Data sampling rate in Hz
                     - EpicentralDistance Distance from origin epicenter
                       (surface) to station
                     - HypocentralDistance Distance from origin hypocenter
                       (depth) to station
                     - HN1Lowpass Low pass filter corner frequency for first
                       horizontal channel
                     - HN1Highpass High pass filter corner frequency for first
                       horizontal channel
                     - HN2Lowpass Low pass filter corner frequency for second
                       horizontal channel
                     - HN2Highpass High pass filter corner frequency for
                       second horizontal channel
                     - ...desired IMTs (PGA, PGV, SA(0.3), etc.)
        '''
        event_table = pd.DataFrame(columns=EVENT_TABLE_COLUMNS)
        imc_tables = {}
        for eventid in self.getEventIds():
            event = self.getEvent(eventid)
            edict = {
                'id': event.id,
                'time': event.time,
                'latitude': event.latitude,
                'longitude': event.longitude,
                'depth': event.depth_km,
                'magnitude': event.magnitude
            }
            event_table = pd.concat(
                [event_table, pd.DataFrame([edict])], ignore_index=True)
            streams = self.getStreams(eventid, labels=[label])
            for stream in streams:
                if not stream.passed:
                    continue
                if config is None:
                    station = stream[0].stats.station
                    summary = self.getStreamMetrics(eventid, station, label)
                else:
                    summary = StationSummary.from_config(
                        stream, event=event, config=config)

                imclist = summary.pgms['IMC'].unique().tolist()
                imtlist = summary.pgms['IMT'].unique().tolist()
                for imc in imclist:
                    if imc not in imc_tables:
                        cols = FLATFILE_COLUMNS + imtlist
                        imc_tables[imc] = pd.DataFrame(columns=cols)
                    row = _get_table_row(stream, summary, event, imc)
                    if not len(row):
                        continue
                    imc_tables[imc] = pd.concat(
                        [imc_tables[imc], pd.DataFrame([row])],
                        ignore_index=True)

        return (event_table, imc_tables)
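
A short usage sketch for getTables, assuming a workspace populated and labeled as in the tests above:

def summarize_tables(workspace, label='processed'):
    # workspace is an open StreamWorkspace, as built in the tests above.
    event_table, imc_tables = workspace.getTables(label)
    print(event_table[['id', 'magnitude']])
    for imc, table in imc_tables.items():
        # One row per passing stream; IMT columns (PGA, PGV, ...) are
        # appended to the flatfile columns.
        print(imc, table['PGA'].describe())
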
Example #18
def test_stationsummary():
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile = datafiles[0]
    origin = Origin(latitude=42.6925, longitude=173.021944)

    target_imcs = np.sort(np.asarray(['GREATER_OF_TWO_HORIZONTALS',
                                      'HN1', 'HN2', 'HNZ', 'ROTD(50.0)',
                                      'ROTD(100.0)']))
    target_imts = np.sort(np.asarray(['SA(1.0)', 'PGA', 'PGV']))
    stream = read_geonet(datafile)[0]
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        stream_summary = StationSummary.from_stream(
            stream,
            ['greater_of_two_horizontals',
             'channels',
             'rotd50',
             'rotd100',
             'invalid'],
            ['sa1.0', 'PGA', 'pgv', 'invalid'], origin)
        original_stream = stream_summary.stream
        stream_summary.stream = []
        final_stream = stream_summary.stream
        assert original_stream == final_stream
        original_code = stream_summary.station_code
        np.testing.assert_array_equal(np.sort(stream_summary.components),
                                      target_imcs)
        np.testing.assert_array_equal(np.sort(stream_summary.imts),
                                      target_imts)
        np.testing.assert_almost_equal(stream_summary.get_pgm('PGA', 'HN1'),
                                       99.3173469387755, decimal=1)
        target_available = np.sort(np.asarray([
            'greater_of_two_horizontals', 'geometric_mean', 'arithmetic_mean',
            'channels', 'gmrotd', 'rotd', 'quadratic_mean',
            'radial_transverse']))
        imcs = stream_summary.available_imcs
        np.testing.assert_array_equal(np.sort(imcs), target_available)
        target_available = np.sort(np.asarray(['pga',
                                               'pgv',
                                               'sa',
                                               'arias',
                                               'fas']))
        imts = stream_summary.available_imts
        np.testing.assert_array_equal(np.sort(imts), target_available)
    test_pgms = {
        'PGV': {
            'ROTD(100.0)': 114.24894584734818,
            'ROTD(50.0)': 81.55436750525355,
            'HNZ': 37.47740000000001,
            'HN1': 100.81460000000004,
            'HN2': 68.4354,
            'GREATER_OF_TWO_HORIZONTALS': 100.81460000000004},
        'PGA': {
            'ROTD(100.0)': 100.73875535385548,
            'ROTD(50.0)': 91.40178541935455,
            'HNZ': 183.7722361866693,
            'HN1': 99.24999872535474,
            'HN2': 81.23467239067368,
            'GREATER_OF_TWO_HORIZONTALS': 99.24999872535474},
        'SA(1.0)': {
            'ROTD(100.0)': 146.9023350124098,
            'ROTD(50.0)': 106.03202302692158,
            'HNZ': 27.74118995438756,
            'HN1': 136.25041187387063,
            'HN2': 84.69296738413021,
            'GREATER_OF_TWO_HORIZONTALS': 136.25041187387063}
    }
    pgms = stream_summary.pgms
    for imt_str in test_pgms:
        for imc_str in test_pgms[imt_str]:
            imt = pgms.loc[pgms['IMT'] == imt_str]
            imc = imt.loc[imt['IMC'] == imc_str]
            results = imc.Result.tolist()
            assert len(results) == 1
            np.testing.assert_almost_equal(results[0], test_pgms[imt_str][imc_str],
                                           decimal=10)

    # Test with fas
    stream = read_geonet(datafile)[0]
    stream_summary = StationSummary.from_stream(
        stream,
        ['greater_of_two_horizontals',
         'channels',
         'geometric_mean'],
        ['sa1.0', 'PGA', 'pgv', 'fas2.0'])
    target_imcs = np.sort(np.asarray(['GREATER_OF_TWO_HORIZONTALS',
                                      'HN1', 'HN2', 'HNZ',
                                      'GEOMETRIC_MEAN']))
    target_imts = np.sort(np.asarray(['SA(1.0)',
                                      'PGA', 'PGV', 'FAS(2.0)']))
    np.testing.assert_array_equal(np.sort(stream_summary.components),
                                  target_imcs)
    np.testing.assert_array_equal(np.sort(stream_summary.imts),
                                  target_imts)

    # Test config use
    stream = read_geonet(datafile)[0]
    stream_summary = StationSummary.from_config(stream)
    target_imcs = np.sort(np.asarray(['GREATER_OF_TWO_HORIZONTALS',
                                      'HN1', 'HN2', 'HNZ']))
    target_imts = np.sort(np.asarray(['SA(1.0)', 'SA(2.0)', 'SA(3.0)',
                                      'SA(0.3)', 'PGA', 'PGV', 'FAS(1.0)', 'FAS(2.0)',
                                      'FAS(3.0)', 'FAS(0.3)']))
    assert stream_summary.smoothing == 'konno_ohmachi'
    assert stream_summary.bandwidth == 20.0
    assert stream_summary.damping == 0.05

    # test XML output
    stream = read_geonet(datafile)[0]
    imclist = ['greater_of_two_horizontals',
               'channels',
               'rotd50.0',
               'rotd100.0']
    imtlist = ['sa1.0', 'PGA', 'pgv', 'fas2.0', 'arias']
    stream_summary = StationSummary.from_stream(stream, imclist, imtlist)
    xmlstr = stream_summary.getMetricXML()
    print(xmlstr.decode('utf-8'))

    stream2 = StationSummary.fromMetricXML(xmlstr)
    cmp1 = np.sort(['GREATER_OF_TWO_HORIZONTALS', 'HN1', 'HN2', 'HNZ',
                    'ROTD100.0', 'ROTD50.0'])
    cmp2 = np.sort(stream2.components)
    np.testing.assert_array_equal(cmp1, cmp2)
    imt1 = np.sort(stream_summary.imts)
    imt2 = np.sort(stream2.imts)
    np.testing.assert_array_equal(imt1, imt2)