Code example #1
0
    def getStreamMetrics(self, eventid, station, label):
        """Extract a StationSummary object from the ASDF file for a given input Stream.

        Args:
            eventid (str):
                ID of event to search for in ASDF file.
            station (str):
                Station to return metrics from.
            label (str):
                Processing label to return metrics from.

        Returns:
            StationSummary: Object containing all stream metrics, or None if
            the requested metrics are not present in the workspace.

        Raises:
            KeyError: If the workspace contains no StationMetrics group.
        """
        # Bail out early when no waveform metrics were ever written.
        # Previously execution fell through after the warning and raised on
        # the attribute access below instead of returning None.
        if 'WaveFormMetrics' not in self.dataset.auxiliary_data:
            logging.warning('Waveform metrics not found in workspace, '
                            'cannot get stream metrics.')
            return None
        auxholder = self.dataset.auxiliary_data.WaveFormMetrics
        stream_path = '%s_%s_%s' % (eventid, station.lower(), label)
        if stream_path not in auxholder:
            logging.warning(
                'Stream path (%s) not in WaveFormMetrics auxiliary_data.' %
                stream_path)
            return None

        # Metrics XML is stored as an array of byte values; rebuild the
        # string and re-encode to UTF-8 bytes for the XML parser.
        bytelist = auxholder[stream_path].data[:].tolist()
        xml_stream = ''.join([chr(b) for b in bytelist])
        xml_stream = xml_stream.encode('utf-8')

        if 'StationMetrics' not in self.dataset.auxiliary_data:
            raise KeyError('Station metrics not found in workspace.')
        auxholder = self.dataset.auxiliary_data.StationMetrics
        stream_path = '%s_%s_%s' % (eventid, station.lower(), label)
        if stream_path not in auxholder:
            logging.warning(
                'Stream path (%s) not in StationMetrics auxiliary_data.' %
                stream_path)
            return None

        bytelist = auxholder[stream_path].data[:].tolist()
        xml_station = ''.join([chr(b) for b in bytelist])
        xml_station = xml_station.encode('utf-8')

        summary = StationSummary.from_xml(xml_stream, xml_station)
        return summary
Code example #2
0
    def getStreamMetrics(self, eventid, network, station, label, streams=None,
                         stream_label=None, config=None):
        """Extract a StationSummary object from the ASDF file for a given
        input Stream.

        Args:
            eventid (str):
                ID of event to search for in ASDF file.
            network (str):
                Network to return metrics from.
            station (str):
                Station to return metrics from.
            label (str):
                Processing label to return metrics from.
            streams (StreamCollection):
                Optional StreamCollection object to get metrics for.
            stream_label (str):
                Label to be used in the metrics path when providing a
                StreamCollection.
            config (dict):
                Configuration options.

        Returns:
            StationSummary: Object containing all stream metrics or None.
        """
        if 'WaveFormMetrics' not in self.dataset.auxiliary_data:
            msg = ('Waveform metrics not found in workspace, '
                   'cannot get stream metrics.')
            logging.warning(msg)
            return None

        auxholder = self.dataset.auxiliary_data.WaveFormMetrics

        # Get the streams matching the eventid, station, and label unless the
        # caller already supplied them.
        if streams is None:
            streams = self.getStreams(eventid, stations=[station],
                                      labels=[label], config=config)

        # Only keep streams that passed QA checks and match the requested
        # network.
        streams = [st for st in streams if
                   (st.passed and st[0].stats.network == network)]

        if not len(streams):
            fmt = ('Stream matching event ID %s, '
                   'station ID %s, and processing label %s not found in '
                   'workspace.')
            msg = fmt % (eventid, station, label)
            logging.warning(msg)
            return None

        if stream_label is not None:
            stream_tag = '%s_%s' % (eventid, stream_label)
        else:
            stream_tag = streams[0].tag

        metricpath = format_nslit(streams[0][0].stats,
                                  streams[0].get_inst(),
                                  stream_tag)
        top = format_netsta(streams[0][0].stats)
        # Guard clause: previously a missing network/station group returned
        # silently (bare 'return'); warn so callers can tell why no metrics
        # came back, and return None consistently with the other paths.
        if top not in auxholder:
            logging.warning(
                'Network/station (%s) not in WaveFormMetrics '
                'auxiliary_data.' % top)
            return None
        tauxholder = auxholder[top]
        if metricpath not in tauxholder:
            fmt = ('Stream metrics path (%s) not in WaveFormMetrics '
                   'auxiliary_data.')
            logging.warning(fmt % metricpath)
            return None

        # Metrics XML is stored as an array of byte values; rebuild the
        # string and re-encode to UTF-8 bytes for the XML parser.
        bytelist = tauxholder[metricpath].data[:].tolist()
        xml_stream = ''.join([chr(b) for b in bytelist])
        xml_stream = xml_stream.encode('utf-8')

        if 'StationMetrics' not in self.dataset.auxiliary_data:
            logging.warning('Station metrics not found in workspace.')
            return None
        auxholder = self.dataset.auxiliary_data.StationMetrics
        station_path = format_nslit(
            streams[0][0].stats, streams[0].get_inst(), eventid)
        # Same guard as above for the StationMetrics group.
        if top not in auxholder:
            logging.warning(
                'Network/station (%s) not in StationMetrics '
                'auxiliary_data.' % top)
            return None
        tauxholder = auxholder[top]
        if station_path not in tauxholder:
            logging.warning(
                'Stream path (%s) not in StationMetrics auxiliary_data.'
                % station_path)
            return None

        bytelist = tauxholder[station_path].data[:].tolist()
        xml_station = ''.join([chr(b) for b in bytelist])
        xml_station = xml_station.encode('utf-8')

        summary = StationSummary.from_xml(xml_stream, xml_station)
        return summary
def test_stationsummary():
    """End-to-end test of StationSummary: IMC/IMT selection, PGM values,
    FAS handling, config-driven construction, and metric-XML round-tripping
    using a GeoNet V2A record."""
    datafiles, _ = read_data_dir(
        'geonet', 'us1000778i', '20161113_110259_WTMC_20.V2A')
    datafile = datafiles[0]
    origin = Origin(latitude=42.6925, longitude=173.021944)

    # Expected intensity-measure components/types once the 'invalid'
    # entries passed below are dropped.
    target_imcs = np.sort(np.asarray(['GREATER_OF_TWO_HORIZONTALS',
                                      'H1', 'H2', 'Z', 'ROTD(50.0)',
                                      'ROTD(100.0)']))
    target_imts = np.sort(np.asarray(['SA(1.0)', 'PGA', 'PGV']))
    stream = read_geonet(datafile)[0]
    with warnings.catch_warnings():
        # The 'invalid' IMC/IMT names are expected to emit warnings;
        # silence them for the duration of this block.
        warnings.simplefilter("ignore")
        stream_summary = StationSummary.from_stream(
            stream,
            ['greater_of_two_horizontals',
             'channels',
             'rotd50',
             'rotd100',
             'invalid'],
            ['sa1.0', 'PGA', 'pgv', 'invalid'], origin)
        # The stream attribute should be effectively read-only: assigning
        # an empty list must leave the original stream in place.
        original_stream = stream_summary.stream
        stream_summary.stream = []
        final_stream = stream_summary.stream
        assert original_stream == final_stream
        # NOTE(review): original_code is never used after this assignment —
        # confirm whether an assertion on station_code was intended.
        original_code = stream_summary.station_code
        np.testing.assert_array_equal(np.sort(stream_summary.components),
                                      target_imcs)
        np.testing.assert_array_equal(np.sort(stream_summary.imts),
                                      target_imts)
        np.testing.assert_almost_equal(stream_summary.get_pgm('PGA', 'H1'),
                                       99.3173469387755, decimal=1)
        # Catalog of every supported IMC, independent of this stream.
        target_available = np.sort(np.asarray([
            'greater_of_two_horizontals', 'geometric_mean', 'arithmetic_mean',
            'channels', 'gmrotd', 'rotd', 'quadratic_mean',
            'radial_transverse']))
        imcs = stream_summary.available_imcs
        np.testing.assert_array_equal(np.sort(imcs), target_available)
        # Catalog of every supported IMT.
        target_available = np.sort(np.asarray(['pga',
                                               'pgv',
                                               'sa',
                                               'arias',
                                               'fas']))
        imts = stream_summary.available_imts
        np.testing.assert_array_equal(np.sort(imts), target_available)
    # Reference peak-ground-motion values, keyed by IMT then IMC.
    test_pgms = {
        'PGV': {
            'ROTD(100.0)': 114.24894584734818,
            'ROTD(50.0)': 81.55436750525355,
            'Z': 37.47740000000001,
            'H1': 100.81460000000004,
            'H2': 68.4354,
            'GREATER_OF_TWO_HORIZONTALS': 100.81460000000004},
        'PGA': {
            'ROTD(100.0)': 100.73875535385548,
            'ROTD(50.0)': 91.40178541935455,
            'Z': 183.7722361866693,
            'H1': 99.24999872535474,
            'H2': 81.23467239067368,
            'GREATER_OF_TWO_HORIZONTALS': 99.24999872535474},
        'SA(1.0)': {
            'ROTD(100.0)': 146.9023350124098,
            'ROTD(50.0)': 106.03202302692158,
            'Z': 27.74118995438756,
            'H1': 136.25041187387063,
            'H2': 84.69296738413021,
            'GREATER_OF_TWO_HORIZONTALS': 136.25041187387063}
    }
    pgms = stream_summary.pgms
    # Each (IMT, IMC) pair must appear exactly once in the pgms table and
    # match the reference value.
    for imt_str in test_pgms:
        for imc_str in test_pgms[imt_str]:
            imt = pgms.loc[pgms['IMT'] == imt_str]
            imc = imt.loc[imt['IMC'] == imc_str]
            results = imc.Result.tolist()
            assert len(results) == 1
            np.testing.assert_almost_equal(results[0], test_pgms[imt_str][imc_str],
                                           decimal=10)

    # Test with fas
    stream = read_geonet(datafile)[0]
    stream_summary = StationSummary.from_stream(
        stream,
        ['greater_of_two_horizontals',
         'channels',
         'geometric_mean'],
        ['sa1.0', 'PGA', 'pgv', 'fas2.0'])
    target_imcs = np.sort(np.asarray(['GREATER_OF_TWO_HORIZONTALS',
                                      'H1', 'H2', 'Z',
                                      'GEOMETRIC_MEAN']))
    target_imts = np.sort(np.asarray(['SA(1.0)',
                                      'PGA', 'PGV', 'FAS(2.0)']))
    np.testing.assert_array_equal(np.sort(stream_summary.components),
                                  target_imcs)
    np.testing.assert_array_equal(np.sort(stream_summary.imts),
                                  target_imts)

    # Test config use
    stream = read_geonet(datafile)[0]
    stream_summary = StationSummary.from_config(stream)
    # NOTE(review): target_imcs/target_imts below are computed but never
    # asserted against the summary — confirm whether assertions were
    # intended here.
    target_imcs = np.sort(np.asarray(['GREATER_OF_TWO_HORIZONTALS',
                                      'H1', 'H2', 'Z']))
    target_imts = np.sort(np.asarray(['SA(1.0)', 'SA(2.0)', 'SA(3.0)',
                                      'SA(0.3)', 'PGA', 'PGV', 'FAS(1.0)', 'FAS(2.0)',
                                      'FAS(3.0)', 'FAS(0.3)']))
    assert(stream_summary.smoothing == 'konno_ohmachi')
    assert(stream_summary.bandwidth == 20.0)
    assert(stream_summary.damping == 0.05)

    # test XML output
    stream = read_geonet(datafile)[0]
    imclist = ['greater_of_two_horizontals',
               'channels',
               'rotd50.0',
               'rotd100.0']
    imtlist = ['sa1.0', 'PGA', 'pgv', 'fas2.0', 'arias']
    stream_summary = StationSummary.from_stream(stream, imclist, imtlist)
    xmlstr = stream_summary.get_metric_xml()
    # NOTE(review): leftover debug print — consider removing.
    print(xmlstr)

    xml_station = stream_summary.get_station_xml()

    # Round-trip: parse the XML back and check IMCs/IMTs survive intact.
    stream2 = StationSummary.from_xml(xmlstr, xml_station)
    cmp1 = np.sort(['GREATER_OF_TWO_HORIZONTALS', 'H1', 'H2', 'Z',
                    'ROTD100.0', 'ROTD50.0'])
    cmp2 = np.sort(stream2.components)
    np.testing.assert_array_equal(cmp1, cmp2)
    imt1 = np.sort(stream_summary.imts)
    imt2 = np.sort(stream2.imts)
    np.testing.assert_array_equal(imt1, imt2)
Code example #4
0
    def getStreamMetrics(self, eventid, network, station, label):
        """Extract a StationSummary object from the ASDF file for a given input Stream.

        Args:
            eventid (str):
                ID of event to search for in ASDF file.
            network (str):
                Network to return metrics from.
            station (str):
                Station to return metrics from.
            label (str):
                Processing label to return metrics from.

        Returns:
            StationSummary: Object containing all stream metrics or None.

        Raises:
            KeyError: If the workspace contains no StationMetrics group.
        """
        # Bail out early when no waveform metrics were ever written.
        # Previously execution fell through after the warning and raised on
        # the attribute access below instead of returning None.
        if 'WaveFormMetrics' not in self.dataset.auxiliary_data:
            logging.warning('Waveform metrics not found in workspace, '
                            'cannot get stream metrics.')
            return None
        auxholder = self.dataset.auxiliary_data.WaveFormMetrics

        # Get the streams matching the eventid, station, and label.
        streams = self.getStreams(eventid, stations=[station],
                                  labels=[label])

        # Only keep streams that passed QA checks and match the requested
        # network.
        streams = [st for st in streams if
                   (st.passed and st[0].stats.network == network)]

        if not len(streams):
            fmt = ('Stream matching event ID %s, '
                   'station ID %s, and processing label %s not found in '
                   'workspace.')
            msg = fmt % (eventid, station, label)
            logging.warning(msg)
            return None

        metricpath = format_nslit(streams[0][0].stats,
                                  streams[0].get_inst(),
                                  streams[0].tag)
        top = format_netsta(streams[0][0].stats)
        # Guard clause: previously a missing network/station group fell
        # through and hit a NameError on xml_stream at the bottom of the
        # method; warn and return None instead.
        if top not in auxholder:
            logging.warning(
                'Network/station (%s) not in WaveFormMetrics '
                'auxiliary_data.' % top)
            return None
        tauxholder = auxholder[top]
        if metricpath not in tauxholder:
            fmt = 'Stream metrics path (%s) not in WaveFormMetrics auxiliary_data.'
            logging.warning(fmt % metricpath)
            return None

        # Metrics XML is stored as an array of byte values; rebuild the
        # string and re-encode to UTF-8 bytes for the XML parser.
        bytelist = tauxholder[metricpath].data[:].tolist()
        xml_stream = ''.join([chr(b) for b in bytelist])
        xml_stream = xml_stream.encode('utf-8')

        if 'StationMetrics' not in self.dataset.auxiliary_data:
            raise KeyError('Station metrics not found in workspace.')
        auxholder = self.dataset.auxiliary_data.StationMetrics
        station_path = format_nslit(streams[0][0].stats, streams[0].get_inst(), eventid)
        # Same guard as above: avoid falling through with xml_station unset.
        if top not in auxholder:
            logging.warning(
                'Network/station (%s) not in StationMetrics '
                'auxiliary_data.' % top)
            return None
        tauxholder = auxholder[top]
        if station_path not in tauxholder:
            logging.warning(
                'Stream path (%s) not in StationMetrics auxiliary_data.'
                % station_path)
            return None

        bytelist = tauxholder[station_path].data[:].tolist()
        xml_station = ''.join([chr(b) for b in bytelist])
        xml_station = xml_station.encode('utf-8')

        summary = StationSummary.from_xml(xml_stream, xml_station)
        return summary
Code example #5
0
def test_stationsummary():
    """End-to-end test of StationSummary: IMC/IMT selection, PGM lookup via
    the (IMT, IMC) index, FAS handling, config-driven construction, and
    metric-XML round-tripping using a GeoNet V2A record."""
    datafiles, _ = read_data_dir("geonet", "us1000778i",
                                 "20161113_110259_WTMC_20.V2A")
    datafile = datafiles[0]
    origin = Origin(latitude=42.6925, longitude=173.021944)

    # Expected intensity-measure components/types once the 'invalid'
    # entries passed below are dropped.
    target_imcs = np.sort(
        np.asarray([
            "GREATER_OF_TWO_HORIZONTALS", "H1", "H2", "Z", "ROTD(50.0)",
            "ROTD(100.0)"
        ]))
    target_imts = np.sort(np.asarray(["SA(1.000)", "PGA", "PGV"]))
    stream = read_geonet(datafile)[0]
    with warnings.catch_warnings():
        # The 'invalid' IMC/IMT names are expected to emit warnings;
        # silence them for the duration of this block.
        warnings.simplefilter("ignore")
        stream_summary = StationSummary.from_stream(
            stream,
            [
                "greater_of_two_horizontals", "channels", "rotd50", "rotd100",
                "invalid"
            ],
            ["sa1.0", "PGA", "pgv", "invalid"],
            origin,
        )
        # The stream attribute should be effectively read-only: assigning
        # an empty list must leave the original stream in place.
        original_stream = stream_summary.stream
        stream_summary.stream = []
        final_stream = stream_summary.stream
        assert original_stream == final_stream
        # NOTE(review): original_code is never used after this assignment —
        # confirm whether an assertion on station_code was intended.
        original_code = stream_summary.station_code
        np.testing.assert_array_equal(np.sort(stream_summary.components),
                                      target_imcs)
        np.testing.assert_array_equal(np.sort(stream_summary.imts),
                                      target_imts)
        np.testing.assert_almost_equal(stream_summary.get_pgm("PGA", "H1"),
                                       99.3173469387755,
                                       decimal=1)
        # Catalog of every supported IMC, independent of this stream.
        target_available = np.sort(
            np.asarray([
                "greater_of_two_horizontals",
                "geometric_mean",
                "arithmetic_mean",
                "channels",
                "gmrotd",
                "rotd",
                "quadratic_mean",
                "radial_transverse",
            ]))
        imcs = stream_summary.available_imcs
        np.testing.assert_array_equal(np.sort(imcs), target_available)
        # Catalog of every supported IMT (this variant includes durations).
        target_available = np.sort(
            np.asarray([
                "pga", "pgv", "sa", "arias", "fas", "duration",
                "sorted_duration"
            ]))
        imts = stream_summary.available_imts
        np.testing.assert_array_equal(np.sort(imts), target_available)
    # Reference peak-ground-motion values, keyed by IMT then IMC.
    test_pgms = {
        "PGV": {
            "ROTD(100.0)": 114.24894584734818,
            "ROTD(50.0)": 81.55436750525355,
            "Z": 37.47740000000001,
            "H1": 100.81460000000004,
            "H2": 68.4354,
            "GREATER_OF_TWO_HORIZONTALS": 100.81460000000004,
        },
        "PGA": {
            "ROTD(100.0)": 100.73875535385548,
            "ROTD(50.0)": 91.40178541935455,
            "Z": 183.7722361866693,
            "H1": 99.24999872535474,
            "H2": 81.23467239067368,
            "GREATER_OF_TWO_HORIZONTALS": 99.24999872535474,
        },
        "SA(1.000)": {
            "ROTD(100.0)": 146.9023350124098,
            "ROTD(50.0)": 106.03202302692158,
            "Z": 27.74118995438756,
            "H1": 136.25041187387063,
            "H2": 84.69296738413021,
            "GREATER_OF_TWO_HORIZONTALS": 136.25041187387063,
        },
    }
    pgms = stream_summary.pgms
    # pgms is indexed by (IMT, IMC) here; compare each entry to the
    # reference value above.
    for imt_str in test_pgms:
        for imc_str in test_pgms[imt_str]:
            result = pgms.loc[imt_str, imc_str].Result
            np.testing.assert_almost_equal(result,
                                           test_pgms[imt_str][imc_str],
                                           decimal=10)

    # Test with fas
    stream = read_geonet(datafile)[0]
    stream_summary = StationSummary.from_stream(
        stream,
        ["greater_of_two_horizontals", "channels", "geometric_mean"],
        ["sa1.0", "PGA", "pgv", "fas2.0"],
    )
    target_imcs = np.sort(
        np.asarray(
            ["GEOMETRIC_MEAN", "GREATER_OF_TWO_HORIZONTALS", "H1", "H2", "Z"]))
    target_imts = np.sort(np.asarray(["SA(1.000)", "PGA", "PGV",
                                      "FAS(2.000)"]))
    np.testing.assert_array_equal(np.sort(stream_summary.components),
                                  target_imcs)
    np.testing.assert_array_equal(np.sort(stream_summary.imts), target_imts)

    # Test config use
    stream = read_geonet(datafile)[0]
    stream_summary = StationSummary.from_config(stream)
    # NOTE(review): target_imcs/target_imts below are computed but never
    # asserted against the summary — confirm whether assertions were
    # intended here.
    target_imcs = np.sort(
        np.asarray(["GREATER_OF_TWO_HORIZONTALS", "H1", "H2", "Z"]))
    target_imts = np.sort(
        np.asarray([
            "SA(1.000)",
            "SA(2.000)",
            "SA(3.000)",
            "SA(0.300)",
            "PGA",
            "PGV",
            "FAS(1.000)",
            "FAS(2.000)",
            "FAS(3.000)",
            "FAS(0.300)",
        ]))
    assert stream_summary.smoothing == "konno_ohmachi"
    assert stream_summary.bandwidth == 20.0
    assert stream_summary.damping == 0.05

    # test XML output
    stream = read_geonet(datafile)[0]
    imclist = [
        "greater_of_two_horizontals", "channels", "rotd50.0", "rotd100.0"
    ]
    imtlist = ["sa1.0", "PGA", "pgv", "fas2.0", "arias"]
    stream_summary = StationSummary.from_stream(stream, imclist, imtlist)
    xmlstr = stream_summary.get_metric_xml()

    xml_station = stream_summary.get_station_xml()

    # Round-trip: parse the XML back and check IMCs/IMTs survive intact.
    stream2 = StationSummary.from_xml(xmlstr, xml_station)
    cmp1 = np.sort([
        "GREATER_OF_TWO_HORIZONTALS", "H1", "H2", "ROTD100.0", "ROTD50.0", "Z"
    ])
    cmp2 = np.sort(stream2.components)
    np.testing.assert_array_equal(cmp1, cmp2)
    imt1 = np.sort(stream_summary.imts)
    imt2 = np.sort(stream2.imts)
    np.testing.assert_array_equal(imt1, imt2)