Example #1
def read_volume_one(filename, location='', alternate=False):
    """Read channel data from USC volume 1 text file.

    Args:
        filename (str):
            Input USC V1 filename.
        location (str):
            Location code to assign to the traces. Default is ''.
        alternate (bool):
            Passed through to _read_channel to enable its alternate reading
            behavior. Default is False.

    Returns:
        list: List containing one StationStream with the channels read from
        the file.
    """
    volume = VOLUMES['V1']
    # count the number of lines in the file
    with open(filename) as f:
        line_count = sum(1 for _ in f)
    # read as many channels as are present in the file
    line_offset = 0
    stream = StationStream([])
    while line_offset < line_count:
        trace, line_offset = _read_channel(filename,
                                           line_offset,
                                           volume,
                                           location=location,
                                           alternate=alternate)
        # store the trace if one was successfully read
        if trace is not None:
            stream.append(trace)

    return [stream]
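
# A minimal usage sketch for the reader above; the filename is a placeholder,
# not taken from the original source:
#
#     streams = read_volume_one('example_usc.v1', location='--')
#     stream = streams[0]
#     for trace in stream:
#         print(trace.id, trace.stats.npts)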
Example #2
    def get_derivative(self):
        """
        Calculates the derivative of each trace's data.

        Returns:
            stream: StationStream with the differentiated data.
        """
        stream = StationStream([])
        for trace in self.transform_data:
            differentiated_trace = trace.copy().differentiate()
            differentiated_trace.stats['units'] = 'acc'
            stream.append(differentiated_trace)
        return stream
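
# A self-contained sketch of the numerical differentiation step above. ObsPy's
# Trace.differentiate defaults to a gradient (central-difference) scheme; the
# synthetic velocity series here is an assumption for illustration:
import numpy as np

dt = 0.01                                        # sample spacing in seconds
vel = np.sin(2 * np.pi * np.arange(0, 1, dt))    # synthetic velocity trace
acc = np.gradient(vel, dt)                       # derivative -> acceleration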
Example #3
    def get_arias(self):
        """
        Calculates the Arias intensity of each trace.

        Returns:
            arias_intensities: Dictionary of Arias intensity for each channel.
        """
        arias_intensities = {}
        arias_stream = StationStream([])
        for trace in self.reduction_data:
            dt = trace.stats["delta"]
            # convert from cm/s/s to m/s/s
            acc = trace.data * 0.01

            # Calculate Arias Intensity
            integrated_acc2 = integrate.cumtrapz(acc * acc, dx=dt)
            arias_intensity = integrated_acc2 * np.pi * GAL_TO_PCTG / 2

            # Create a copy of stats so we don't modify original data
            stats = trace.stats.copy()
            channel = stats.channel
            stats.standard.units_type = "vel"
            stats.npts = len(arias_intensity)
            arias_stream.append(StationTrace(arias_intensity, stats))
            arias_intensities[channel] = np.abs(np.max(arias_intensity))
        self.arias_stream = arias_stream
        return arias_intensities
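
# A self-contained sketch of the Arias intensity computed above,
# I_A(t) = pi / (2 * g) * cumulative integral of a(t)**2 dt, written against
# plain numpy/scipy. The synthetic record is an assumption; newer scipy names
# cumtrapz as cumulative_trapezoid:
import numpy as np
from scipy import integrate

g = 9.81                                                     # gravity, m/s**2
dt = 0.01
acc = 0.5 * np.sin(2 * np.pi * 2.0 * np.arange(0, 10, dt))   # accel, m/s**2
ia = np.pi / (2 * g) * integrate.cumulative_trapezoid(acc ** 2, dx=dt)
print('Arias intensity (m/s):', ia[-1])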
Example #4
    def get_integral(self, config=None):
        """
        Calculates the integral of each trace's data.

        Args:
            config (dict):
                Configuration dictionary passed through to the trace
                integration method. Default is None.

        Returns:
            stream: StationStream with the integrated data.
        """
        stream = StationStream([])
        for trace in self.transform_data:
            integrated_trace = trace.copy().integrate(config=config)
            stream.append(integrated_trace)
        return stream
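
# A self-contained sketch of the cumulative trapezoidal integration that a
# trace integrate step typically performs (illustrative only; the actual
# scheme is whatever trace.integrate is configured to use):
import numpy as np
from scipy import integrate

dt = 0.01
acc = np.cos(2 * np.pi * np.arange(0, 1, dt))                # acceleration
vel = integrate.cumulative_trapezoid(acc, dx=dt, initial=0)  # -> velocity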
Example #5
def read_cosmos(filename, config=None, **kwargs):
    """Read COSMOS V1/V2 strong motion file.

    There is one extra key in the Stats object for each Trace -
    "process_level". This will be set to either "V1" or "V2".

    Args:
        filename (str):
            Path to possible COSMOS V1/V2 data file.
        config (dict):
            Dictionary containing configuration.
        kwargs (ref):
            valid_station_types (list): List of valid station types. See table
                6  in the COSMOS strong motion data format documentation for
                station type codes.
            Other arguments will be ignored.

    Returns:
        list: List of StationStreams containing three channels of acceleration
        data (cm/s**2).
    """
    logging.debug("Starting read_cosmos.")
    if not is_cosmos(filename, config):
        raise Exception(
            f"{filename} is not a valid COSMOS strong motion data file.")
    # get list of valid stations
    valid_station_types = kwargs.get("valid_station_types", None)
    # get the location code
    location = kwargs.get("location", "")

    # count the number of lines in the file
    with open(filename, encoding="utf-8") as f:
        line_count = sum(1 for _ in f)

    # read as many channels as are present in the file
    line_offset = 0
    stream = StationStream([])
    while line_offset < line_count:
        trace, line_offset = _read_channel(filename,
                                           line_offset,
                                           location=location)
        # store the trace if the station type is in the valid_station_types
        # list or store the trace if there is no valid_station_types list
        if valid_station_types is not None:
            scode = trace.stats["format_specific"]["station_code"]
            if scode in valid_station_types:
                stream.append(trace)
        else:
            stream.append(trace)

    return [stream]
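
# A minimal usage sketch for read_cosmos; the filename and station type code
# are placeholders, not taken from the original source:
#
#     streams = read_cosmos('example_cosmos.v1', valid_station_types=['1'])
#     for stream in streams:
#         print(stream)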
Example #6
    def get_integral(self):
        """
        Calculates the integral of each trace's data.

        Returns:
            stream: StationStream with the integrated data.
        """
        stream = StationStream([])
        for trace in self.transform_data:
            integrated_trace = trace.copy().integrate()
            if integrated_trace.stats.standard.units == 'acc':
                integrated_trace.stats.standard.units = 'vel'
            elif integrated_trace.stats.standard.units == 'vel':
                integrated_trace.stats.standard.units = 'disp'
            stream.append(integrated_trace)
        return stream
Example #7
def read_dmg(filename, **kwargs):
    """Read DMG strong motion file.

    Notes:
        CSMIP is synonymous with DMG in this reader.

    Args:
        filename (str):
            Path to possible DMG data file.
        kwargs (ref):
            units (str): String determining which timeseries is returned. Valid
                    options include 'acc', 'vel', 'disp'. Default is 'acc'.
            Other arguments will be ignored.

    Returns:
        Stream: Obspy Stream containing three channels of acceleration data
        (cm/s**2).
    """
    logging.debug("Starting read_dmg.")
    if not is_dmg(filename):
        raise Exception(
            '%s is not a valid DMG strong motion data file.' % filename)

    # Check for units and location
    units = kwargs.get('units', 'acc')
    location = kwargs.get('location', '')

    if units not in UNITS:
        raise Exception('DMG: Not a valid choice of units.')

    # Check for DMG format and determine volume type
    with open(filename, 'rt', encoding='utf-8') as f:
        line = f.readline()
    reader = None
    if line.lower().find('uncorrected') >= 0:
        reader = 'V1'
    elif line.lower().find('corrected') >= 0:
        reader = 'V2'
    elif line.lower().find('response') >= 0:
        reader = 'V3'

    # Count the number of lines in the file
    with open(filename, encoding='utf-8') as f:
        line_count = sum(1 for _ in f)

    # Read as many channels as are present in the file
    line_offset = 0
    trace_list = []
    while line_offset < line_count:
        if reader == 'V2':
            traces, line_offset = _read_volume_two(
                filename, line_offset, location=location, units=units)
            if traces is not None:
                trace_list += traces
        elif reader == 'V1':
            traces, line_offset = _read_volume_one(
                filename, line_offset, location=location, units=units)
            if traces is not None:
                trace_list += traces
        else:
            raise ValueError('DMG: Not a supported volume.')

    stream = StationStream([])
    for trace in trace_list:
        # Only keep traces whose units match the requested units (default is
        # acceleration); this may need to change if others start using this
        # code and want to read in the other data types.
        if trace.stats['standard']['units'] == units:
            stream.append(trace)
    return [stream]
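
# A minimal usage sketch for read_dmg; the filename is a placeholder:
#
#     streams = read_dmg('example_dmg.raw', units='acc', location='--')
#     stream = streams[0]
#     print(stream)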
Example #8
def test_fas():
    """
    Testing based upon the work provided in
    https://github.com/arkottke/notebooks/blob/master/effective_amp_spectrum.ipynb
    """
    ddir = os.path.join("data", "testdata")
    datadir = pkg_resources.resource_filename("gmprocess", ddir)
    fas_file = os.path.join(datadir, "fas_geometric_mean.pkl")
    p1 = os.path.join(datadir, "peer", "RSN763_LOMAP_GIL067.AT2")
    p2 = os.path.join(datadir, "peer", "RSN763_LOMAP_GIL337.AT2")

    stream = StationStream([])
    for idx, fpath in enumerate([p1, p2]):
        with open(fpath, encoding="utf-8") as file_obj:
            for _ in range(3):
                next(file_obj)
            meta = re.findall(r"[.0-9]+", next(file_obj))
            dt = float(meta[1])
            accels = np.array(
                [col for line in file_obj for col in line.split()],
                dtype=float)
        trace = StationTrace(
            data=accels,
            header={
                "channel": "H" + str(idx),
                "delta": dt,
                "units": "acc",
                "standard": {
                    "corner_frequency": np.nan,
                    "station_name": "",
                    "source": "json",
                    "instrument": "",
                    "instrument_period": np.nan,
                    "source_format": "json",
                    "comments": "",
                    "structure_type": "",
                    "sensor_serial_number": "",
                    "source_file": "",
                    "process_level": "raw counts",
                    "process_time": "",
                    "horizontal_orientation": np.nan,
                    "vertical_orientation": np.nan,
                    "units": "acc",
                    "units_type": "acc",
                    "instrument_sensitivity": np.nan,
                    "instrument_damping": np.nan,
                },
            },
        )
        stream.append(trace)

    for tr in stream:
        response = {"input_units": "counts", "output_units": "cm/s^2"}
        tr.setProvenance("remove_response", response)

    target_df = pd.read_pickle(fas_file)
    ind_vals = target_df.index.values
    per = np.unique(
        [float(i[0].split(")")[0].split("(")[1]) for i in ind_vals])
    freqs = 1 / per
    imts = ["fas" + str(p) for p in per]
    summary = StationSummary.from_stream(stream, ["geometric_mean"],
                                         imts,
                                         bandwidth=30)

    pgms = summary.pgms
    # pgms.to_pickle(fas_file)
    for idx, f in enumerate(freqs):
        fstr = f"FAS({1 / f:.3f})"
        fval1 = pgms.loc[fstr, "GEOMETRIC_MEAN"].Result
        fval2 = target_df.loc[fstr, "GEOMETRIC_MEAN"].Result
        np.testing.assert_allclose(fval1, fval2, rtol=1e-5, atol=1e-5)
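
# A self-contained sketch of the quantity this test checks: the Fourier
# amplitude spectrum (FAS) of each horizontal record and the geometric mean
# of the two spectra. The synthetic records are assumptions, and the
# Konno-Ohmachi smoothing that gmprocess applies is omitted here:
import numpy as np

dt = 0.01
t = np.arange(0, 10, dt)
h1 = np.sin(2 * np.pi * 1.0 * t)            # horizontal component 1
h2 = np.cos(2 * np.pi * 1.5 * t)            # horizontal component 2

freqs = np.fft.rfftfreq(len(t), d=dt)
fas1 = np.abs(np.fft.rfft(h1)) * dt         # scale amplitudes by dt
fas2 = np.abs(np.fft.rfft(h2)) * dt
geometric_mean = np.sqrt(fas1 * fas2)       # combined horizontal FAS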
Example #9
def test_fas():
    """
    Testing based upon the work provided in
    https://github.com/arkottke/notebooks/blob/master/effective_amp_spectrum.ipynb
    """
    ddir = os.path.join('data', 'testdata')
    datadir = pkg_resources.resource_filename('gmprocess', ddir)
    fas_file = os.path.join(datadir, 'fas_arithmetic_mean.pkl')
    p1 = os.path.join(datadir, 'peer', 'RSN763_LOMAP_GIL067.AT2')
    p2 = os.path.join(datadir, 'peer', 'RSN763_LOMAP_GIL337.AT2')

    stream = StationStream([])
    for idx, fpath in enumerate([p1, p2]):
        with open(fpath, encoding='utf-8') as file_obj:
            for _ in range(3):
                next(file_obj)
            meta = re.findall(r'[.0-9]+', next(file_obj))
            dt = float(meta[1])
            accels = np.array(
                [col for line in file_obj for col in line.split()],
                dtype=float)
        trace = StationTrace(data=accels,
                             header={
                                 'channel': 'H' + str(idx),
                                 'delta': dt,
                                 'units': 'acc',
                                 'standard': {
                                     'corner_frequency': np.nan,
                                     'station_name': '',
                                     'source': 'json',
                                     'instrument': '',
                                     'instrument_period': np.nan,
                                     'source_format': 'json',
                                     'comments': '',
                                     'structure_type': '',
                                     'sensor_serial_number': '',
                                     'source_file': '',
                                     'process_level': 'raw counts',
                                     'process_time': '',
                                     'horizontal_orientation': np.nan,
                                     'vertical_orientation': np.nan,
                                     'units': 'acc',
                                     'units_type': 'acc',
                                     'instrument_sensitivity': np.nan,
                                     'instrument_damping': np.nan
                                 }
                             })
        stream.append(trace)

    for tr in stream:
        response = {'input_units': 'counts', 'output_units': 'cm/s^2'}
        tr.setProvenance('remove_response', response)

    target_df = pd.read_pickle(fas_file)
    ind_vals = target_df.index.values
    per = np.unique(
        [float(i[0].split(')')[0].split('(')[1]) for i in ind_vals])
    freqs = 1 / per
    imts = ['fas' + str(p) for p in per]
    summary = StationSummary.from_stream(stream, ['arithmetic_mean'],
                                         imts,
                                         bandwidth=30)

    pgms = summary.pgms
    # pgms.to_pickle(fas_file)
    for idx, f in enumerate(freqs):
        fstr = 'FAS(%.3f)' % (1 / f)
        fval1 = pgms.loc[fstr, 'ARITHMETIC_MEAN'].Result
        fval2 = target_df.loc[fstr, 'ARITHMETIC_MEAN'].Result
        np.testing.assert_allclose(fval1, fval2, rtol=1e-5, atol=1e-5)
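
# The arithmetic-mean combination exercised above is simply the per-frequency
# average of the two horizontal spectra; a one-line sketch with assumed
# amplitude arrays:
import numpy as np

arithmetic_mean = (np.array([1.0, 2.0]) + np.array([2.0, 4.0])) / 2.0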
Example #10
def test_radial_transverse():

    origin = Origin(latitude=47.149, longitude=-122.7266667)
    st = read(os.path.join(datadir, "resp_cor", "UW.ALCT.--.*.MSEED"))

    st[0].stats.standard = {}
    st[0].stats.standard["horizontal_orientation"] = 0.0
    st[0].stats["channel"] = "HN1"
    st[1].stats.standard = {}
    st[1].stats.standard["horizontal_orientation"] = 90.0
    st[1].stats["channel"] = "HN2"
    st[2].stats.standard = {}
    st[2].stats.standard["horizontal_orientation"] = np.nan
    st[2].stats["channel"] = "HNZ"

    inv = read_inventory(os.path.join(datadir, "inventory.xml"))
    stalat, stalon = inv[0][0][0].latitude, inv[0][0][0].longitude

    for i, tr in enumerate(st):
        tr.stats["coordinates"] = {"latitude": stalat}
        tr.stats["coordinates"]["longitude"] = stalon
        tr.stats["standard"].update({
            "corner_frequency": np.nan,
            "station_name": "",
            "source": "json",
            "instrument": "",
            "instrument_period": np.nan,
            "vertical_orientation": np.nan,
            "source_format": "json",
            "comments": "",
            "structure_type": "",
            "source_file": "",
            "sensor_serial_number": "",
            "process_level": "raw counts",
            "process_time": "",
            "units": "cm/s/s",
            "units_type": "acc",
            "instrument_sensitivity": np.nan,
            "volts_to_counts": np.nan,
            "instrument_damping": np.nan,
        })
    baz = gps2dist_azimuth(stalat, stalon, origin.latitude,
                           origin.longitude)[1]

    st1 = st.copy()
    st1[0].stats.channel = st1[0].stats.channel[:-1] + "N"
    st1[1].stats.channel = st1[1].stats.channel[:-1] + "E"
    st1.rotate(method="NE->RT", back_azimuth=baz)
    pgms = np.abs(st1.max())

    st2 = StationStream([])
    for t in st:
        st2.append(StationTrace(t.data, t.stats))

    for tr in st2:
        response = {"input_units": "counts", "output_units": "cm/s^2"}
        tr.setProvenance("remove_response", response)

    summary = StationSummary.from_stream(st2, ["radial_transverse"], ["pga"],
                                         origin)
    pgmdf = summary.pgms
    R = pgmdf.loc["PGA", "HNR"].Result
    T = pgmdf.loc["PGA", "HNT"].Result
    np.testing.assert_almost_equal(pgms[0], sp.g * R)

    np.testing.assert_almost_equal(pgms[1], sp.g * T)

    # Test with a station whose channels are not aligned to E-N
    SEW_st = read(os.path.join(datadir, "resp_cor", "GS.SEW.*.mseed"))
    SEW_inv = read_inventory(os.path.join(datadir, "inventory_sew.xml"))
    stalat, stalon = SEW_inv[0][0][0].latitude, SEW_inv[0][0][0].longitude

    # This needs to be checked. The target data doesn't appear to be
    # correct. This can be updated when a tolerance is added to the rotate
    # method.
    """traces = []
    for tr in SEW_st:
        tr.stats.coordinates = {'latitude': stalat,
                                'longitude': stalon}
        tr.stats.standard = {'corner_frequency': np.nan,
            'station_name': '',
            'source': 'json',
            'instrument': '',
            'instrument_period': np.nan,
            'source_format': 'json',
            'comments': '',
            'structure_type': '',
            'sensor_serial_number': '',
            'process_level': 'raw counts',
            'process_time': '',
            'horizontal_orientation':
             SEW_inv.get_channel_metadata(tr.get_id())['azimuth'],
            'units': 'acc',
            'instrument_damping': np.nan}
        traces += [StationTrace(tr.data, tr.stats)]
    baz = gps2dist_azimuth(stalat, stalon,
                           origin.latitude, origin.longitude)[1]
    SEW_st_copy = StationStream(traces)
    SEW_st_copy.rotate(method='->NE', inventory=SEW_inv)
    SEW_st_copy.rotate(method='NE->RT', back_azimuth=baz)
    pgms = np.abs(SEW_st_copy.max())

    summary = StationSummary.from_stream(
        SEW_st, ['radial_transverse'], ['pga'], origin)

    np.testing.assert_almost_equal(
        pgms[1], sp.g * summary.pgms['PGA']['R'])

    np.testing.assert_almost_equal(
        pgms[2], sp.g * summary.pgms['PGA']['T'])"""

    # Test failure case without two horizontal channels
    copy1 = st2.copy()
    copy1[0].stats.channel = copy1[0].stats.channel[:-1] + "3"
    pgms = StationSummary.from_stream(copy1, ["radial_transverse"], ["pga"],
                                      origin).pgms
    assert np.isnan(pgms.loc["PGA", "HNR"].Result)
    assert np.isnan(pgms.loc["PGA", "HNT"].Result)

    # Test failure case when channels are not orthogonal
    copy3 = st2.copy()
    copy3[0].stats.standard.horizontal_orientation = 100
    pgms = StationSummary.from_stream(copy3, ["radial_transverse"], ["pga"],
                                      origin).pgms
    assert np.isnan(pgms.loc["PGA", "HNR"].Result)
    assert np.isnan(pgms.loc["PGA", "HNT"].Result)
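
# A self-contained sketch of the NE->RT rotation this test relies on: for a
# back azimuth baz in degrees, obspy's rotate combines the north and east
# components as below (the amplitudes here are assumptions):
import numpy as np

baz = 30.0                                   # back azimuth in degrees
n, e = 1.0, 0.5                              # north and east amplitudes
ang = np.radians(baz)
r = -n * np.cos(ang) - e * np.sin(ang)       # radial component
t = n * np.sin(ang) - e * np.cos(ang)        # transverse component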
Example #11
def test_fas():
    """
    Testing based upon the work provided in
    https://github.com/arkottke/notebooks/blob/master/effective_amp_spectrum.ipynb
    """
    ddir = os.path.join('data', 'testdata')
    datadir = pkg_resources.resource_filename('gmprocess', ddir)
    fas_file = os.path.join(datadir, 'fas_quadratic_mean.txt')
    p1 = os.path.join(datadir, 'peer', 'RSN763_LOMAP_GIL067.AT2')
    p2 = os.path.join(datadir, 'peer', 'RSN763_LOMAP_GIL337.AT2')

    stream = StationStream([])
    for idx, fpath in enumerate([p1, p2]):
        with open(fpath, encoding='utf-8') as file_obj:
            for _ in range(3):
                next(file_obj)
            meta = re.findall(r'[.0-9]+', next(file_obj))
            dt = float(meta[1])
            accels = np.array(
                [col for line in file_obj for col in line.split()],
                dtype=float)
        trace = StationTrace(data=accels,
                             header={
                                 'channel': 'H' + str(idx),
                                 'delta': dt,
                                 'units': 'acc',
                                 'standard': {
                                     'corner_frequency': np.nan,
                                     'station_name': '',
                                     'source': 'json',
                                     'instrument': '',
                                     'instrument_period': np.nan,
                                     'source_format': 'json',
                                     'comments': '',
                                     'structure_type': '',
                                     'sensor_serial_number': '',
                                     'source_file': '',
                                     'process_level': 'raw counts',
                                     'process_time': '',
                                     'horizontal_orientation': np.nan,
                                     'vertical_orientation': np.nan,
                                     'units': 'acc',
                                     'units_type': 'acc',
                                     'instrument_sensitivity': np.nan,
                                     'instrument_damping': np.nan
                                 }
                             })
        stream.append(trace)

    for tr in stream:
        response = {'input_units': 'counts', 'output_units': 'cm/s^2'}
        tr.setProvenance('remove_response', response)

    freqs, fas = np.loadtxt(fas_file,
                            unpack=True,
                            usecols=(0, 1),
                            delimiter=',')
    # scaling required on the test data as it was not accounted for originally
    imts = ['fas' + str(1 / f) for f in freqs]
    summary = StationSummary.from_stream(stream, ['quadratic_mean'],
                                         imts,
                                         bandwidth=30)

    pgms = summary.pgms
    for idx, f in enumerate(freqs):
        fstr = 'FAS(%.3f)' % (1 / f)
        fval = pgms.loc[fstr, 'QUADRATIC_MEAN'].Result
        np.testing.assert_allclose(fval,
                                   fas[idx] * stream[0].stats.delta,
                                   rtol=1e-5,
                                   atol=1e-5)
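
# A short sketch of the quadratic-mean (root-mean-square) combination this
# test checks, QM = sqrt((fas1**2 + fas2**2) / 2), with assumed spectra:
import numpy as np

fas1 = np.array([1.0, 2.0, 3.0])
fas2 = np.array([2.0, 2.0, 1.0])
quadratic_mean = np.sqrt((fas1 ** 2 + fas2 ** 2) / 2.0)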
Example #12
def read_dmg(filename, config=None, **kwargs):
    """Read DMG strong motion file.

    Notes:
        CSMIP is synonymous with DMG in this reader.

    Args:
        filename (str):
            Path to possible DMG data file.
        config (dict):
            Dictionary containing configuration.
        kwargs (ref):
            units (str): String determining which timeseries is returned. Valid
                    options include 'acc', 'vel', 'disp'. Default is 'acc'.
            Other arguments will be ignored.

    Returns:
        Stream: Obspy Stream containing three channels of acceleration data
        (cm/s**2).
    """
    logging.debug("Starting read_dmg.")
    if not is_dmg(filename, config):
        raise Exception(f"{filename} is not a valid DMG strong motion data file.")

    # Check for units and location
    units = kwargs.get("units", "acc")
    location = kwargs.get("location", "")

    if units not in UNITS:
        raise Exception("DMG: Not a valid choice of units.")

    # Check for DMG format and determine volume type
    with open(filename, "rt", encoding="utf-8") as f:
        line = f.readline()
    reader = None
    if line.lower().find("uncorrected") >= 0:
        reader = "V1"
    elif line.lower().find("corrected") >= 0:
        reader = "V2"
    elif line.lower().find("response") >= 0:
        reader = "V3"

    # Count the number of lines in the file
    with open(filename, encoding="utf-8") as f:
        line_count = sum(1 for _ in f)

    # Read as many channels as are present in the file
    line_offset = 0
    trace_list = []
    while line_offset < line_count:
        if reader == "V2":
            traces, line_offset = _read_volume_two(
                filename, line_offset, location=location, units=units
            )
            if traces is not None:
                trace_list += traces
        elif reader == "V1":
            traces, line_offset = _read_volume_one(
                filename, line_offset, location=location, units=units, config=config
            )
            if traces is not None:
                trace_list += traces
        else:
            raise ValueError("DMG: Not a supported volume.")

    stream = StationStream([])
    for trace in trace_list:
        # Only keep traces whose units type matches the requested units
        # (default is acceleration); this may need to change if others start
        # using this code and want to read in the other data types.
        if trace.stats["standard"]["units_type"] == units:
            stream.append(trace)
    return [stream]