def test_exceptions():
    homedir = os.path.dirname(
        os.path.abspath(__file__))  # where is this script?
    data_dir = os.path.join(homedir, '..', 'data',
                            'evolutionary_IM_examples.xlsx')
    df = pd.read_excel(data_dir)
    time = df.iloc[:, 0].values
    # spreadsheet data is in cm/s/s; convert input to m/s/s
    acc = df.iloc[:, 1].values / 100
    delta = time[2] - time[1]
    sr = 1 / delta
    header = {
        'delta': delta,
        'sampling_rate': sr,
        'npts': len(acc),
        'units': 'm/s/s',
        'channel': 'H1'
    }
    trace = Trace(data=acc, header=header)
    stream = Stream([trace])
    try:
        StationSummary.from_stream(stream, ['gmrotd50'], ['arias'])
        success = True
    except Exception:
        success = False
    assert success is False

    try:
        StationSummary.from_stream(stream, ['rotd50'], ['arias'])
        success = True
    except Exception:
        success = False
    assert success is False
    def setStationDictionary(self, imcs=None, imts=None):
        """
        Calculate the station summaries and set the dictionary.

        Args:
            imcs (list): List of intensity measurement components (str).
                    Default is None. If None, the imclist from config is used.
            imts (list): List of intensity measurement types (str).
                    Default is None. If None, the imtlist from config is used.

        Notes:
            Requires that corrected_streams is set.

        Raises:
            Exception: If corrected_streams is not set.
        """
        # use defaults if imcs or imts are not specified
        config = get_config()
        if imcs is None:
            imcs = config['imclist']
        if imts is None:
            imts = config['imtlist']

        if self.corrected_streams is None:
            raise Exception('Processed streams are required to create a '
                            'StationSummary object and create the dictionary.')
        station_dict = OrderedDict()
        for station in self.corrected_streams:
            stream = self.corrected_streams[station]
            station_dict[station] = StationSummary.from_stream(
                stream, imcs, imts)
        self._station_dict = station_dict
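
# A minimal usage sketch for setStationDictionary. It assumes the method lives
# on an EventSummary-style object whose corrected_streams maps station codes to
# processed Stream objects; the class, the 'WTMC' key, and the direct access to
# _station_dict are illustrative assumptions, not defined in this file.
def example_set_station_dictionary(event_summary, datafile):
    # populate the corrected streams, then build per-station StationSummary
    # objects using the default imclist/imtlist from config
    event_summary.corrected_streams = {'WTMC': read_geonet(datafile)}
    event_summary.setStationDictionary()
    return event_summary._station_dict['WTMC'].pgms
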
def test_gmrotd():
    homedir = os.path.dirname(
        os.path.abspath(__file__))  # where is this script?
    datafile_v2 = os.path.join(homedir, '..', 'data', 'geonet',
                               '20161113_110259_WTMC_20.V2A')
    stream_v2 = read_geonet(datafile_v2)
    station_summary = StationSummary.from_stream(
        stream_v2, ['gmrotd0', 'gmrotd50', 'gmrotd100'], ['pga'])
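
# For orientation, a minimal sketch of the GMRotD measure exercised above:
# rotate the two horizontals through a sweep of non-redundant angles, take the
# geometric mean of the two rotated peaks at each angle, and report a
# percentile (0, 50, or 100) over the angles. Illustrative only, not the
# library's implementation.
def gmrotd_sketch(h1, h2, percentile=50.0):
    angles = np.radians(np.arange(0, 90))
    geo_means = []
    for angle in angles:
        rot1 = h1 * np.cos(angle) + h2 * np.sin(angle)
        rot2 = -h1 * np.sin(angle) + h2 * np.cos(angle)
        geo_means.append(np.sqrt(np.max(np.abs(rot1)) * np.max(np.abs(rot2))))
    return np.percentile(geo_means, percentile)
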
def test_greater_of_two_horizontals():
    homedir = os.path.dirname(
        os.path.abspath(__file__))  # where is this script?
    datafile_v2 = os.path.join(homedir, '..', 'data', 'geonet',
                               '20161113_110259_WTMC_20.V2A')
    stream_v2 = read_geonet(datafile_v2)
    station_summary = StationSummary.from_stream(
        stream_v2, ['greater_of_two_horizontals'], ['pga'])
    station_dict = station_summary.pgms['PGA']
    greater = station_dict['GREATER_OF_TWO_HORIZONTALS']
    np.testing.assert_almost_equal(greater, 99.3173469387755, decimal=1)
def test_arias():
    homedir = os.path.dirname(
        os.path.abspath(__file__))  # where is this script?
    data_dir = os.path.join(homedir, '..', 'data',
                            'evolutionary_IM_examples.xlsx')
    df = pd.read_excel(data_dir)
    time = df.iloc[:, 0].values
    # input and output are in m/s/s (spreadsheet data is in cm/s/s)
    acc = df.iloc[:, 1].values / 100
    target_IA = df.iloc[:, 4].values[0]
    delta = time[2] - time[1]
    sr = 1 / delta
    header = {
        'delta': delta,
        'sampling_rate': sr,
        'npts': len(acc),
        'units': 'm/s/s',
        'channel': 'H1'
    }
    trace = Trace(data=acc, header=header)
    stream = Stream([trace])
    Ia = calculate_arias(stream, ['channels'])['H1']
    # the target has only one decimal place and is in cm/s/s
    Ia = Ia * 100
    np.testing.assert_almost_equal(Ia, target_IA, decimal=1)

    # input is in cm/s/s; output is in m/s/s
    trace = Trace(data=acc * 100, header=header)
    stream = Stream([trace])
    station = StationSummary.from_stream(stream, ['channels'], ['arias'])
    Ia = station.pgms['ARIAS']['H1']
    # the target has only one decimal place and is in cm/s/s
    Ia = Ia * 100
    np.testing.assert_almost_equal(Ia, target_IA, decimal=1)

    # Test other components
    data_file = os.path.join(homedir, '..', 'data', 'cwb', '2-ECU.dat')
    stream = read_data(data_file)
    station = StationSummary.from_stream(
        stream, ['channels', 'gmrotd', 'rotd50', 'greater_of_two_horizontals'],
        ['arias'])
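
# For reference, Arias intensity is Ia = (pi / (2 * g)) * integral(a(t)^2 dt),
# which yields m/s when the acceleration is in m/s/s. A minimal sketch of that
# calculation on a raw acceleration array (not the calculate_arias
# implementation used above):
def arias_sketch(acc, dt, g=9.81):
    return np.pi / (2.0 * g) * np.trapz(acc ** 2, dx=dt)
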
def test_exceptions():
    homedir = os.path.dirname(
        os.path.abspath(__file__))  # where is this script?
    datafile_v2 = os.path.join(homedir, '..', 'data', 'geonet',
                               '20161113_110259_WTMC_20.V2A')
    stream_v2 = read_geonet(datafile_v2)
    stream1 = stream_v2.select(channel="HN1")
    try:
        StationSummary.from_stream(stream1, ['gmrotd50'], ['pga'])
        success = True
    except PGMException:
        success = False
    assert success is False

    for trace in stream_v2:
        stream1.append(trace)
    try:
        StationSummary.from_stream(stream1, ['gmrotd50'], ['pga'])
        success = True
    except PGMException:
        success = False
    assert success is False

    stream2 = Stream([
        stream_v2.select(channel="HN1")[0],
        Trace(data=np.asarray([]), header={"channel": "HN2"})
    ])
    try:
        StationSummary.from_stream(stream2, ['gmrotd50'], ['pga'])
        success = True
    except PGMException:
        success = False
    assert success is False
def test_sa():
    homedir = os.path.dirname(
        os.path.abspath(__file__))  # where is this script?
    datafile_v2 = os.path.join(homedir, '..', 'data', 'geonet',
                               '20161113_110259_WTMC_20.V2A')
    stream_v2 = read_geonet(datafile_v2)
    sa_target = {}
    for trace in stream_v2:
        vtrace = trace.copy()
        vtrace.integrate()
        sa_target[vtrace.stats['channel']] = np.abs(vtrace.max())
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        station_summary = StationSummary.from_stream(
            stream_v2, ['greater_of_two_horizontals', 'gmrotd50', 'channels'],
            ['sa1.0', 'saincorrect'])
    assert 'SA1.0' in station_summary.pgms
def test_channels():
    homedir = os.path.dirname(
        os.path.abspath(__file__))  # where is this script?
    datafile_v2 = os.path.join(homedir, '..', 'data', 'geonet',
                               '20161113_110259_WTMC_20.V2A')
    stream_v2 = read_geonet(datafile_v2)
    station_summary = StationSummary.from_stream(stream_v2, ['channels'],
                                                 ['pga'])
    station_dict = station_summary.pgms['PGA']
    np.testing.assert_almost_equal(station_dict['HN2'],
                                   81.28979591836733,
                                   decimal=1)
    np.testing.assert_almost_equal(station_dict['HN1'],
                                   99.3173469387755,
                                   decimal=1)
    np.testing.assert_almost_equal(station_dict['HNZ'],
                                   183.89693877551022,
                                   decimal=1)
def test_pga():
    homedir = os.path.dirname(
        os.path.abspath(__file__))  # where is this script?
    datafile_v2 = os.path.join(homedir, '..', 'data', 'geonet',
                               '20161113_110259_WTMC_20.V2A')
    stream_v2 = read_geonet(datafile_v2)
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        station_summary = StationSummary.from_stream(
            stream_v2,
            ['channels', 'greater_of_two_horizontals', 'gmrotd50',
             'gmrotd100', 'gmrotd0'],
            ['pga', 'sa1.0', 'saincorrect'])
    station_dict = station_summary.pgms['PGA']
    greater = station_dict['GREATER_OF_TWO_HORIZONTALS']
    np.testing.assert_almost_equal(
        station_dict['HN2'], 81.28979591836733, decimal=1)
    np.testing.assert_almost_equal(
        station_dict['HN1'], 99.3173469387755, decimal=1)
    np.testing.assert_almost_equal(
        station_dict['HNZ'], 183.89693877551022, decimal=1)
    np.testing.assert_almost_equal(greater, 99.3173469387755, decimal=1)
def test_rotd():
    homedir = os.path.dirname(os.path.abspath(__file__))
    datadir = os.path.join(homedir, '..', 'data', 'process')
    # Create a stream and station summary, convert from m/s^2 to cm/s^2 (GAL)
    osc1_data = np.genfromtxt(datadir + '/ALCTENE.UW..sac.acc.final.txt')
    osc2_data = np.genfromtxt(datadir + '/ALCTENN.UW..sac.acc.final.txt')
    osc1_data = osc1_data.T[1] * 100
    osc2_data = osc2_data.T[1] * 100
    tr1 = Trace(data=osc1_data,
                header={
                    'channel': 'H1',
                    'delta': 0.01,
                    'npts': 10400
                })
    tr2 = Trace(data=osc2_data,
                header={
                    'channel': 'H2',
                    'delta': 0.01,
                    'npts': 10400
                })
    st = Stream([tr1, tr2])

    target_pga50 = 4.12528265306
    target_sa1050 = 10.7362857143
    target_pgv50 = 6.239364
    target_sa0350 = 10.1434159021
    target_sa3050 = 1.12614169215
    station = StationSummary.from_stream(
        st, ['rotd50'], ['pga', 'pgv', 'sa0.3', 'sa1.0', 'sa3.0'])
    pgms = station.pgms
    np.testing.assert_allclose(pgms['PGA']['ROTD50.0'], target_pga50, atol=0.1)
    np.testing.assert_allclose(pgms['SA1.0']['ROTD50.0'],
                               target_sa1050,
                               atol=0.1)
    np.testing.assert_allclose(pgms['PGV']['ROTD50.0'], target_pgv50, atol=0.1)
    np.testing.assert_allclose(pgms['SA0.3']['ROTD50.0'],
                               target_sa0350,
                               atol=0.1)
    np.testing.assert_allclose(pgms['SA3.0']['ROTD50.0'],
                               target_sa3050,
                               atol=0.1)
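
# A minimal sketch of the RotD50 idea checked above: project the two
# horizontals onto azimuths from 0 to 180 degrees, take the peak of each
# rotated trace, and report the 50th percentile of those peaks. Illustrative
# only; the library's rotation and oscillator handling is more involved.
def rotd_sketch(h1, h2, percentile=50.0):
    angles = np.radians(np.arange(0, 180))
    peaks = [np.max(np.abs(h1 * np.cos(a) + h2 * np.sin(a))) for a in angles]
    return np.percentile(peaks, percentile)
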
def streams_to_dataframe(streams, lat=None, lon=None, imtlist=None):
    """Extract peak ground motions from list of Stream objects.

    Note: The PGM columns underneath each channel will be variable
    depending on the units of the Stream being passed in (velocity
    sensors can only generate PGV) and on the imtlist passed in by
    user. Spectral acceleration columns will be formatted as SA(0.3)
    for 0.3 second spectral acceleration, for example.

    Args:
        streams (list): List of Stream objects.
        lat (float): Epicentral latitude.
        lon (float): Epicentral longitude.
        imtlist (list): Strings designating desired PGMs to create
            in table.

    Returns:
        DataFrame: Pandas dataframe containing columns:
            - STATION Station code.
            - NAME Text description of station.
            - LOCATION Two character location code.
            - SOURCE Long form string containing source network.
            - NETID Short network code.
            - LAT Station latitude.
            - LON Station longitude.
            - DISTANCE Epicentral distance (km) (if epicentral lat/lon provided).
            - HHE East-west channel (or H1) (multi-index with pgm columns):
                - PGA Peak ground acceleration (%g).
                - PGV Peak ground velocity (cm/s).
                - SA(0.3) Pseudo-spectral acceleration at 0.3 seconds (%g).
                - SA(1.0) Pseudo-spectral acceleration at 1.0 seconds (%g).
                - SA(3.0) Pseudo-spectral acceleration at 3.0 seconds (%g).
            - HHN North-south channel (or H2) (multi-index with pgm columns):
                - PGA Peak ground acceleration (%g).
                - PGV Peak ground velocity (cm/s).
                - SA(0.3) Pseudo-spectral acceleration at 0.3 seconds (%g).
                - SA(1.0) Pseudo-spectral acceleration at 1.0 seconds (%g).
                - SA(3.0) Pseudo-spectral acceleration at 3.0 seconds (%g).
            - HHZ Vertical channel (or HZ) (multi-index with pgm columns):
                - PGA Peak ground acceleration (%g).
                - PGV Peak ground velocity (cm/s).
                - SA(0.3) Pseudo-spectral acceleration at 0.3 seconds (%g).
                - SA(1.0) Pseudo-spectral acceleration at 1.0 seconds (%g).
                - SA(3.0) Pseudo-spectral acceleration at 3.0 seconds (%g).

    """
    # Validate imtlist, ensure everything is a valid IMT
    if imtlist is None:
        imtlist = DEFAULT_IMTS
    else:
        imtlist, invalid = _validate_imtlist(imtlist)
        if len(invalid):
            fmt = 'IMTs %s are invalid specifications. Skipping.'
            warnings.warn(fmt % (str(invalid)), Warning)

    # top level columns
    columns = ['STATION', 'NAME', 'SOURCE', 'NETID', 'LAT', 'LON']

    if lat is not None and lon is not None:
        columns.append('DISTANCE')

    # Check for common events and group channels
    streams = group_channels(streams)

    # Determine which channels should be created
    channels = []
    subchannels = []
    for stream in streams:
        for trace in stream:
            if trace.stats['channel'] not in channels:
                channels.append(trace.stats['channel'])
            if not len(subchannels):
                try:
                    units = trace.stats.standard['units']
                except Exception:
                    units = trace.stats['units']
                if units == 'acc':
                    subchannels = imtlist
                elif units == 'vel':
                    subchannels = list(set(['pgv']).intersection(set(imtlist)))
                else:
                    raise ValueError('Unknown units %s' % units)

    # Create dictionary to hold columns of basic data
    meta_dict = {}
    for column in columns:
        meta_dict[column] = []

    subcolumns = [''] * len(columns)
    subcolumns += subchannels * len(channels)

    # It's complicated to create a dataframe with a multiindex.
    # Create two arrays, one for top level columns, and another for "sub" columns.
    newchannels = []
    for channel in channels:
        newchannels += [channel] * len(subchannels)
    columns += newchannels

    dfcolumns = pd.MultiIndex.from_arrays([columns, subcolumns])
    dataframe = pd.DataFrame(columns=dfcolumns)

    # make sure we set the data types of all of the columns
    dtypes = {
        'STATION': str,
        'NAME': str,
        'SOURCE': str,
        'NETID': str,
        'LAT': np.float64,
        'LON': np.float64
    }

    if lat is not None:
        dtypes.update({'DISTANCE': np.float64})

    dataframe = dataframe.astype(dtypes)

    # create a dictionary for pgm data.  Because it is difficult to set
    # columns in a multiindex, we build each channel's columns in separate
    # dictionaries and assign them to the dataframe afterwards.
    channel_dicts = {}
    for channel in channels:
        channel_dicts[channel] = {}
        for subchannel in subchannels:
            channel_dicts[channel][subchannel] = []

    # loop over streams and extract data
    for stream in streams:
        for key in meta_dict.keys():
            if key == 'NAME':
                name_str = stream[0].stats['standard']['station_name']
                meta_dict[key].append(name_str)
            elif key == 'LAT':
                latitude = stream[0].stats['coordinates']['latitude']
                meta_dict[key].append(latitude)
            elif key == 'LON':
                longitude = stream[0].stats['coordinates']['longitude']
                meta_dict[key].append(longitude)
            elif key == 'STATION':
                meta_dict[key].append(stream[0].stats['station'])
            elif key == 'SOURCE':
                source = stream[0].stats.standard['source']
                meta_dict[key].append(source)
            elif key == 'NETID':
                meta_dict[key].append(stream[0].stats['network'])
            else:
                pass

        if lat is not None:
            dist, _, _ = gps2dist_azimuth(lat, lon, latitude, longitude)
            meta_dict['DISTANCE'].append(dist / 1000)

        # process acceleration and store velocity traces
        for idx, trace in enumerate(stream):
            channel = trace.stats['channel']
            try:
                units = trace.stats.standard['units']
            except Exception:
                units = trace.stats['units']
            if units == 'acc':
                # do some basic data processing - if this has already been
                # done, it shouldn't hurt to repeat it.
                #TODO Check if data was processed/use new process routine
                with warnings.catch_warnings():
                    warnings.simplefilter("ignore")
                    stream[idx] = filter_detrend(trace,
                                                 taper_type='cosine',
                                                 taper_percentage=0.05,
                                                 filter_type='highpass',
                                                 filter_frequency=FILTER_FREQ,
                                                 filter_zerophase=True,
                                                 filter_corners=CORNERS)
            elif units == 'vel':
                # we only have a velocity channel
                pgv = np.abs(trace.max())
                channel_dicts[channel]['PGV'].append(pgv)
        # get station summary and assign values
        station = StationSummary.from_stream(stream, ['channels'], imtlist)
        spectral_streams = []
        tchannels = [t.stats.channel for t in stream]
        for channel in channels:
            if channel not in tchannels:
                for station_imt in imtlist:
                    channel_dicts[channel][station_imt].append(np.nan)
            else:
                for station_imt in imtlist:
                    imt_value = station.pgms[station_imt.upper()][channel]
                    channel_dicts[channel][station_imt].append(imt_value)
                    osc = station.oscillators[station_imt.upper()]
                    if station_imt.startswith('SA'):
                        spectral_streams.append(
                            Stream(osc.select(channel=channel)[0]))

    # assign the non-channel specific stuff to dataframe
    for key, value in meta_dict.items():
        dataframe[key] = value

    # for each channel, assign peak values to dataframe
    for channel, channel_dict in channel_dicts.items():
        subdf = dataframe[channel].copy()
        for key, value in channel_dict.items():
            subdf[key] = value
        dataframe[channel] = subdf

    return (dataframe, spectral_streams)
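
# A hedged usage sketch for streams_to_dataframe. The GeoNet record is the one
# used by the tests above; the epicentral coordinates are placeholders, not
# values taken from this file.
def example_streams_to_dataframe(datafile_v2):
    stream = read_geonet(datafile_v2)
    dataframe, spectral_streams = streams_to_dataframe(
        [stream], lat=-42.6, lon=173.0)
    # metadata columns are flat; per-channel PGM values sit under a
    # channel/IMT MultiIndex, e.g. dataframe['HN1'] for the HN1 block
    return dataframe, spectral_streams
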
def test_stationsummary():
    homedir = os.path.dirname(
        os.path.abspath(__file__))  # where is this script?
    datafile = os.path.join(homedir, '..', 'data', 'geonet',
                            '20161113_110259_WTMC_20.V2A')
    target_imcs = np.sort(
        np.asarray([
            'GREATER_OF_TWO_HORIZONTALS', 'HN1', 'HN2', 'HNZ', 'ROTD50.0',
            'ROTD100.0'
        ]))
    target_imts = np.sort(np.asarray(['SA1.0', 'PGA', 'PGV']))
    stream = read_geonet(datafile)
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        stream_summary = StationSummary.from_stream(stream, [
            'greater_of_two_horizontals', 'channels', 'rotd50', 'rotd100',
            'invalid'
        ], ['sa1.0', 'PGA', 'pgv', 'invalid'])
        original_stream = stream_summary.stream
        stream_summary.stream = []
        final_stream = stream_summary.stream
        assert original_stream == final_stream
        original_code = stream_summary.station_code
        stream_summary.station_code = ''
        final_code = stream_summary.station_code
        assert original_code == final_code
        original_oscillators = stream_summary.oscillators
        final_oscillators = stream_summary.oscillators
        assert original_oscillators == final_oscillators
        np.testing.assert_array_equal(np.sort(stream_summary.components),
                                      target_imcs)
        np.testing.assert_array_equal(np.sort(stream_summary.imts),
                                      target_imts)
        np.testing.assert_almost_equal(stream_summary.get_pgm('PGA', 'HN1'),
                                       99.3173469387755,
                                       decimal=1)
        target_available = np.sort(
            np.asarray([
                'calculate_greater_of_two_horizontals', 'calculate_channels',
                'calculate_gmrotd', 'calculate_rotd'
            ]))
        imcs = stream_summary.available_imcs
        np.testing.assert_array_equal(np.sort(imcs), target_available)
        target_available = np.sort(
            np.asarray([
                'calculate_pga', 'calculate_pgv', 'calculate_sa',
                'calculate_arias'
            ]))
        imts = stream_summary.available_imts
        np.testing.assert_array_equal(np.sort(imts), target_available)
    test_pgms = {
        'PGV': {
            'ROTD100.0': 114.24894584734818,
            'ROTD50.0': 81.55436750525355,
            'HNZ': 37.47740000000001,
            'HN1': 100.81460000000004,
            'HN2': 68.4354,
            'GREATER_OF_TWO_HORIZONTALS': 100.81460000000004
        },
        'PGA': {
            'ROTD100.0': 100.73875535385548,
            'ROTD50.0': 91.40178541935455,
            'HNZ': 183.7722361866693,
            'HN1': 99.24999872535474,
            'HN2': 81.23467239067368,
            'GREATER_OF_TWO_HORIZONTALS': 99.24999872535474
        },
        'SA1.0': {
            'ROTD100.0': 146.9023350124098,
            'ROTD50.0': 106.03202302692158,
            'HNZ': 27.74118995438756,
            'HN1': 136.25041187387063,
            'HN2': 84.69296738413021,
            'GREATER_OF_TWO_HORIZONTALS': 136.25041187387063
        }
    }
    datafile = os.path.join(homedir, '..', 'data', 'geonet',
                            '20161113_110313_THZ_20.V2A')
    invalid_stream = read_geonet(datafile)
    station_code = 'WTMC'
    pgm_summary = StationSummary.from_pgms(station_code, test_pgms)
    # assert pgm_summary.pgms == stream_summary.pgms
    adict = pgm_summary.pgms
    bdict = stream_summary.pgms
    cmp_dicts(adict, bdict)

    # oscillators cannot be calculated without a stream
    try:
        pgm_summary.generate_oscillators(pgm_summary.imts, 0.05)
        success = True
    except Exception:
        success = False
    assert success is False
    # Invalid stream inputs should be rejected
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        pgm_summary.stream = []
        assert pgm_summary.stream is None
        pgm_summary.stream = invalid_stream
        assert pgm_summary.stream is None
        pgm_summary.stream = stream
        assert pgm_summary.stream == stream
        filepath = os.path.join('/Users/tnye/PROJECTS/Duration/data/events',
                                event, 'ground_motion', data_type)

        if os.path.exists(filepath):
            filt_stns = os.path.join(
                '/Users/tnye/PROJECTS/Duration/data/events', event,
                'select_stations', data_type + '.csv')

            # Read data.
            stations, station_stats = read_seismic.get_station_data(
                event, data_type)

            for station in stations:

                # Calculate imts.
                data = StationSummary.from_stream(station, imcs, imts).pgms
                if 'HN1' in data['PGA']:
                    h1 = 'HN1'
                    h2 = 'HN2'
                elif 'BN1' in data['PGA']:
                    h1 = 'BN1'
                    h2 = 'BN2'

                sa0_1_hn1.append(data['SA(0.1)'][h1])
                sa0_2_hn1.append(data['SA(0.2)'][h1])
                sa0_3_hn1.append(data['SA(0.3)'][h1])
                sa0_5_hn1.append(data['SA(0.5)'][h1])
                sa1_hn1.append(data['SA(1)'][h1])
                sa2_hn1.append(data['SA(2)'][h1])
                sa3_hn1.append(data['SA(3)'][h1])
                sa5_hn1.append(data['SA(5)'][h1])
def test_rotation():
    homedir = os.path.dirname(os.path.abspath(__file__))
    datadir = os.path.join(homedir, '..', 'data', 'process')

    # Create a stream and station summary, convert from m/s^2 to cm/s^2 (GAL)
    osc1_data = np.genfromtxt(datadir + '/ALCTENE.UW..sac.acc.final.txt')
    osc2_data = np.genfromtxt(datadir + '/ALCTENN.UW..sac.acc.final.txt')
    osc1_data = osc1_data.T[1]*100
    osc2_data = osc2_data.T[1]*100

    tr1 = Trace(data=osc1_data, header={'channel': 'H1', 'delta': 0.01,
                                        'npts': 10400})
    tr2 = Trace(data=osc2_data, header={'channel': 'H2', 'delta': 0.01,
                                        'npts': 10400})
    st = Stream([tr1, tr2])

    imts = ['PGA', 'PGV', 'SA0.3', 'SA1.0', 'SA3.0']
    station = StationSummary.from_stream(st, ['channels', 'rotd'], imts)

    # Get PGA and spectral accelerations
    st_PGA = station.oscillators['PGA']
    pgv = station.oscillators['PGV']
    st_SA10 = station.oscillators['SA1.0_ROT']
    st_SA30 = station.oscillators['SA3.0_ROT']
    st_SA03 = station.oscillators['SA0.3_ROT']

    rot_st_PGA = rotate(st_PGA[0], st_PGA[1], combine=True)
    rot_PGV = rotate(pgv[0], pgv[1], combine=True)

    max50_pgv = (get_max(rot_PGV, 'max', percentiles=[50]))[1]
    max50_pga = (get_max(rot_st_PGA, 'max', percentiles=[50]))[1]
    max50_SA10 = (get_max(st_SA10[0], 'max', percentiles=[50]))[1]
    max50_SA03 = (get_max(st_SA03[0], 'max', percentiles=[50]))[1]
    max50_SA30 = (get_max(st_SA30[0], 'max', percentiles=[50]))[1]

    # Check the calculations
    np.testing.assert_allclose(max50_pga, 4.12528265306, atol=0.1)
    np.testing.assert_allclose(max50_SA10, 10.7362857143, atol=0.1)
    np.testing.assert_allclose(max50_pgv, 6.239364, atol=0.1)
    np.testing.assert_allclose(max50_SA03, 10.1434159021, atol=0.1)
    np.testing.assert_allclose(max50_SA30, 1.12614169215, atol=0.1)

    # Test that GM, AM, and MAX work as expected with simple 1D datasets
    osc1 = np.asarray([0.0, 1.0, 2.0, 3.0])
    osc2 = np.asarray([4.0, 5.0, 6.0, 7.0])

    max_gm = get_max(osc1, 'gm', osc2)
    np.testing.assert_allclose(max_gm, 4.5826, atol=0.0001)

    max_am = get_max(osc1, 'am', osc2)
    np.testing.assert_allclose(max_am, 5.0, atol=0.0001)

    max_max = get_max(osc1, 'max', osc2)
    np.testing.assert_allclose(max_max, 7.0, atol=0.0001)

    # Test max for a single 1D array
    osc1 = np.array([0.0, 1.0, 2.0])
    max_val = get_max(osc1, 'max')
    assert max_val == 2.0

    # Test arithmetic mean with 2D input
    osc1 = np.array([[0.0, 1.0], [2.0, 3.0]])
    osc2 = np.array([[4.0, 5.0], [6.0, 7.0]])
    means = get_max(osc1, 'am', osc2)[0]
    assert (means[0] == 3.0 and means[1] == 5.0)

    # Test greater of two horizontals
    maxs = get_max(osc1, 'max', osc2)[0]
    assert (maxs[0] == 5.0 and maxs[1] == 7.0)