def fromFiles(cls, directory, imcs=None, imts=None, process=True):
        """
        Build an EventSummary object from all data files in a directory.

        Args:
            directory (str): Path to input files.
            imcs (list): List of intensity measurement components (str). Default
                    is None.
            imts (list): List of intensity measurement types (str). Default
                    is None.
            process (bool): Whether or not to process the streams.

        Returns:
            EventSummary: EventSummary object.
        """
        # Read every file in the directory, then group traces by channel
        # so that each stream holds the components of one station.
        raw_streams = [read_data(path) for path in glob.glob(directory + '/*')]
        grouped = group_channels(raw_streams)
        # Key each grouped stream by the station code of its first trace.
        uncorrected = {
            stream[0].stats['station']: stream for stream in grouped
        }
        event = cls()
        event.uncorrected_streams = uncorrected
        if process:
            event.process()
            # create dictionary of StationSummary objects for use by other methods
            event.setStationDictionary(imcs, imts)
        return event
# --- Example 2 ---
def test_spectral():
    # directory containing this script
    homedir = os.path.dirname(os.path.abspath(__file__))
    acc_file = os.path.join(homedir, '..', 'data', 'geonet',
                            '20161113_110259_WTMC_20.V2A')
    stream = read_data(acc_file)
    # smoke test: spectral computation with geometric-mean rotation
    get_spectral(1.0, stream, 0.05, rotation='gm')
# --- Example 3 ---
    def from_files(cls, directory, imcs, imts):
        """
        Build an EventSummary object from all data files in a directory.

        Args:
            directory (str): Path to input files.
            imcs (list): List of intensity measurement components (str).
            imts (list): List of intensity measurement types (str).

        Returns:
            EventSummary: EventSummary object.
        """
        # read every file, then group the traces by station/channel
        raw_streams = [read_data(path) for path in glob.glob(directory + '/*')]
        grouped = group_channels(raw_streams)
        # process streams
        #TODO separate into another method and add config for processing parameters
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            grouped = [filter_detrend(stream) for stream in grouped]
        # one StationSummary per station, keyed by the station code
        station_dict = OrderedDict()
        for stream in grouped:
            station_dict[stream[0].stats['station']] = StationSummary(
                stream, imcs, imts)
        return cls(station_dict)
# --- Example 4 ---
def test_velocity():
    # directory containing this script
    homedir = os.path.dirname(os.path.abspath(__file__))
    acc_file = os.path.join(homedir, '..', 'data', 'geonet',
                            '20161113_110259_WTMC_20.V2A')
    stream = read_data(acc_file)
    # get_velocity should match direct integration of the acceleration
    expected = stream.copy().integrate()[0]
    velocity = get_velocity(stream)
    np.testing.assert_allclose(velocity[0], expected)
def test_plot():
    # load the cwb test streams
    homedir = os.path.dirname(os.path.abspath(__file__))
    datadir = os.path.join(homedir, '..', 'data', 'cwb')
    streams = [read_data(path) for path in glob.glob(datadir + '/*')]

    # single-stream arias plot: one axis per channel
    single_axes = plot_arias(streams[3])
    assert len(single_axes) == 3

    # arias plots for every stream on a shared figure
    shared_axes = matplotlib.pyplot.subplots(
        len(streams), 3, figsize=(15, 10))[1]
    shared_axes = shared_axes.flatten()
    for position, stream in enumerate(streams):
        shared_axes = plot_arias(stream,
                                 axes=shared_axes,
                                 axis_index=position * 3,
                                 minfontsize=15,
                                 show_maximum=False,
                                 title="18km NNE of Hualian, Taiwan")

    # single-stream duration plot
    durations = [(0.05, 0.75), (0.2, 0.8), (0.05, .95)]
    single_axes = plot_durations(streams[3], durations)
    assert len(single_axes) == 3

    # duration plots for every stream on a shared figure
    shared_axes = matplotlib.pyplot.subplots(
        len(streams), 3, figsize=(15, 10))[1]
    shared_axes = shared_axes.flatten()
    for position, stream in enumerate(streams):
        shared_axes = plot_durations(stream,
                                     durations,
                                     axes=shared_axes,
                                     axis_index=position * 3,
                                     minfontsize=15,
                                     title="18km NNE of Hualian, Taiwan")

    # moveout plot relative to the epicenter
    epicenter_lat = 24.14
    epicenter_lon = 121.69
    plot_moveout(streams,
                 epicenter_lat,
                 epicenter_lon,
                 'BN1',
                 cmap='nipy_spectral_r',
                 figsize=(15, 10),
                 minfontsize=16,
                 normalize=True,
                 scale=10)
def test_read():
    # all fixture files live under ../../data relative to this script
    homedir = os.path.dirname(os.path.abspath(__file__))
    data_dir = os.path.join(homedir, '..', '..', 'data')
    file_dict = {
        'cosmos': os.path.join(data_dir, 'cosmos',
                               'Cosmos12TimeSeriesTest.v1'),
        'cwb': os.path.join(data_dir, 'cwb', '1-EAS.dat'),
        'dmg': os.path.join(data_dir, 'dmg', 'CE89146.V2'),
        'geonet': os.path.join(data_dir, 'geonet',
                               '20161113_110259_WTMC_20.V1A'),
        'knet': os.path.join(data_dir, 'knet', 'AOM0011801241951.EW'),
        'smc': os.path.join(data_dir, 'smc', '0111a.smc'),
    }

    # detection and validation round-trip for every sample file
    for file_format, file_path in file_dict.items():
        assert _get_format(file_path) == file_format
        assert _validate_format(file_path, file_format) == file_format

    # validation must override a wrong or unknown format hint
    assert _validate_format(file_dict['knet'], 'smc') == 'knet'
    assert _validate_format(file_dict['dmg'], 'cosmos') == 'dmg'
    assert _validate_format(file_dict['cosmos'], 'invalid') == 'cosmos'

    # reading works both with an explicit format and with auto-detection
    for file_format, file_path in file_dict.items():
        stream = read_data(file_path, file_format)
        assert stream[0].stats.standard['source_format'] == file_format
        stream = read_data(file_path)
        assert stream[0].stats.standard['source_format'] == file_format

    # a nonexistent file must raise AmptoolsException
    missing_path = os.path.join(data_dir, 'smc', 'not_a_file.smc')
    raised = False
    try:
        read_data(missing_path)
    except AmptoolsException:
        raised = True
    assert raised
def test_arias():
    # directory containing this script
    homedir = os.path.dirname(os.path.abspath(__file__))
    data_dir = os.path.join(homedir, '..', 'data',
                            'evolutionary_IM_examples.xlsx')
    df = pd.read_excel(data_dir)
    time = df.iloc[:, 0].values
    # input / output are m/s/s
    acc = df.iloc[:, 1].values / 100
    target_IA = df.iloc[:, 4].values[0]
    delta = time[2] - time[1]
    header = {
        'delta': delta,
        'sampling_rate': 1 / delta,
        'npts': len(acc),
        'units': 'm/s/s',
        'channel': 'H1'
    }
    stream = Stream([Trace(data=acc, header=header)])
    Ia = calculate_arias(stream, ['channels'])['H1']
    # the target has only one decimal place and is in cm/s/s
    np.testing.assert_almost_equal(Ia * 100, target_IA, decimal=1)

    # input is cm/s/s output is m/s/s
    stream = Stream([Trace(data=acc * 100, header=header)])
    station = StationSummary.from_stream(stream, ['channels'], ['arias'])
    Ia = station.pgms['ARIAS']['H1']
    # the target has only one decimal place and is in cm/s/s
    np.testing.assert_almost_equal(Ia * 100, target_IA, decimal=1)

    # Test other components
    data_file = os.path.join(homedir, '..', 'data', 'cwb', '2-ECU.dat')
    stream = read_data(data_file)
    station = StationSummary.from_stream(
        stream, ['channels', 'gmrotd', 'rotd50', 'greater_of_two_horizontals'],
        ['arias'])
# --- Example 8 ---
def test_acceleration():
    # directory containing this script
    homedir = os.path.dirname(os.path.abspath(__file__))
    acc_file = os.path.join(homedir, '..', 'data', 'geonet',
                            '20161113_110259_WTMC_20.V2A')
    stream = read_data(acc_file)
    raw = stream[0].data

    # expected data for each supported output unit, in the same order the
    # original checks ran: percent-g, m/s/s, then the native cm/s/s
    cases = [
        ('%%g', raw * GAL_TO_PCTG),
        ('m/s/s', raw / 100),
        ('cm/s/s', raw),
    ]
    for units, expected in cases:
        converted = get_acceleration(stream, units=units)
        assert converted[0].stats['units'] == units
        np.testing.assert_allclose(converted[0], expected)
def get_station_data(event, data_type):
    """
    Read seismic data for an event, group traces by station, and collect
    per-station metadata.

    Args:
        event (str): USGS event ID.
        data_type (str): Seismic data format.
                         Options are:
                            knt
                            kik
                            smc
                            v2

    Returns:
        stations (list): Grouped streams of acceleration data, one per
            station.
        station_stats (list): [source_format, name, lat, lon, elevation]
            for each station (one entry per station, not per trace).
    """

    # Define filepath and selected stations to use (if there are any).
    filepath = os.path.join('/Users/tnye/PROJECTS/Duration/data/events', event,
                            'ground_motion', data_type)
    select_stns = os.path.join('/Users/tnye/PROJECTS/Duration/data/events',
                               event, 'select_stations', data_type + '.csv')

    os.chdir(filepath)

    # Glob patterns per data format. NOTE: these must be real tuples --
    # the original ('*a.smc') / ('*.V2') were plain strings, which only
    # worked by accident because glob.glob accepts a string.
    patterns = {
        'knt': ('*.EW', '*.NS', '*.UD'),
        'kik': ('*.EW2', '*.NS2', '*.UD2'),
        'smc': ('*a.smc',),
        'v2': ('*.V2',),
    }
    files_grabbed = []
    for pattern in patterns.get(data_type, ()):
        files_grabbed.extend(glob.glob(pattern))

    # Read files using Amptools. Handle failures per file so one bad file
    # does not abort the remaining reads (the previous bare `except` also
    # referenced the loop index, which is unbound when no files matched).
    data = []
    for file_name in files_grabbed:
        try:
            data.append(read_data(file_name))
        except Exception:
            print('Bad file', file_name)

    # Filter out stations with poor records using a dataframe of pre-selected
    # station names for knet and kiknet events. Exact-name matching replaces
    # the previous substring test against DataFrame.to_string(), which could
    # accidentally match partial station codes.
    names = pd.read_csv(select_stns, usecols=['X'], sep=",")
    selected = set(names['X'].astype(str).str.strip())
    filtered_files = []
    for st in data:
        for trace in st:
            if trace.stats.station in selected:
                filtered_files.append([trace])
    data = filtered_files

    # Group all acceleration files by station if they are not already grouped.
    # Guard against an empty result so data[0] cannot raise IndexError.
    if data and len(data[0]) == 3:
        stations = data
    else:
        stations = group_channels(data)

    # Obtain stats for each station using the stats from the first trace.
    # Also record the source format (read does not distinguish between knet
    # and kik). One entry per station -- the original inner loop appended the
    # same entry once per trace, producing duplicates.
    station_stats = []
    for sta in stations:
        trace = sta[0]
        station_stats.append([
            trace.stats.standard['source_format'],
            trace.stats['station'],
            trace.stats.coordinates['latitude'],
            trace.stats.coordinates['longitude'],
            trace.stats.coordinates['elevation'],
        ])

    return (stations, station_stats)
# --- Example 10 ---
    st = read(
        '/Users/tnye/PROJECTS/Duration/data/events/us2000dwh6/ground_motion/kik/*.NS2'
    )

elif data_type == 'smc':

    # Read in one component of acceleration.
    os.chdir(
        '/Users/tnye/PROJECTS/Duration/data/events/nc216859/ground_motion/smc')
    files_grabbed = []
    for file in glob.glob('*a.smc'):
        files_grabbed.append(file)

    st_list = []
    for file in files_grabbed:
        data = read_data(file)
        st_list.append(data)
    flat_list = [item for sublist in st_list for item in sublist]

    st = Stream([])
    for trace in flat_list:
        st.append(trace)

elif data_type == 'v2':

    # Read in one component of acceleration.
    os.chdir(
        '/Users/tnye/PROJECTS/Duration/data/events/nc72282711/ground_motion/v2'
    )
    files_grabbed = []
    for file in glob.glob('*.V2'):
def test_grouping():
    homedir = os.path.dirname(os.path.abspath(__file__))

    def _collect(subdir, pattern='*', keep=None, **read_kwargs):
        # Read every matching file under ../data/<subdir>, optionally
        # filtered by a keep(path) predicate.
        glob_path = os.path.join(homedir, '..', 'data', subdir, pattern)
        return [read_data(path, **read_kwargs)
                for path in glob.glob(glob_path)
                if keep is None or keep(path)]

    def _not_json(path):
        return not path.endswith('.json')

    # cwb
    cwb_streams = group_channels(_collect('cwb'))
    assert len(cwb_streams) == 5
    for grouped in cwb_streams:
        assert len(grouped) == 3

    # dmg: skip known-bad records
    def _dmg_ok(path):
        base = os.path.basename(path)
        return not base.startswith('Bad') and not base.startswith('CE58667')

    dmg_streams = group_channels(_collect('dmg', '*.V2', keep=_dmg_ok))
    assert len(dmg_streams) == 2
    for grouped in dmg_streams:
        assert len(grouped) == 3

    # geonet: grouped streams share one station and one process level
    geonet_streams = group_channels(_collect('geonet'))
    assert len(geonet_streams) == 7
    for grouped in geonet_streams:
        assert len(grouped) == 3
        assert len(grouped.select(station=grouped[0].stats.station)) == 3
        level = grouped[0].stats.standard.process_level
        for trace in grouped:
            assert trace.stats.standard.process_level == level

    # kiknet
    kiknet_streams = group_channels(_collect('kiknet'))
    assert len(kiknet_streams) == 1
    for grouped in kiknet_streams:
        assert len(grouped) == 3
        assert len(grouped.select(station=grouped[0].stats.station)) == 3

    # knet
    knet_streams = group_channels(_collect('knet'))
    assert len(knet_streams) == 9
    for grouped in knet_streams:
        assert len(grouped) == 3
        assert len(grouped.select(station=grouped[0].stats.station)) == 3
        level = grouped[0].stats.standard.process_level
        for trace in grouped:
            assert trace.stats.standard.process_level == level

    # obspy
    obspy_streams = group_channels(_collect('obspy', keep=_not_json))
    assert len(obspy_streams) == 1
    for grouped in obspy_streams:
        assert len(grouped) == 3
        assert len(grouped.select(station=grouped[0].stats.station)) == 3

    # smc: station DVD0 has one channel, location '01' has two, rest three
    smc_streams = group_channels(
        _collect('smc', keep=_not_json, any_structure=True))
    assert len(smc_streams) == 6
    for grouped in smc_streams:
        station = grouped[0].stats.station
        if station == 'DVD0':
            expected = 1
        elif grouped[0].stats.location == '01':
            expected = 2
        else:
            expected = 3
        assert len(grouped) == expected
        assert len(grouped.select(station=station)) == expected

    # usc: one file is excluded; station '57' has a single channel
    def _usc_ok(path):
        return (not path.endswith('.json')
                and os.path.basename(path) != '017m30bt.s0a')

    usc_streams = group_channels(_collect('usc', keep=_usc_ok))
    assert len(usc_streams) == 3
    for grouped in usc_streams:
        if grouped[0].stats.station == '57':
            assert len(grouped) == 1
        else:
            assert len(grouped) == 3