Code example #1
def get_events(eventids, textfile, eventinfo, directory):
    """Return a list of events from one of the four inputs:

    Args:
        eventids (list or None):
            List of ComCat event IDs.
        textfile (str or None):
            Path to text file containing event IDs or info.
        eventinfo (list or None):
            List containing:
                - id: Any string, no spaces.
                - time: Any ISO-compatible date/time string.
                - latitude: Latitude in decimal degrees.
                - longitude: Longitude in decimal degrees.
                - depth: Depth in kilometers.
                - magnitude: Earthquake magnitude.
        directory (str):
            Path to a directory containing event subdirectories, each
            containing an event.json file, where the ID in the json file
            matches the subdirectory containing it.

    Returns:
        list: ScalarEvent objects.

    """
    events = []
    if eventids is not None:
        for eventid in eventids:
            event = get_event_object(eventid)
            events.append(event)
    elif textfile is not None:
        events = parse_event_file(textfile)
    elif eventinfo is not None:
        eid = eventinfo[0]
        time = eventinfo[1]
        lat = float(eventinfo[2])
        lon = float(eventinfo[3])
        dep = float(eventinfo[4])
        mag = float(eventinfo[5])
        event = ScalarEvent()
        event.fromParams(eid, time, lat, lon, dep, mag)
        events = [event]
    elif directory is not None:
        eventfiles = get_event_files(directory)
        if not eventfiles:
            eventids = os.listdir(directory)
            try:
                for eventid in eventids:
                    event = get_event_object(eventid)
                    events.append(event)
            except Exception:
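                # NOTE: a single failure here discards every event read so
                # far; code example #4 below handles failures per event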
                events = []
        else:
            events = read_event_json_files(eventfiles)
    return events
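
A minimal usage sketch for get_events; the event ID and the eventinfo values
below are hypothetical placeholders, and exactly one of the four inputs is
expected to be non-None:

# Fetch events by ComCat ID (placeholder ID):
events = get_events(eventids=['us1000abcd'], textfile=None,
                    eventinfo=None, directory=None)

# Or build a single event from raw parameters (placeholder values):
info = ['myquake', '2020-01-01T00:00:00', 35.0, -118.0, 10.0, 5.5]
events = get_events(eventids=None, textfile=None,
                    eventinfo=info, directory=None)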
Code example #2
def read_data_dir(file_format, eventid, files=None):
    """Read desired data files and event dictionary from test directory.

    Args:
        file_format (str):
            Name of desired data format (smc, usc, etc.)
        eventid (str):
            ComCat or other event ID (should exist as a folder)
        files (None, str, or list):
            This is either:
                - None: retrieve all of the data files for the event.
                - str: a glob pattern (e.g. *.dat, AO*.*); note that glob
                  matches shell-style patterns, not regular expressions.
                - list: specific file names that should be returned.

    Returns:
        tuple:
            - List of data files.
            - ScalarEvent object.
    """
    datapath = os.path.join('data', 'testdata')
    testroot = pkg_resources.resource_filename('gmprocess', datapath)
    eventdir = os.path.join(testroot, file_format, eventid)
    if not os.path.isdir(eventdir):
        return (None, None)
    datafiles = []
    if files is None:
        allfiles = os.listdir(eventdir)
        allfiles.remove('event.json')
        for dfile in allfiles:
            datafile = os.path.join(eventdir, dfile)
            datafiles.append(datafile)
    elif isinstance(files, str):  # glob pattern
        datafiles = glob.glob(os.path.join(eventdir, files))
    else:  # this is just a list of filenames
        for tfile in files:
            fullfile = os.path.join(eventdir, tfile)
            if os.path.isfile(fullfile):
                datafiles.append(fullfile)

    # read the event.json file
    jsonfile = os.path.join(eventdir, 'event.json')
    if not os.path.isfile(jsonfile):
        # Without this guard, the open() below would raise for a missing file
        return (datafiles, None)
    with open(jsonfile, 'rt') as f:
        tevent = json.load(f)
        event = ScalarEvent()
        if 'magnitude_type' not in tevent:
            tevent['magnitude_type'] = None
        event.fromParams(tevent['id'], tevent['time'], tevent['lat'],
                         tevent['lon'], tevent['depth'], tevent['magnitude'],
                         tevent['magnitude_type'])
    return (datafiles, event)
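
A hedged usage sketch for read_data_dir; 'smc' and the event ID are
placeholders that must exist as folders under the package's testdata
directory for real data to be returned:

# All data files for the event (files=None):
datafiles, event = read_data_dir('smc', 'usp000hat0')

# Only files matching a glob pattern:
datafiles, event = read_data_dir('smc', 'usp000hat0', '*.smc')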
Code example #3
def read_data_dir(file_format, eventid, files=None):
    """Read desired data files and event dictionary from test directory.

    Args:
        file_format (str):
            Name of desired data format (smc, usc, etc.)
        eventid (str):
            ComCat or other event ID (should exist as a folder)
        files (None, str, or list):
            This is either:
                - None: retrieve all of the data files for the event.
                - str: a glob pattern (e.g. *.dat, AO*.*); note that glob
                  matches shell-style patterns, not regular expressions.
                - list: specific file names that should be returned.

    Returns:
        tuple:
            - List of data files.
            - ScalarEvent object.
    """
    datapath = os.path.join('data', 'testdata')
    testroot = pkg_resources.resource_filename('gmprocess', datapath)
    eventdir = os.path.join(testroot, file_format, eventid)
    if not os.path.isdir(eventdir):
        return (None, None)
    datafiles = []
    if files is None:
        allfiles = os.listdir(eventdir)
        allfiles.remove('event.json')
        for dfile in allfiles:
            datafile = os.path.join(eventdir, dfile)
            datafiles.append(datafile)
    elif isinstance(files, str):  # glob pattern
        datafiles = glob.glob(os.path.join(eventdir, files))
    else:  # this is just a list of filenames
        for tfile in files:
            fullfile = os.path.join(eventdir, tfile)
            if os.path.isfile(fullfile):
                datafiles.append(fullfile)

    # read the event.json file
    jsonfile = os.path.join(eventdir, 'event.json')
    if not os.path.isfile(jsonfile):
        # Without this guard, the open() below would raise for a missing file
        return (datafiles, None)
    with open(jsonfile, 'rt') as f:
        tevent = json.load(f)
        event = ScalarEvent()
        event.fromParams(tevent['id'], tevent['time'],
                         tevent['lat'], tevent['lon'],
                         tevent['depth'], tevent['magnitude'])
    return (datafiles, event)
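
Because this version can return (datafiles, None) when no event.json is
present, callers should guard against a missing event. A sketch, with
placeholder format and event ID:

datafiles, event = read_data_dir('usc', 'ci3144585')
if event is None:
    raise ValueError('No event.json found in the event directory.')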
Code example #4
def get_events(eventids, textfile, eventinfo, directory, outdir=None):
    """Find the list of events.

    Args:
        eventids (list or None):
            List of ComCat event IDs.
        textfile (str or None):
            Path to text file containing event IDs or info.
        eventinfo (list or None):
            List containing:
                - id: Any string, no spaces.
                - time: Any ISO-compatible date/time string.
                - latitude: Latitude in decimal degrees.
                - longitude: Longitude in decimal degrees.
                - depth: Depth in kilometers.
                - magnitude: Earthquake magnitude.
                - magnitude_type: Earthquake magnitude type.
        directory (str):
            Path to a directory containing event subdirectories, each
            containing an event.json file, where the ID in the json file
            matches the subdirectory containing it.
        outdir (str):
            Output directory.

    Returns:
        list: ScalarEvent objects.

    """
    events = []
    if eventids is not None:
        for eventid in eventids:
            event = get_event_object(eventid)
            events.append(event)
    elif textfile is not None:
        events = parse_event_file(textfile)
    elif eventinfo is not None:
        eid = eventinfo[0]
        time = eventinfo[1]
        lat = float(eventinfo[2])
        lon = float(eventinfo[3])
        dep = float(eventinfo[4])
        mag = float(eventinfo[5])
        mag_type = str(eventinfo[6])
        event = ScalarEvent()
        event.fromParams(eid, time, lat, lon, dep, mag, mag_type)
        events = [event]
    elif directory is not None:
        eventfiles = get_event_files(directory)
        if not eventfiles:
            eventids = [
                f for f in os.listdir(directory) if not f.startswith('.')
            ]
            for eventid in eventids:
                try:
                    event = get_event_object(eventid)
                    events.append(event)

                    # If the event ID has been updated, make sure to rename
                    # the source folder and issue a warning to the user
                    if event.id != eventid:
                        old_dir = os.path.join(directory, eventid)
                        new_dir = os.path.join(directory, event.id)
                        os.rename(old_dir, new_dir)
                        logging.warning(
                            'Directory %s has been renamed to %s.' %
                            (old_dir, new_dir))
                except Exception:
                    logging.warning('Could not get info for event id: %s' %
                                    eventid)
        else:
            events = read_event_json_files(eventfiles)

    elif outdir is not None:
        eventfiles = get_event_files(outdir)
        if not eventfiles:
            eventids = os.listdir(outdir)
            for eventid in eventids:
                try:
                    event = get_event_object(eventid)
                    events.append(event)
                except Exception:
                    logging.warning('Could not get info for event id: %s' %
                                    eventid)
        else:
            events = read_event_json_files(eventfiles)

    return events
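
A hedged sketch of the directory mode handled by this version; the path is
a hypothetical placeholder, and each subdirectory under it is expected to
contain an event.json file:

events = get_events(eventids=None, textfile=None, eventinfo=None,
                    directory='./data/events')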
Code example #5
def create_travel_time_dataframe(streams, catalog_file, ddepth, ddist, model):
    """
    Creates a travel time dataframe, which contains the phase arrival times
    for each station in the StreamCollection, for each event in the catalog.
    This uses an interpolation method to save time, and the fineness of the
    interpolation grid can be adjusted using the ddepth and ddist parameters.
    The recommended values of ddepth=5 and ddist=0.1 are generally sufficient
    to achieve less than 0.1 seconds of error in the travel times in most
    cases.

    Args:
        streams (StreamCollection):
            Streams to calculate travel times for.
        catalog_file (str):
            The path to the CSV file (from ComCat) which contains event info.
        ddepth (float):
            The depth spacing (in km) for the interpolation grid.
            Recommended value is 5 km.
        ddist (float):
            The distance spacing (in decimal degrees) for the interpolation
            grid. Recommended value is 0.1 degrees.
        model (str):
            Name of the TauPy velocity model to use (e.g. 'iasp91').

    Returns:
        A tuple containing the travel time dataframe and the catalog
        (list of ScalarEvent objects).
    """

    # Read the catalog file and create a catalog (list) of ScalarEvent objects
    df_catalog = pd.read_csv(catalog_file)

    # Replace any negative depths with 0
    df_catalog['depth'] = df_catalog['depth'].clip(lower=0)
    catalog = []
    for idx, row in df_catalog.iterrows():
        event = ScalarEvent()
        event.fromParams(row['id'], row['time'], row['latitude'],
                         row['longitude'], row['depth'], row['mag'])
        catalog.append(event)

    # Store the lat, lon, and id for each stream
    st_lats, st_lons, st_ids = [], [], []
    for st in streams:
        st_lats.append(st[0].stats.coordinates.latitude)
        st_lons.append(st[0].stats.coordinates.longitude)
        st_ids.append(st[0].stats.network + '.' + st[0].stats.station)

    # Calculate the distance for each stream, for each event
    # Store distances in a matrix
    distances_matrix = np.zeros((len(streams), len(catalog)))
    for idx, st in enumerate(streams):
        distances_matrix[idx] = locations2degrees(
            np.repeat(st_lats[idx], len(catalog)),
            np.repeat(st_lons[idx], len(catalog)),
            df_catalog['latitude'], df_catalog['longitude'])
    distances_matrix = distances_matrix.T

    # Calculate the minimum depth/distance values for the interpolation grid
    # This includes a buffer to avoid interpolating at the endpoints
    # Make sure that the minimum depth/distance values aren't negative
    minimum_depth = max([0, min(df_catalog['depth']) - 2 * ddepth])
    minimum_dist = max([0, distances_matrix.min() - 2 * ddist])
    depth_grid = np.arange(
        minimum_depth, max(df_catalog['depth']) + 2 * ddepth, ddepth)
    distance_grid = np.arange(
        minimum_dist, distances_matrix.max() + 2 * ddist, ddist)

    # For each distance and each depth, compute the travel time
    # Store values in the "times" 2D matrix
    taupy_model = TauPyModel(model)
    times = np.zeros((len(depth_grid), len(distance_grid)))
    for i, depth in enumerate(depth_grid):
        for j, dist in enumerate(distance_grid):
            arrivals = taupy_model.get_travel_times(
                depth, dist, ['p', 'P', 'Pn'])
            if not arrivals:
                times[i][j] = np.nan
            else:
                times[i][j] = arrivals[0].time

    # Use 2D interpolation to interpolate values at the actual points
    points = np.transpose([np.tile(distance_grid, len(depth_grid)),
                           np.repeat(depth_grid, len(distance_grid))])
    new_points = np.vstack(
        (distances_matrix.flatten(),
         np.repeat(df_catalog['depth'], len(streams)))).T
    interpolated_times = griddata(points, times.flatten(), new_points).reshape(
        (-1, len(streams)))
    utcdatetimes = np.array([UTCDateTime(time) for time in df_catalog['time']])
    interpolated_times = utcdatetimes.reshape(-1, 1) + interpolated_times

    # Store travel time information in a DataFrame
    # Columns are the station ids, rows are the earthquake ids
    df = pd.DataFrame(data=interpolated_times, index=df_catalog['id'],
                      columns=st_ids)

    # Remove any duplicate columns which might result from a station with
    # multiple instruments
    df = df.loc[:, ~df.columns.duplicated()]
    return df, catalog
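
A hedged usage sketch for create_travel_time_dataframe; it assumes streams
is an existing StreamCollection, 'catalog.csv' is a placeholder path to a
ComCat catalog export, 'iasp91' is a standard ObsPy TauPy model, and the
event/station IDs used in the lookup are hypothetical:

df, catalog = create_travel_time_dataframe(
    streams, 'catalog.csv', ddepth=5, ddist=0.1, model='iasp91')

# Look up the interpolated P arrival time for one event/station pair:
arrival_time = df.loc['us1000abcd', 'CI.PASC']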
Code example #6
def test_scalar():
    eid = 'usp000hat0'
    time = UTCDateTime('2010-04-06 22:15:01.580')
    lat = 2.383
    lon = 97.048
    depth = 31.0
    mag = 7.8

    event = ScalarEvent()
    origin = Origin(resource_id=eid,
                    time=time,
                    latitude=lat,
                    longitude=lon,
                    depth=depth * 1000)
    magnitude = Magnitude(mag=mag)
    event.origins = [origin]
    event.magnitudes = [magnitude]

    assert event.id == eid
    assert event.time == time
    assert event.latitude == lat
    assert event.longitude == lon
    assert event.depth_km == depth
    assert event.magnitude == mag

    subdir = os.path.join('data', 'testdata', 'usp000hat0_quakeml.xml')
    quakeml = pkg_resources.resource_filename('gmprocess', subdir)
    catalog = read_events(quakeml)
    tevent = catalog.events[0]
    event = ScalarEvent.fromEvent(tevent)
    assert event.id == 'quakeml:us.anss.org/origin/pde20100406221501580_31'
    assert event.time == time
    assert event.latitude == lat
    assert event.longitude == lon
    assert event.depth_km == depth
    assert event.magnitude == mag

    event = ScalarEvent()
    event.fromParams(eid, time, lat, lon, depth, mag)
    assert isinstance(event, Event)
    assert event.origins[0].resource_id == eid
    assert event.origins[0].time == time
    assert event.origins[0].latitude == lat
    assert event.origins[0].longitude == lon
    assert event.origins[0].depth == depth * 1000
    assert event.magnitudes[0].mag == mag

    tevent = Event()
    origin = Origin(resource_id=eid,
                    time=time,
                    longitude=lon,
                    latitude=lat,
                    depth=depth * 1000)
    magnitude = Magnitude(resource_id=eid, mag=mag)
    tevent.origins = [origin]
    tevent.magnitudes = [magnitude]
    event2 = ScalarEvent.fromEvent(tevent)
    assert isinstance(event2, Event)
    assert event2.origins[0].resource_id == eid
    assert event2.origins[0].time == time
    assert event2.origins[0].latitude == lat
    assert event2.origins[0].longitude == lon
    assert event2.origins[0].depth == depth * 1000
    assert event2.magnitudes[0].mag == mag
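
A standard runner guard (an addition for convenience, not part of the
original test) lets test_scalar run as a plain script:

if __name__ == '__main__':
    test_scalar()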