Example #1
def extract_tectonics(df, stations, pre, pos, outpath, label, latitude,
                      longitude):
    model = TauPyModel(model='iasp91')

    for date, df in tectonic.iterate_days(df):
        date = UTCDateTime(date)
        st = Stream()
        for station in stations:
            for channel in ['HHE', 'HHN', 'HHZ']:
                filepath = DB_MSEED_PATH_FMT.format(waves_path=WAVES_PATH,
                                                    year=date.year,
                                                    julday=date.julday,
                                                    station=station,
                                                    channel=channel)
                if path.isfile(filepath):
                    st += read(filepath)

        for i, row in df.iterrows():
            print(row.time)
            source_depth_in_km = row.depth
            if source_depth_in_km < 0:
                source_depth_in_km = 0

            distance_in_degree = locations2degrees(row.latitude, row.longitude,
                                                   latitude, longitude)

            arrivals = model.get_travel_times(
                source_depth_in_km=source_depth_in_km,
                distance_in_degree=distance_in_degree,
                phase_list=['p', 's', 'P', 'S', 'Pdiff', 'Sdiff'])

            origin_time = UTCDateTime(row.time)

            # Keep the last matching arrival of each type; this assumes the
            # phase list always yields at least one P- and one S-type arrival.
            p = s = None
            for arrival in arrivals:
                if arrival.name in ['p', 'P', 'Pdiff']:
                    p = origin_time + arrival.time
                elif arrival.name in ['s', 'S', 'Sdiff']:
                    s = origin_time + arrival.time

            starttime = p - pre
            endtime = s + pos

            _st = st.slice(starttime=starttime, endtime=endtime)
            if len(_st) > 0:
                msd_outpath = path.join(
                    outpath, MSD_FNAME_FMT.format(**st_to_fname(label, _st)))
                _st.write(msd_outpath, format='MSEED')

            d = dict(longitude=row.longitude,
                     latitude=row.latitude,
                     depth=row.depth,
                     magnitude=row.magnitude,
                     time=row.time)

            jsn_outpath = path.join(
                outpath, JSN_FNAME_FMT.format(**st_to_fname(label, _st)))
            with open(jsn_outpath, 'w') as f:
                json.dump(d, f, indent=4)
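For reference, the core pattern above (epicentral distance, TauPy travel
times, then a P-to-S cut window) reduces to a few self-contained lines; the
coordinates, depth and origin time below are hypothetical.

from obspy import UTCDateTime
from obspy.taup import TauPyModel
from obspy.geodetics.base import locations2degrees

model = TauPyModel(model='iasp91')
origin_time = UTCDateTime('2020-01-01T00:00:00')  # hypothetical origin time
# hypothetical event (10.0, -84.0) and station (9.5, -84.2)
dist_deg = locations2degrees(10.0, -84.0, 9.5, -84.2)
arrivals = model.get_travel_times(source_depth_in_km=10.0,
                                  distance_in_degree=dist_deg,
                                  phase_list=['p', 's', 'P', 'S'])
p = origin_time + arrivals[0].time   # earliest arrival is P-type here
s = origin_time + arrivals[-1].time  # latest arrival is S-type here
# a stream read from disk would then be cut with
# st.slice(starttime=p - pre, endtime=s + pos)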
Example #2
def get_station_delays(station_coords,sources,velmod='PREM',phase_list=['s','S']):
    '''
    Given an array of station coordinates and sources, calculate the travel
    time from each source to each site.

    velmod is the FULL path to the .npz file used by TauPy
    '''
    
    from obspy.taup import TauPyModel
    from numpy import zeros
    from obspy.geodetics.base import locations2degrees

    model = TauPyModel(model=velmod)
    
    # Initialize output variable
    delay_time = zeros((len(sources), len(station_coords)))
    
    # Loop over sources
    for ksource in range(len(sources)):

        print('Working on source %d of %d ' % (ksource, len(sources)))

        # loop over sites
        for ksite in range(len(station_coords)):

            distance_in_degrees = locations2degrees(station_coords[ksite, 1],
                                                    station_coords[ksite, 0],
                                                    sources[ksource, 2],
                                                    sources[ksource, 1])

            arrivals = model.get_travel_times(source_depth_in_km=sources[ksource, 3],
                                              distance_in_degree=distance_in_degrees,
                                              phase_list=phase_list)

            delay_time[ksource, ksite] = arrivals[0].time
            
    return delay_time
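A hypothetical call matching the column order indexed above (stations as
(lon, lat) rows, sources as (id, lon, lat, depth_km) rows), using the
built-in 'prem' model name instead of a custom .npz path:

import numpy as np

station_coords = np.array([[-84.0, 9.5],
                           [-70.0, -20.0]])
sources = np.array([[1, -100.0, 40.0, 33.0]])
delays = get_station_delays(station_coords, sources, velmod='prem')
print(delays.shape)  # (1, 2): one source by two stations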
Example #3
def _earth_distance(row):
    lat0 = row['Latitude_left']
    long0 = row['Longitude_left']
    lat1 = row['Latitude_right']
    long1 = row['Longitude_right']
    return np.deg2rad(locations2degrees(lat0, long0, lat1,
                                        long1)) * NOMINAL_EARTH_RADIUS_KM
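NOMINAL_EARTH_RADIUS_KM is defined elsewhere in that code base; a standalone
sketch of the same degrees-to-kilometres conversion, assuming the usual mean
Earth radius of 6371 km:

import numpy as np
from obspy.geodetics.base import locations2degrees

NOMINAL_EARTH_RADIUS_KM = 6371.0  # assumed value

deg = locations2degrees(50.0, -5.0, 51.0, -4.0)
km = np.deg2rad(deg) * NOMINAL_EARTH_RADIUS_KM
print('%.1f km' % km)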
Example #4
def returnDistDeg(staLat, staLon, eqLat, eqLon):
    '''This function returns the epicentral distance in degrees when given
       the location parameters of an event and station.'''
    from obspy.geodetics.base import locations2degrees
    from obspy.geodetics.base import gps2dist_azimuth
    # for vmqc001 you need the import below.
    #from obspy.core.util import locations2degrees

    #staLat = 34.945910 # default station is ANMO
    #staLon = -106.4572
    #staLat = 51.882130
    #staLon = -176.684

    #eqLat = 37.580 #default event in Turkey, 2017055 11:07:27, M5.6
    #eqLon = 38.440
    #eqLat = 13.828
    #eqLon = -92.269
    #eqLat = -19.281
    #eqLon = -63.905

    # get the distance between the earthquake and station in degrees
    DegDist = locations2degrees(staLat, staLon, eqLat, eqLon)

    # print the result
    print('The distance between the station and earthquake is: ' + str(DegDist))

    return DegDist
Example #5
def plot_raw(rawdir, tcollection, event):
    """Make PNG plots of a collection of raw waveforms.

    Args:
        rawdir (str):
            Directory where PNG files should be saved.
        tcollection (StreamCollection):
            Sequence of streams.
        event (ScalarEvent):
            Event object.

    """
    model = TauPyModel(model="iasp91")
    source_depth = event.depth_km
    if source_depth < 0:
        source_depth = 0
    eqlat = event.latitude
    eqlon = event.longitude
    for stream in tcollection:
        stlat = stream[0].stats.coordinates['latitude']
        stlon = stream[0].stats.coordinates['longitude']
        dist = float(locations2degrees(eqlat, eqlon, stlat, stlon))
        try:
            arrivals = model.get_travel_times(source_depth_in_km=source_depth,
                                              distance_in_degree=dist,
                                              phase_list=['P', 'p', 'Pn'])
            arrival = arrivals[0]
            arrival_time = arrival.time
        except BaseException as e:
            fmt = ('Exception "%s" generated by get_travel_times() dist=%.3f '
                   'depth=%.1f')
            logging.warning(fmt % (str(e), dist, source_depth))
            arrival_time = 0.0
        ptime = arrival_time + (event.time - stream[0].stats.starttime)
        outfile = os.path.join(rawdir, '%s.png' % stream.get_id())

        fig, axeslist = plt.subplots(nrows=3, ncols=1, figsize=(12, 6))
        for ax, trace in zip(axeslist, stream):
            ax.plot(trace.times(), trace.data, color='k')
            ax.set_xlabel('seconds since start of trace')
            ax.set_title('')
            ax.axvline(ptime, color='r')
            ax.set_xlim(left=0, right=trace.times()[-1])
            legstr = '%s.%s.%s.%s' % (trace.stats.network, trace.stats.station,
                                      trace.stats.location,
                                      trace.stats.channel)
            ax.legend(labels=[legstr], frameon=True, loc='upper left')
            tbefore = event.time + arrival_time < trace.stats.starttime + 1.0
            tafter = event.time + arrival_time > trace.stats.endtime - 1.0
            if tbefore or tafter:
                legstr = 'P arrival time %.1f seconds' % ptime
                left, right = ax.get_xlim()
                xloc = left + (right - left) / 20
                bottom, top = ax.get_ylim()
                yloc = bottom + (top - bottom) / 10
                ax.text(xloc, yloc, legstr, color='r')
        plt.savefig(outfile, bbox_inches='tight')
        plt.close()
Example #6
def get_taupy_points(
    center_lat, center_lon, ev_lat, ev_lon, ev_depth, stime, etime, mini, maxi, ev_otime, phase_shift, sll, slm
):

    distance = locations2degrees(center_lat, center_lon, ev_lat, ev_lon)
    # print(distance)

    model = TauPyModel(model="ak135")
    arrivals = model.get_pierce_points(ev_depth, distance)
    # arrivals = earthmodel.get_pierce_points(ev_depth,distance,phase_list=('PP','P^410P'))

    # compute the vespagram window
    start_vespa = stime - mini
    end_vespa = etime - maxi

    # compare the arrival times with the time window
    phase_name_info = []
    phase_slowness_info = []
    phase_time_info = []

    for dummy_phase in arrivals:
        # phase time in seconds
        taup_phase_time = dummy_phase.time
        # slowness of the phase
        taup_phase_slowness = dummy_phase.ray_param_sec_degree
        # compute the UTC travel phase time
        taup_phase_time2 = ev_otime + taup_phase_time + phase_shift

        if start_vespa <= taup_phase_time2 <= end_vespa:  # time window
            if sll <= taup_phase_slowness <= slm:  # slowness window
                # seconds inside the vespagram
                taup_mark = taup_phase_time2 - start_vespa
                # store the information
                phase_name_info.append(dummy_phase.name)
                phase_slowness_info.append(dummy_phase.ray_param_sec_degree)
                phase_time_info.append(taup_mark)

    phase_slowness_info = np.array(phase_slowness_info)
    phase_time_info = np.array(phase_time_info)

    return phase_name_info, phase_slowness_info, phase_time_info
Example #7
def get_taupy_points(center_lat, center_lon, ev_lat, ev_lon, ev_depth, stime,
                     etime, mini, maxi, ev_otime, phase_shift, sll, slm):

    distance = locations2degrees(center_lat, center_lon, ev_lat, ev_lon)
    #print(distance)

    model = TauPyModel(model="ak135")
    arrivals = model.get_pierce_points(ev_depth, distance)
    #arrivals = earthmodel.get_pierce_points(ev_depth,distance,phase_list=('PP','P^410P'))

    # compute the vespagram window
    start_vespa = stime - mini
    end_vespa = etime - maxi

    # compare the arrival times with the time window
    phase_name_info = []
    phase_slowness_info = []
    phase_time_info = []

    for dummy_phase in arrivals:
        # phase time in seconds
        taup_phase_time = dummy_phase.time
        # slowness of the phase
        taup_phase_slowness = dummy_phase.ray_param_sec_degree
        # compute the UTC travel phase time
        taup_phase_time2 = ev_otime + taup_phase_time + phase_shift

        if start_vespa <= taup_phase_time2 <= end_vespa:  # time window
            if sll <= taup_phase_slowness <= slm:  # slowness window
                # seconds inside the vespagram
                taup_mark = taup_phase_time2 - start_vespa
                # store the information
                phase_name_info.append(dummy_phase.name)
                phase_slowness_info.append(dummy_phase.ray_param_sec_degree)
                phase_time_info.append(taup_mark)

    phase_slowness_info = np.array(phase_slowness_info)
    phase_time_info = np.array(phase_time_info)

    return phase_name_info, phase_slowness_info, phase_time_info
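Both versions above iterate over the result of get_pierce_points; a minimal
sketch of the Arrival attributes they read (name, time, ray_param_sec_degree),
with a hypothetical source depth and distance:

from obspy.taup import TauPyModel

model = TauPyModel(model='ak135')
arrivals = model.get_pierce_points(source_depth_in_km=100.0,
                                   distance_in_degree=60.0,
                                   phase_list=['PP'])
for arrival in arrivals:
    print(arrival.name, arrival.time, arrival.ray_param_sec_degree)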
Example #8
def pick_travel(stream, origin, model=None, picker_config=None):
    '''Use TauP travel time model to find P-Phase arrival time.

    Args:
        stream (StationStream):
            StationStream containing 1 or more channels of waveforms.
        origin (ScalarEvent):
            Event origin/magnitude information.
        model (TauPyModel):
            TauPyModel object for computing travel times.
        picker_config (dict):
            Dictionary containing picker configuration.
    Returns:
        tuple:
            - Best estimate for p-wave arrival time (s since start of trace).
            - Mean signal to noise ratio based on the pick.
    '''
    if model is None:
        if picker_config is None:
            picker_config = get_config(section='pickers')
        model = TauPyModel(picker_config['travel_time']['model'])
    if stream[0].stats.starttime == NAN_TIME:
        return (-1, 0)
    lat = origin.latitude
    lon = origin.longitude
    depth = origin.depth_km
    if depth < 0:
        depth = 0
    etime = origin.time
    slat = stream[0].stats.coordinates.latitude
    slon = stream[0].stats.coordinates.longitude

    dist_deg = locations2degrees(lat, lon, slat, slon)
    try:
        arrivals = model.get_travel_times(
            source_depth_in_km=depth,
            distance_in_degree=dist_deg,
            phase_list=['P', 'p', 'Pn'])
    except BaseException as e:
        fmt = ('Exception "%s" generated by get_travel_times() '
               'dist=%.3f depth=%.1f')
        logging.warning(fmt % (str(e), dist_deg, depth))
        arrivals = []
    if not len(arrivals):
        return (-1, 0)

    # arrival time is time since origin
    arrival = arrivals[0]
    # we need time since start of the record
    minloc = arrival.time + (etime - stream[0].stats.starttime)
    mean_snr = calc_snr(stream, minloc)
    return (minloc, mean_snr)
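The conversion from travel time (seconds after origin) to seconds since the
start of the record is plain UTCDateTime arithmetic; a sketch with
hypothetical times:

from obspy import UTCDateTime

origin_time = UTCDateTime('2020-01-01T00:00:00')  # hypothetical origin
trace_start = UTCDateTime('2020-01-01T00:00:30')  # hypothetical record start
travel_time = 120.0  # seconds after origin, e.g. arrival.time

# seconds since the start of the record, as used for minloc above
pick_seconds = travel_time + (origin_time - trace_start)
print(pick_seconds)  # 90.0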
Example #9
def attach_coordinates_to_traces(stream, inventory, event=None):
    """
    Function to add coordinates to traces.

    It extracts coordinates from a :class:`obspy.station.inventory.Inventory`
    object and writes them to each trace's stats attribute. If an event is
    given, the distance in degree will also be attached.

    :param stream: Waveforms for the array processing.
    :type stream: :class:`obspy.core.stream.Stream`
    :param inventory: Station metadata for waveforms
    :type inventory: :class:`obspy.station.inventory.Inventory`
    :param event: If the event is given, the event distance in degree will also
     be attached to the traces.
    :type event: :class:`obspy.core.event.Event`
    """
    # Get the coordinates for all stations
    coords = {}
    for network in inventory:
        for station in network:
            # coords["%s.%s" % (network.code, station.code)] = \
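            # NOTE: the network code is deliberately dropped from the key, so
            # the lookup below only matches traces with an empty network code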
            coords[".%s" % (station.code)] = \
                {"latitude": station.latitude,
                 "longitude": station.longitude,
                 "elevation": station.elevation}

    # Calculate the event-station distances.
    if event:
        event_lat = event.origins[0].latitude
        event_lng = event.origins[0].longitude
        for value in coords.values():
            value["distance"] = locations2degrees(value["latitude"],
                                                  value["longitude"],
                                                  event_lat, event_lng)

    # Attach the information to the traces.
    for trace in stream:
        station = ".".join(trace.id.split(".")[:2])
        value = coords[station]
        trace.stats.coordinates = AttribDict()
        trace.stats.coordinates.latitude = value["latitude"]
        trace.stats.coordinates.longitude = value["longitude"]
        trace.stats.coordinates.elevation = value["elevation"]
        if event:
            trace.stats.distance = value["distance"]
Example #10
def attach_coordinates_to_traces(stream, inventory, event=None):
    """
    Function to add coordinates to traces.

    It extracts coordinates from a :class:`obspy.station.inventory.Inventory`
    object and writes them to each trace's stats attribute. If an event is
    given, the distance in degree will also be attached.

    :param stream: Waveforms for the array processing.
    :type stream: :class:`obspy.core.stream.Stream`
    :param inventory: Station metadata for waveforms
    :type inventory: :class:`obspy.station.inventory.Inventory`
    :param event: If the event is given, the event distance in degree will also
     be attached to the traces.
    :type event: :class:`obspy.core.event.Event`
    """
    # Get the coordinates for all stations
    coords = {}
    for network in inventory:
        for station in network:
            # coords["%s.%s" % (network.code, station.code)] = \
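            # NOTE: the network code is deliberately dropped from the key, so
            # the lookup below only matches traces with an empty network code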
            coords[".%s" % (station.code)] = {
                "latitude": station.latitude,
                "longitude": station.longitude,
                "elevation": station.elevation,
            }

    # Calculate the event-station distances.
    if event:
        event_lat = event.origins[0].latitude
        event_lng = event.origins[0].longitude
        for value in coords.values():
            value["distance"] = locations2degrees(value["latitude"], value["longitude"], event_lat, event_lng)

    # Attach the information to the traces.
    for trace in stream:
        station = ".".join(trace.id.split(".")[:2])
        value = coords[station]
        trace.stats.coordinates = AttribDict()
        trace.stats.coordinates.latitude = value["latitude"]
        trace.stats.coordinates.longitude = value["longitude"]
        trace.stats.coordinates.elevation = value["elevation"]
        if event:
            trace.stats.distance = value["distance"]
Example #11
def stream_add_stats(data_stream, inv, evt, write_sac=False, rotate_in_obspy=False):
    for net in inv:
        for sta in net:
            str1 = data_stream.select(network=net.code, station=sta.code)
            print(str(net.code), str(sta.code), len(str1))
            if len(str1) == 0:
                continue
            # update in future to deal with multiple channels (total number of channels)
            if len(str1) % 3 != 0:
                print('Problem: missing components', str1)
                raise SystemExit

            for tr in str1:
                for chan in sta:
                    if tr.stats.channel == chan.code and tr.stats.location == chan.location_code:
                        break
                else:
                    print('Problem finding channel in inventory', tr)
                    raise SystemExit
                tr.stats.coordinates = {'latitude': chan.latitude,
                                        'longitude': chan.longitude}
                (tr.stats.distance, tr.stats.azimuth, tr.stats.back_azimuth) = gps2dist_azimuth(
                    chan.latitude, chan.longitude, evt.origins[0].latitude, evt.origins[0].longitude)
                if write_sac:
                    sac = AttribDict()
                    sac.kstnm = str(sta.code)
                    sac.knetwk = str(net.code)
                    sac.kcmpnm = str(chan.code)
                    sac.khole = str(chan.location_code)
                    sac.stla = chan.latitude
                    sac.stlo = chan.longitude
                    sac.stel = chan.elevation
                    sac.evla = evt.origins[0].latitude
                    sac.evlo = evt.origins[0].longitude
                    sac.evdp = evt.origins[0].depth / 1000.  # in km
                    sac.mag = evt.magnitudes[0].mag
                    time = evt.origins[0].time
                    sac.nzyear, sac.nzjday, sac.nzhour = time.year, time.julday, time.hour
                    sac.nzmin, sac.nzsec = time.minute, time.second
                    sac.nzmsec = time.microsecond // 1000  # SAC expects integer milliseconds
                    sac.o = 0.
                    sac.b = tr.stats.starttime - time  # this is very important!!
                    sac.kevnm = str(time)
                    sac.cmpaz = chan.azimuth
                    # dip is from horizontal downward; inc is from vertical downward
                    sac.cmpinc = chan.dip + 90
                    sac.gcarc = locations2degrees(evt.origins[0].latitude, evt.origins[0].longitude,
                                                  chan.latitude, chan.longitude)
                    sac.dist = tr.stats.distance / 1000
                    sac.az = tr.stats.azimuth
                    sac.baz = tr.stats.back_azimuth
                    tr.stats.sac = sac
                    tr_name = sta.code + '.' + net.code + '.' + chan.location_code + '.' + chan.code + '.sac'
                    tr.write(tr_name, format='SAC')
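A pared-down sketch of the SAC-header pattern used above, with hypothetical
values; sac.b (trace start relative to the reference time) is the field the
comment above flags as critical:

from obspy import read, UTCDateTime
from obspy.core.util import AttribDict

tr = read()[0]  # ObsPy demo trace
ref = UTCDateTime(2009, 8, 24, 0, 20, 0)  # hypothetical reference (origin) time

sac = AttribDict()
sac.stla, sac.stlo = 48.16, 11.28  # hypothetical station coordinates
sac.evla, sac.evlo = 47.00, 12.00  # hypothetical event coordinates
sac.o = 0.
sac.b = tr.stats.starttime - ref   # trace start relative to reference time
tr.stats.sac = sac
tr.write('demo.sac', format='SAC')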
Example #12
def pick_travel(stream, origin, picker_config=None):
    '''Use TauP travel time model to find P-Phase arrival time.

    Args:
        stream (StationStream):
            StationStream containing 1 or more channels of waveforms.
        origin (ScalarEvent):
            Event origin/magnitude information.
        picker_config (dict):
            Dictionary containing picker configuration.
    Returns:
        tuple:
            - Best estimate for p-wave arrival time (s since start of trace).
            - Mean signal to noise ratio based on the pick.
    '''
    if picker_config is None:
        picker_config = get_config(section='pickers')
    model = picker_config['travel_time']['model']
    model = TauPyModel(model=model)
    if stream[0].stats.starttime == NAN_TIME:
        return (-1, 0)
    lat = origin.latitude
    lon = origin.longitude
    depth = origin.depth_km
    if depth < 0:
        depth = 0
    etime = origin.time
    slat = stream[0].stats.coordinates.latitude
    slon = stream[0].stats.coordinates.longitude

    dist_deg = locations2degrees(lat, lon, slat, slon)
    arrivals = model.get_travel_times(source_depth_in_km=int(depth),
                                      distance_in_degree=dist_deg,
                                      phase_list=['P', 'p', 'Pn'])
    if not len(arrivals):
        return (-1, 0)

    # arrival time is time since origin
    arrival = arrivals[0]
    # we need time since start of the record
    minloc = arrival.time + (etime - stream[0].stats.starttime)
    mean_snr = calc_snr(stream, minloc)
    return (minloc, mean_snr)
Example #13
def get_station_delays(station_coords,
                       sources,
                       velmod='PREM',
                       phase_list=['s', 'S']):
    '''
    Given an array of station coordinates and sources, calculate the travel
    time from each source to each site.

    velmod is the FULL path to the .npz file used by TauPy
    '''

    from obspy.taup import TauPyModel
    from numpy import zeros
    from obspy.geodetics.base import locations2degrees

    model = TauPyModel(model=velmod)

    # Initialize output variable
    delay_time = zeros((len(sources), len(station_coords)))

    #Loop over sources
    for ksource in range(len(sources)):

        print('Working on source %d of %d ' % (ksource, len(sources)))

        #loop over sites
        for ksite in range(len(station_coords)):

            distance_in_degrees = locations2degrees(station_coords[ksite, 1],
                                                    station_coords[ksite, 0],
                                                    sources[ksource, 2],
                                                    sources[ksource, 1])

            arrivals = model.get_travel_times(
                source_depth_in_km=sources[ksource, 3],
                distance_in_degree=distance_in_degrees,
                phase_list=phase_list)

            delay_time[ksource, ksite] = arrivals[0].time

    return delay_time
Example #14
    def griding(self, coords, sta_coords, dx, dy, depth):

        lon1 = sta_coords['longitude']
        lat1 = sta_coords['latitude']
        xmin = coords[0]
        xmax = coords[1]
        ymin = coords[2]
        ymax = coords[3]

        columns = int((xmax - xmin) / dx)
        rows = int((ymax - ymin) / dy)

        travel_times = np.zeros((rows, columns))
        columns_line = np.linspace(xmin, xmax, columns)
        rows_line = np.linspace(ymin, ymax, rows)

        model = TauPyModel(model="iasp91")

        for i in range(rows):
            for j in range(columns):
                lon2 = columns_line[j]
                lat2 = rows_line[i]

                #distance, az1, az2 = gps2dist_azimuth(lat1, lon1, lat2, lon2, a=6378137.0, f=0.0033528106647474805)
                #distance = kilometers2degrees(distance/1000, radius=6378.137)

                #arrivals = model.get_travel_times(source_depth_in_km=depth, distance_in_degree=distance,
                #                                  phase_list = ["P", "p", "pP", "PP"])
                arrivals = model.get_travel_times(
                    source_depth_in_km=depth,
                    distance_in_degree=locations2degrees(
                        lat1, lon1, lat2, lon2),
                    phase_list=["P"])

                arrival = [(tt.time, tt.name) for tt in arrivals]
                #arrivals, phases = zip(*arrival)
                travel_times[i, j] = arrival[0][0]

        #travel_times = np.flipud(travel_times)
        return travel_times
Example #15
def pick_travel(stream, origin, picker_config=None):
    '''Use TauP travel time model to find P-Phase arrival time.

    Args:
        stream (StationStream):
            StationStream containing 1 or more channels of waveforms.
        origin (ScalarEvent):
            Event origin/magnitude information.
        picker_config (dict):
            Dictionary containing picker configuration.
    Returns:
        tuple:
            - Best estimate for p-wave arrival time (s since start of trace).
            - Mean signal to noise ratio based on the pick.
    '''
    if picker_config is None:
        picker_config = get_config(section='pickers')
    model = picker_config['travel_time']['model']
    model = TauPyModel(model=model)
    if stream[0].stats.starttime == NAN_TIME:
        return (-1, 0)
    lat = origin.latitude
    lon = origin.longitude
    depth = origin.depth_km
    etime = origin.time
    slat = stream[0].stats.coordinates.latitude
    slon = stream[0].stats.coordinates.longitude

    dist_deg = locations2degrees(lat, lon, slat, slon)
    arrivals = model.get_travel_times(source_depth_in_km=int(depth),
                                      distance_in_degree=dist_deg,
                                      phase_list=['P', 'p', 'Pn'])
    if not len(arrivals):
        return (-1, 0)
    arrival = arrivals[0]
    minloc = arrival.time + (etime - stream[0].stats.starttime)
    mean_snr = calc_snr(stream, minloc)
    return (minloc, mean_snr)
Example #16
    def _get_segments(_i, lat, lon, end_points):
        """This function takes in lats and lons, and returns segments for a
        LineCollection.

        :param _i: index for end_point list
        :type _i: int
        :param lat: latitudes
        :type lat: list
        :param lon: longitudes
        :type lon: list
        :param end_points: end_points of trajectories (defining time jumps)
        :type end_points: list
        :return: tuple with (segments, indices)
        """

        # Create empty lists
        indices = []
        segments = []

        # For each point pair in the trajectory of the Mermaid, the loop
        # creates line segments if the distance is smaller than 3 degrees
        for _j, (lat1, lon1, lat2,
                 lon2) in enumerate(zip(lat[:-1], lon[:-1], lat[1:], lon[1:])):

            # Compute distance between points
            dist = locations2degrees(lat1, lon1, lat2, lon2)

            # Only create a segment if the distance is
            # smaller than 3 degrees.
            if dist < 3:
                segments.append([(lon1, lat1), (lon2, lat2)])
                indices.append(_j)
            else:
                end_points.append((_i, lat1, lon1, dist))

        return segments, indices
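The (lon, lat) pairs returned above are exactly the segment format expected
by matplotlib's LineCollection; a minimal plotting sketch with hypothetical
segments:

import matplotlib.pyplot as plt
from matplotlib.collections import LineCollection

# hypothetical output of _get_segments: two short trajectory segments
segments = [[(-150.0, 10.0), (-150.2, 10.1)],
            [(-150.2, 10.1), (-150.5, 10.3)]]

fig, ax = plt.subplots()
ax.add_collection(LineCollection(segments, colors='k', linewidths=1))
ax.autoscale()
plt.show()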
Example #17
    def plot_windows(
        self,
        event: str,
        window_set_name: str,
        distance_bins: int = 500,
        ax=None,
        show: bool = True,
    ):
        """
        Plot all selected windows on an epicentral distance vs duration plot
        with the color encoding the selected channels. This gives a quick
        overview of how well the windows for a certain event and iteration
        have been selected.

        :param event: The name of the event.
        :type event: str
        :param window_set_name: The window set.
        :type window_set_name: str
        :param distance_bins: The number of bins on the epicentral
            distance axis. Defaults to 500
        :type distance_bins: int, optional
        :param ax: If given, it will be plotted to this ax. Defaults to None
        :type ax: matplotlib.axes.Axes, optional
        :param show: If True, ``plt.show()`` will be called before returning.
            Defaults to True
        :type show: bool, optional
        :return: The potentially created axes object.
        """
        from obspy.geodetics.base import locations2degrees

        event = self.comm.events.get(event)
        window_manager = self.comm.windows.read_all_windows(
            event=event["event_name"], window_set_name=window_set_name)
        starttime = event["origin_time"]
        duration = (self.comm.project.simulation_settings["end_time_in_s"] -
                    self.comm.project.simulation_settings["start_time_in_s"])

        # First step is to calculate all epicentral distances.
        stations = self.comm.query.get_all_stations_for_event(
            event["event_name"])

        for s in stations.values():
            s["epicentral_distance"] = locations2degrees(
                event["latitude"],
                event["longitude"],
                s["latitude"],
                s["longitude"],
            )

        # Plot from 0 to however far it goes.
        min_epicentral_distance = 0
        max_epicentral_distance = math.ceil(
            max(_i["epicentral_distance"] for _i in stations.values()))
        epicentral_range = max_epicentral_distance - min_epicentral_distance

        if epicentral_range == 0:
            raise ValueError("All stations have zero epicentral distance.")

        # Create the image that will represent the pictures in an epicentral
        # distance plot. By default everything is black.
        #
        # First dimension: Epicentral distance.
        # Second dimension: Time.
        # Third dimension: RGB tuple.
        len_time = 1000
        len_dist = distance_bins
        image = np.zeros((len_dist, len_time, 3), dtype=np.uint8)

        # Helper functions calculating the indices.
        def _time_index(value):
            frac = np.clip((value - starttime) / duration, 0, 1)
            return int(round(frac * (len_time - 1)))

        def _space_index(value):
            frac = np.clip(
                (value - min_epicentral_distance) / epicentral_range, 0, 1)
            return int(round(frac * (len_dist - 1)))

        def _color_index(channel):
            _map = {"Z": 2, "N": 1, "E": 0}
            channel = channel[-1].upper()
            if channel not in _map:
                raise ValueError
            return _map[channel]

        for station in window_manager:
            for channel in window_manager[station]:
                for win in window_manager[station][channel]:
                    image[
                        _space_index(stations[station]["epicentral_distance"]),
                        _time_index(win[0]):_time_index(win[1]),
                        _color_index(channel), ] = 255

        # From http://colorbrewer2.org/
        color_map = {
            (255, 0, 0): (228, 26, 28),  # red
            (0, 255, 0): (77, 175, 74),  # green
            (0, 0, 255): (55, 126, 184),  # blue
            (255, 0, 255): (152, 78, 163),  # purple
            (0, 255, 255): (255, 127, 0),  # orange
            (255, 255, 0): (255, 255, 51),  # yellow
            (255, 255, 255): (250, 250, 250),  # white
            (0, 0, 0): (50, 50, 50),  # More pleasant gray background
        }

        # Replace colors...fairly complex. Not sure if there is another way...
        red, green, blue = image[:, :, 0], image[:, :, 1], image[:, :, 2]
        for color, replacement in color_map.items():
            image[:, :, :][(red == color[0]) & (green == color[1]) &
                           (blue == color[2])] = replacement

        def _one(i):
            return [_i / 255.0 for _i in i]

        import matplotlib.pylab as plt

        plt.style.use("ggplot")

        artists = [
            plt.Rectangle((0, 1), 1, 1, color=_one(color_map[(0, 0, 255)])),
            plt.Rectangle((0, 1), 1, 1, color=_one(color_map[(0, 255, 0)])),
            plt.Rectangle((0, 1), 1, 1, color=_one(color_map[(255, 0, 0)])),
            plt.Rectangle((0, 1), 1, 1, color=_one(color_map[(0, 255, 255)])),
            plt.Rectangle((0, 1), 1, 1, color=_one(color_map[(255, 0, 255)])),
            plt.Rectangle((0, 1), 1, 1, color=_one(color_map[(255, 255, 0)])),
            plt.Rectangle((0, 1), 1, 1,
                          color=_one(color_map[(255, 255, 255)])),
        ]
        labels = ["Z", "N", "E", "Z + N", "Z + E", "N + E", "Z + N + E"]

        if ax is None:
            plt.figure(figsize=(16, 9))
            ax = plt.gca()

        ax.imshow(
            image,
            aspect="auto",
            interpolation="nearest",
            vmin=0,
            vmax=255,
            origin="lower",
        )
        ax.grid()
        event_name = event["event_name"]
        ax.set_title(f"Selected windows for window set "
                     f"{window_set_name} and event "
                     f"{event_name}")

        ax.legend(artists,
                  labels,
                  loc="lower right",
                  title="Selected Components")

        # Set the x-ticks.
        xticks = []
        for time in ax.get_xticks():
            # They are offset by -0.5.
            time += 0.5
            # Convert to actual time
            frac = time / float(len_time)
            time = frac * duration
            xticks.append("%.1f" % time)
        ax.set_xticklabels(xticks)
        ax.set_xlabel("Time since event in seconds")

        yticks = []
        for dist in ax.get_yticks():
            # They are offset by -0.5.
            dist += 0.5
            # Convert to actual epicentral distance.
            frac = dist / float(len_dist)
            dist = min_epicentral_distance + (frac * epicentral_range)
            yticks.append("%.1f" % dist)
        ax.set_yticklabels(yticks)
        ax.set_ylabel(
            "Epicentral distance in degree [Binned in %i distances]" %
            distance_bins)

        if show:
            plt.tight_layout()
            plt.show()
            plt.close()

        return ax
Example #18
def main(argv):
    inv = read_inventory("IRIS-ALL.xml")
    # if os.path.exists("IRIS-ALL.pkl"): # doesn't work on CentOS for some reason
    #     with open('IRIS-ALL.pkl', 'rb') as f:
    #         import cPickle as pkl
    #         inv = pkl.load(f)
    # else:
    #     inv = read_inventory("IRIS-ALL.xml")
    #     with open('IRIS-ALL.pkl', 'wb') as f:
    #         import pickle as pkl
    #         pkl.dump(inv, f, pkl.HIGHEST_PROTOCOL)
    sensorDict, responseDict = extract_unique_sensors_responses(inv)
    print('\nFound {0} response objects with keys: {1}'.format(len(responseDict.keys()), responseDict.keys()))

    # unknown stations in Indonesia are usually installed by Potsdam and we assume they have network name GE
    default_net = 'GE'
    ehb1 = read_eng('BMG.STN')
    ehb2 = read_eng('ISC.STN')
    ehb = np.unique(np.vstack((ehb1, ehb2)), axis=0)

    isc1 = read_isc('ehb.stn')
    isc2 = read_isc('iscehb.stn')
    isc = np.unique(np.vstack((isc1, isc2)), axis=0)

    catalogue = []
    for i in range(ehb.shape[0]):
        failed = False
        xml = False
        stn_found = isc[isc[:, 0] == ehb[i, 0], :]
        min_dist = 10e10
        if stn_found.shape[0] > 0:
            if stn_found.shape[0] > 1:
                for j in range(stn_found.shape[0]):
                    dist = locations2degrees(float(stn_found[j, 2]), float(stn_found[j, 3]),
                                             float(ehb[i, 1]), float(ehb[i, 2]))
                    if dist < min_dist:
                        min_dist = dist
                        record = stn_found[j, :]
            else:
                min_dist = locations2degrees(float(stn_found[0, 2]), float(stn_found[0, 3]),
                                             float(ehb[i, 1]), float(ehb[i, 2]))
                record = stn_found[0, :]

            #                Now we try to find the same station in XML file
            #                if min_dist > 1. or stn_found.shape[0]==0:

        xstn_found = inv.select(station=ehb[i, 0], channel="*HZ")

        if len(stn_found) == 0 and len(xstn_found) == 0:
            # we failed to find the station anywhere, so assign dummy values
            record = [ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3], 'SHZ', '1964-1-1 00:00:00',
                      '2599-12-31 23:59:59']
            min_dist = 0.
            failed = True
        else:
            # if the station is found somewhere, we iterate and see if the XML
            # has data, giving it preference by adding an extra value to the
            # min_dist found in ISC
            if len(xstn_found) > 0:
                #                        print "----------",len(xstn_found)
                #                        print xstn_found[0][0].latitude
                min_dist = min_dist + 0.1
                for j in range(len(xstn_found)):
                    dist = locations2degrees(xstn_found[j][0].latitude, xstn_found[j][0].longitude,
                                             float(ehb[i, 1]), float(ehb[i, 2]))
                    if min_dist > dist:
                        min_dist = dist
                        record = xstn_found[j]
                        #                                print record
                        xml = True

        # last defence: if the station was matched but the declared and found
        # locations are more than 1 degree apart
        if min_dist > 1:
            record = [ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3], 'SHZ', '1964-1-1 00:00:00',
                      '2599-12-31 23:59:59']
            failed = True
        if xml:
            xml = False

        else:
            if failed:

                if len(record[7]) < 5:
                    record[7] = '2599-12-31 23:59:59'
                catalogue.append(record)

            else:

                stn_found = isc[(isc[:, 0] == record[0]) & (isc[:, 1] == record[1]), :]

                for k in range(stn_found.shape[0]):
                    net = Network(code=stn_found[k, 1], stations=[], description=' ')
                    if len(stn_found[k, 7]) < 5:
                        stn_found[k, 7] = '2599-12-31 23:59:59'
                    catalogue.append(stn_found[k, :])

    stn_found = np.unique(np.array(catalogue), axis=0)
    if len(stn_found[stn_found == '']) > 0 or len(stn_found[stn_found == ' ']) > 0:
        print("Some elements are empty, check the list")

    # we composed our inventory; however, some stations from the ISC list can
    # be left behind, so we check whether any ISC stations were forgotten
    lost = []
    for j in range(isc.shape[0]):
        # is there any common station name?
        common_st = stn_found[isc[j, 0] == stn_found[:, 0]]
        if common_st.shape[0] > 0:
            # is network code the same?
            common_net = common_st[common_st[:, 1] == isc[j, 1]]
            if common_net.shape[0] < 1:
                # ok we found forgotten one, check the XML
                if len(inv.select(station=isc[j, 0], network=isc[j, 1])) <= 0:
                    # Bingo...
                    lost.append(isc[j, :])
        else:
            if len(inv.select(station=isc[j, 0], network=isc[j, 1])) <= 0:
                # Bingo...
                lost.append(isc[j, :])

    stn_found = np.vstack((stn_found, np.array(lost)))

    netDict = defaultdict(list)
    for k in range(stn_found.shape[0]):
        result = inv.select(network=stn_found[k, 1])
        if (len(result.networks)):
            net = result.networks[0]
            net.stations = []
        else:
            net = Network(code=stn_found[k, 1], stations=[], description=' ')

        # print stn_found[k, 1]

        if len(stn_found[k, 7]) < 5:
            stn_found[k, 7] = '2599-12-31 23:59:59'
        catalogue.append(stn_found[k, :])
        sta = Station(code=stn_found[k, 0],
                      creation_date=utcdatetime.UTCDateTime(stn_found[k, 6]),
                      termination_date=utcdatetime.UTCDateTime(stn_found[k, 7]),
                      site=Site(name=' '),
                      latitude=float(stn_found[k, 2]),
                      longitude=float(stn_found[k, 3]),
                      elevation=float(stn_found[k, 4]))

        if stn_found[k, 5] in responseDict:
            r = responseDict[stn_found[k, 5]]

            cha = Channel(code=stn_found[k, 5],
                          depth=0.,
                          azimuth=0.,
                          dip=-90.,
                          location_code='',
                          latitude=float(stn_found[k, 2]),
                          longitude=float(stn_found[k, 3]),
                          elevation=float(stn_found[k, 4]),
                          # sensor=sensorDict[stn_found[k, 5]],
                          response=r)

            sta.channels.append(cha)

            if isinstance(netDict[stn_found[k, 1]], Network):
                netDict[stn_found[k, 1]].stations.append(sta)
            else:
                net.stations.append(sta)
                netDict[stn_found[k, 1]] = net

            #                 print 'np',stn_found[k,:]
            # end if

    our_xml = Inventory(networks=list(netDict.values()), source='EHB')

    print('Writing output files..')
    for inet, net in enumerate(our_xml.networks):
        currInv = Inventory(networks=[net], source='EHB')
        currInv.write("output/station.%d.xml" % (inet), format="stationxml", validate=True)

    # our_xml.write("station.xml",format="stationxml", validate=True)
    our_xml.write("station.txt", format="stationtxt")
Example #19
def create_travel_time_dataframe(streams, catalog_file, ddepth, ddist, model):
    """
    Creates a travel time dataframe, which contains the phase arrival times
    for each station in the StreamCollection, for each event in the catalog.
    This uses an interpolation method to save time, and the fineness of the
    interpolation grid can be adjusted using the ddepth and ddist parameters.
    The recommended values of ddepth=5 and ddist=0.1 are generally sufficient
    to achieve less than 0.1 seconds of error in the travel times for most
    cases.

    Args:
        streams (StreamCollection):
            Streams to calculate travel times for.
        catalog_file (str):
            The path to the CSV file (from ComCat) which contains event info.
        ddepth (float):
            The depth spacing (in km) for the interpolation grid.
            Recommended value is 5 km.
        ddist (float):
            The distance spacing (in decimal degrees) for the interpolation
            grid. Recommended value is 0.1 degrees.
        model (str):
            Name of the velocity model used to construct the TauPyModel.

    Returns:
        A tuple, containing the travel time dataframe and the catalog
        (list of ScalarEvent objects).
    """

    # Read the catalog file and create a catalog (list) of ScalarEvent objects
    df_catalog = pd.read_csv(catalog_file)

    # Replace any negative depths with 0
    df_catalog['depth'].clip(lower=0, inplace=True)
    catalog = []
    for idx, row in df_catalog.iterrows():
        event = ScalarEvent()
        event.fromParams(row['id'], row['time'], row['latitude'],
                         row['longitude'], row['depth'], row['mag'])
        catalog.append(event)

    # Store the lat, lon, and id for each stream
    st_lats, st_lons, st_ids = [], [], []
    for st in streams:
        st_lats.append(st[0].stats.coordinates.latitude)
        st_lons.append(st[0].stats.coordinates.longitude)
        st_ids.append(st[0].stats.network + '.' + st[0].stats.station)

    # Calculate the distance for each stream, for each event
    # Store distances in a matrix
    distances_matrix = np.zeros((len(streams), len(catalog)))
    for idx, st in enumerate(streams):
        distances_matrix[idx] = locations2degrees(
            np.repeat(st_lats[idx], len(catalog)),
            np.repeat(st_lons[idx], len(catalog)),
            df_catalog['latitude'], df_catalog['longitude'])
    distances_matrix = distances_matrix.T

    # Calculate the minimum depth/distance values for the interpolation grid
    # This includes a buffer to avoid interpolating at the endpoints
    # Make sure that the minimum depth/distance values aren't negative
    minimum_depth = max([0, min(df_catalog['depth']) - 2 * ddepth])
    minimum_dist = max([0, distances_matrix.min() - 2 * ddist])
    depth_grid = np.arange(
        minimum_depth, max(df_catalog['depth']) + 2 * ddepth, ddepth)
    distance_grid = np.arange(
        minimum_dist, distances_matrix.max() + 2 * ddist, ddist)

    # For each distance and each depth, compute the travel time
    # Store values in the "times" 2D matrix
    taupy_model = TauPyModel(model)
    times = np.zeros((len(depth_grid), len(distance_grid)))
    for i, depth in enumerate(depth_grid):
        for j, dist in enumerate(distance_grid):
            arrivals = taupy_model.get_travel_times(
                depth, dist, ['p', 'P', 'Pn'])
            if not arrivals:
                times[i][j] = np.nan
            else:
                times[i][j] = arrivals[0].time

    # Use 2D interpolation to interpolate values at the actual points
    points = np.transpose([np.tile(distance_grid, len(depth_grid)),
                           np.repeat(depth_grid, len(distance_grid))])
    new_points = np.vstack(
        (distances_matrix.flatten(),
         np.repeat(df_catalog['depth'], len(streams)))).T
    interpolated_times = griddata(points, times.flatten(), new_points).reshape(
        (-1, len(streams)))
    utcdatetimes = np.array([UTCDateTime(time) for time in df_catalog['time']])
    interpolated_times = utcdatetimes.reshape(-1, 1) + interpolated_times

    # Store travel time information in a DataFrame
    # Column indices are the station ids, rows are the earthquake ids
    df = pd.DataFrame(data=interpolated_times, index=df_catalog['id'],
                      columns=st_ids)

    # Remove any duplicate columns which might result from a station with
    # multiple instruments
    df = df.loc[:, ~df.columns.duplicated()]
    return df, catalog
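The interpolation step relies on scipy.interpolate.griddata over a regular
(distance, depth) grid; a self-contained sketch of the same idea, with a toy
travel-time function standing in for the TauPy lookups:

import numpy as np
from scipy.interpolate import griddata

depth_grid = np.arange(0, 50, 5.0)     # km
distance_grid = np.arange(0, 10, 0.1)  # degrees
# toy "travel time" values on the grid (not a real velocity model)
times = np.sqrt(depth_grid[:, None] ** 2 +
                (distance_grid[None, :] * 111.0) ** 2) / 8.0

# same point layout as in create_travel_time_dataframe
points = np.transpose([np.tile(distance_grid, len(depth_grid)),
                       np.repeat(depth_grid, len(distance_grid))])
new_points = np.array([[3.14, 12.0], [7.5, 33.0]])  # (distance, depth) queries
print(griddata(points, times.flatten(), new_points))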
Example #20
def plot_great_circle_path(ev_lon, ev_lat, ev_depth, center_lon, center_lat, baz, great_circle_dist, model):

    plt.figure(num=1, figsize=(17, 10), dpi=100)  # define plot size in inches (width, height) & resolution(DPI)

    distance = locations2degrees(center_lat, center_lon, ev_lat, ev_lon)
    # print(distance)
    # earthmodel =  TauPyModel(model="ak135")
    arrivals = model.get_pierce_points(ev_depth, distance, phase_list=["PP"])
    # print(arrivals)
    arrival = arrivals[0]
    pierce_info = arrival.pierce
    # print(pierce_info)
    max_index = 0  # integer index of the deepest pierce point
    count = 0
    max_val = 0.0
    for i_index in pierce_info:
        # print(i_index)
        count += 1
        if i_index[3] > max_val:
            max_val = i_index[3]
            max_index = count - 1
            # print(max_index)

    # print(max_index)
    bounce_vect = pierce_info[max_index]
    bounce_dist = np.degrees(bounce_vect[2])  # convert from radians to degrees
    # print("bounce_dist")
    # print(bounce_dist)

    # print("ev_lat")
    # print(ev_lat)
    # print("ev_lon")
    # print(ev_lon)
    # print("center_lon")
    # print(center_lon)
    # print("center_lat")
    # print(center_lat)
    # print("backazimuth")
    # print(baz)

    # bounce point approximation
    bounce_lat_appx, bounce_lon_appx = midpoint(ev_lat, ev_lon, center_lat, center_lon)

    # putting everything into a vector
    lons = [ev_lon, center_lon]
    lats = [ev_lat, center_lat]

    # trick - the basemap functions does not like the arguments that math gives
    resolution = 0.0001
    bounce_lon_appx = np.round(bounce_lon_appx / resolution) * resolution
    bounce_lat_appx = np.round(bounce_lat_appx / resolution) * resolution

    # print(bounce_lon_appx)
    # print(bounce_lat_appx)

    # plot results
    map = Basemap(projection="hammer", lon_0=bounce_lon_appx, lat_0=bounce_lat_appx, resolution="c")
    map.drawcoastlines()
    # map.fillcontinents()
    # map.drawmapboundary()
    map.fillcontinents(color="#cc9966", lake_color="#99ffff")
    map.drawmapboundary(fill_color="#99ffff")
    great_circle_dist_deg = great_circle_dist / (1000 * KM_PER_DEG)
    # plt.title('Bounce point plot',fontsize=26)
    msg = "Great circle distance is %.2f deg" % great_circle_dist_deg
    plt.title(msg)

    # draw great circle path
    map.drawgreatcircle(ev_lon, ev_lat, center_lon, center_lat, linewidth=3, color="g")

    # plot event
    x, y = map(ev_lon, ev_lat)
    map.scatter(x, y, 200, marker="*", color="k", zorder=10)

    # plot receiver
    x, y = map(center_lon, center_lat)
    map.scatter(x, y, 100, marker="^", color="k", zorder=10)

    # plot the bounce point approximated
    x, y = map(bounce_lon_appx, bounce_lat_appx)
    map.scatter(x, y, 100, marker="D", color="k", zorder=10)
Example #21
# set the data window
STARTTIME = UTCDateTime(EQTIME)
DURATION = 1800
PSTART = 1125
PEND = 1375
SAMESCALE = False

# Home station
NETWORK = 'AM'  # AM = RaspberryShake network
STATION = "R7FA5"  # Station code of local station to plot
STA_LAT = 50.2609  # Latitude of local station
STA_LON = -5.0434  # Longitude of local station
CHANNEL = 'EHZ'  # channel to grab data for (e.g. EHZ, SHZ, EHE, EHN)
LOCATION = "TRURO SCHOOL"
DISTANCE = locations2degrees(EQLAT, EQLON, STA_LAT,
                             STA_LON)  # Station dist in degrees from epicentre
STA_DIST, _, _ = gps2dist_azimuth(STA_LAT, STA_LON, EQLAT,
                                  EQLON)  # Station dist in m from epicentre

# list of stations
SEISLIST = [
    'RB30C', 'RB5E8', 'RD93E', 'R82BD', 'R7FA5', 'R0353', 'R9FEE', 'R303A'
]
LOCATIONS = [
    'Falmouth', 'Penzance', 'Redruth', 'Richard Lander', 'Truro School',
    'Penair', 'Truro High', 'Constantine'
]
LATITUDES = [
    50.1486, 50.1179833, 50.2344, 50.2596, 50.2609, 50.2673, 50.2570, 50.117
]
LONGITUDES = [
Example #22
# Calculated constants

PHASES = [
    "P", "pP", "PP", "S", "Pdiff", "PKP", "PKIKP", "PcP", "ScP", "ScS",
    "PKiKP", "SKiKP", "SKP", "SKS"
]  # All phases. Good for distant quakes

if PLOT_PS_ONLY:
    PHASES = ["P", "S"]  # Plot only S and P good for local quakes

EVT_TIME = UTCDateTime(EVT_TIME)
STARTTIME = EVT_TIME + ARRIVAL_TIME_ADJUST
ENDTIME = STARTTIME + DURATION
STA_DIST, _, BEARING = gps2dist_azimuth(
    STA_LAT, STA_LON, EVT_LAT, EVT_LON)  # Station dist in m from epicenter
STA_DEGREES = locations2degrees(EVT_LAT, EVT_LON, float(STA_LAT),
                                float(STA_LON))
CHANNEL_SET = [CHANNEL]
COLORS = [cm.plasma(x) for x in linspace(0, 0.8, len(PHASES))]
# Colors from 0.8-1.0 are not very visible
DEG_DIST_TITLE = "Km DegDist: " + str(round(STA_DEGREES, 1))

EVT_TIME = EVT_TIME.strftime('%b %d, %Y %H:%M:%S')
MODEL = 'iasp91'  # Velocity model to predict travel-times through

# Initialize other items

FILTERLABEL = ""
PHASE_PLOT_FILENAME = ''
ACCEL_PLOT_FILENAME = ''
GLOBE_PLOT_FILENAME = ''
maxamp = []
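These constants typically feed a get_travel_times call further down the
script; a sketch with a hypothetical depth and distance, using a subset of
PHASES:

from obspy.taup import TauPyModel

model = TauPyModel(model='iasp91')
arrivals = model.get_travel_times(source_depth_in_km=35.0,
                                  distance_in_degree=52.0,
                                  phase_list=["P", "pP", "PP", "S", "PcP"])
for arr in arrivals:
    print(arr.name, round(arr.time, 1))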
Example #23
def sdxtoquakeml(sdx_dir,
                 out_xml,
                 time_uncertainties=[0.1, 0.2, 0.5, 0.8, 1.5],
                 catalog_description="",
                 catalog_version="",
                 agency_id="",
                 author="",
                 vel_mod_id=""):
    """
    Convert SDX to QuakeML format using ObsPy inventory structure.
    SDX filename prefix is stored under event description.
    Input parameters:
        - sdx_dir: directory containing sdx files (required)
        - out_xml: Filename of quakeML file (required)
        - time_uncertainties: List containing time uncertainties in seconds
          for mapping from weights 0-4, respectively (optional)
        - catalog_description (optional)
        - catalog_version (optional)
        - agency_id (optional)
        - author (optional)
        - vel_mod_id (optional)
    Output:
        - xml catalog in QuakeML format.
    """

    # Prepare catalog
    cat = Catalog(description=catalog_description,
                  creation_info=CreationInfo(author=author,
                                             agency_id=agency_id,
                                             version=catalog_version))

    # Read in sdx files in directory, recursively
    files = glob.glob("{:}/**/*.sdx".format(sdx_dir), recursive=True)
    if len(files) == 0:
        print("No SDX files found in path. Exiting")
    for sdx_file_path in files:
        print("Working on ", sdx_file_path.split('/')[-1])

        # Set-up event
        evt_id = (sdx_file_path.split('/')[-1])[:-4]
        event = Event(event_type="earthquake",
                      creation_info=CreationInfo(author=author,
                                                 agency_id=agency_id),
                      event_descriptions=[EventDescription(text=evt_id)])

        # Get station details, append to arrays
        sdx_file = open(sdx_file_path, "r")
        stations = []
        for line in sdx_file:
            if line.rstrip() == "station":
                sdxstation = list(islice(sdx_file, 5))
                stations.append([
                    sdxstation[1].split()[0],
                    float(sdxstation[2].split()[0]),
                    float(sdxstation[3].split()[0]),
                    float(sdxstation[4].split()[0])
                ])
        sdx_file.close()

        # Find origin details, append to origin object
        sdx_file = open(sdx_file_path, "r")
        found_origin = False
        for line in sdx_file:
            if line.rstrip() == "origin":
                found_origin = True
                sdxorigin = list(islice(sdx_file, 17))
                orig_time = ("{:}T{:}".format(
                    sdxorigin[1][0:10].replace(".", "-"), sdxorigin[1][11:23]))
                evt_lat = float(sdxorigin[2].split()[0])
                evt_lon = float(sdxorigin[3].split()[0])
                evt_depth = float(sdxorigin[4].split()[0])
                creation_time = UTCDateTime("{:}T{:}".format(
                    sdxorigin[16].split()[6][0:10].replace(".", "-"),
                    sdxorigin[16].split()[6][11:23]))
                num_arrivals = int(sdxorigin[12].split()[0])
                num_arrivals_p = (int(sdxorigin[12].split()[0]) -
                                  int(sdxorigin[12].split()[1]))
                min_dist = float(sdxorigin[12].split()[9])
                max_dist = float(sdxorigin[12].split()[10])
                med_dist = float(sdxorigin[12].split()[11])
                max_az_gap = float(sdxorigin[12].split()[6])

                origin = Origin(time=UTCDateTime(orig_time),
                                longitude=evt_lon,
                                latitude=evt_lat,
                                depth=evt_depth * -1000,
                                earth_model_id=vel_mod_id,
                                origin_type="hypocenter",
                                evaluation_mode="manual",
                                evaluation_status="confirmed",
                                method_id=ResourceIdentifier(id="SDX_hypo71"),
                                creation_info=CreationInfo(
                                    creation_time=creation_time,
                                    author=author,
                                    agency_id=agency_id),
                                quality=OriginQuality(
                                    associated_phase_count=num_arrivals,
                                    used_phase_count=num_arrivals,
                                    associated_station_count=num_arrivals_p,
                                    used_station_count=num_arrivals_p,
                                    azimuthal_gap=max_az_gap,
                                    minimum_distance=min_dist,
                                    maximum_distance=max_dist,
                                    median_distance=med_dist))
                event.origins.append(origin)

        sdx_file.close()

        # Skip event if no computed origin
        if found_origin is False:
            print("No origin found ... skipping event")
            continue

        # Get pick details, append to pick and arrival objects
        sdx_file = open(sdx_file_path, "r")
        found_pick = False
        for line in sdx_file:
            if line.rstrip() == "pick":
                found_pick = True
                sdxpick = list(islice(sdx_file, 15))
                pick_time = UTCDateTime("{:}T{:}".format(
                    sdxpick[1][0:10].replace(".", "-"), sdxpick[1][11:23]))
                network = sdxpick[2].split()[0]
                station = sdxpick[2].split()[1]
                location = sdxpick[2].split()[2]
                if "NOT_SET" in location:
                    location = ""
                channel = sdxpick[2].split()[3]
                onset = sdxpick[8].split()[0]
                if onset == "0":
                    pickonset = "emergent"
                elif onset == "1":
                    pickonset = "impulsive"
                elif onset == "2":
                    pickonset = "questionable"
                phase = sdxpick[9].split()[0]
                polarity = sdxpick[10].split()[0]
                if polarity == "0":
                    pol = "positive"
                elif polarity == "1":
                    pol = "negative"
                elif polarity == "2":
                    pol = "undecidable"
                weight = int(sdxpick[11].split()[0])
                creation_time = UTCDateTime("{:}T{:}".format(
                    sdxpick[14].split()[6][0:10].replace(".", "-"),
                    sdxpick[14].split()[6][11:23]))
                pick = Pick(
                    time=pick_time,
                    waveform_id=WaveformStreamID(network_code=network,
                                                 station_code=station,
                                                 location_code=location,
                                                 channel_code=channel),
                    time_errors=time_uncertainties[weight],
                    evaluation_mode="manual",
                    evaluation_status="confirmed",
                    onset=pickonset,
                    phase_hint=phase,
                    polarity=pol,
                    method_id=ResourceIdentifier(id="SDX"),
                    creation_info=CreationInfo(creation_time=creation_time))
                event.picks.append(pick)

                # Compute azimuth, distance, append to arrival object
                for i in range(0, len(stations)):
                    if stations[i][0] == station:
                        azimuth = (gps2dist_azimuth(evt_lat, evt_lon,
                                                    stations[i][1],
                                                    stations[i][2])[1])
                        dist_deg = locations2degrees(evt_lat, evt_lon,
                                                     stations[i][1],
                                                     stations[i][2])
                        arrival = Arrival(phase=phase,
                                          pick_id=pick.resource_id,
                                          azimuth=azimuth,
                                          distance=dist_deg,
                                          time_weight=1.00)
                        event.origins[0].arrivals.append(arrival)

        # Skip event if no picks
        if found_pick is False:
            print("No picks found ... skipping event")
            continue

        # Set preferred origin and append event to catalogue
        event.preferred_origin_id = event.origins[0].resource_id
        cat.events.append(event)

        sdx_file.close()

    cat.write(out_xml, format="QUAKEML")
Ejemplo n.º 24
0
EQLON = 113.116
EQTIME = "2020-02-05 18:12:37"
EQZ = 589.6
STARTTIME = UTCDateTime(EQTIME)
PSTART = 500
PEND = 3500
DURATION = PEND - PSTART

# Home station
NETWORK = 'AM'   # AM = RaspberryShake network
STATION = "RD5F3"  # Station code of local station to plot
STA_LAT = 34.76576577   # Latitude of local station
STA_LON = -112.5250159  # Longitude of local station
CHANNEL = 'EHZ'  # channel to grab data for (e.g. EHZ, SHZ, EHE, EHN)
LOCATION = "Chino Valley"
DISTANCE = locations2degrees(EQLAT, EQLON, STA_LAT, STA_LON)  # Station dist in degrees from epicentre
STA_DIST, _, _ = gps2dist_azimuth(STA_LAT, STA_LON, EQLAT, EQLON)  # Station dist in m from epicentre

# Pretty paired colors. Reorder to have saturated colors first and remove some colors at the end.
CMAP = get_cmap('Paired', lut=12)
COLORS = ['#%02x%02x%02x' % tuple(int(col * 255) for col in CMAP(i)[:3]) for i in range(12)]
COLORS = COLORS[1:][::2][:-1] + COLORS[::2][:-1]

# get the data, detrend and filter (use the station constants defined above)
st = client.get_waveforms(NETWORK, STATION, '00', CHANNEL,
                          STARTTIME + PSTART, STARTTIME + PEND)
st.merge(method=0, fill_value='latest')
st.detrend(type='demean')
st.filter("bandpass", freqmin=0.8, freqmax=3.0, corners=2, zerophase=True)
FILTERLABEL = 'bandpass, freqmin=0.8, freqmax=3.0'

# Plot figure with subplots of different sizes
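# Hedged sketch (added; the original plotting code is not shown). One possible
# "subplots of different sizes" layout: a large waveform panel over a smaller
# spectrogram panel, reusing st, COLORS and FILTERLABEL defined above.
import matplotlib.pyplot as plt
fig = plt.figure(figsize=(12, 8))
ax1 = plt.subplot2grid((3, 1), (0, 0), rowspan=2)  # big waveform panel
ax2 = plt.subplot2grid((3, 1), (2, 0))             # small spectrogram panel
tr = st[0]
ax1.plot(tr.times(), tr.data, color=COLORS[0], linewidth=0.5)
ax1.set_ylabel("Counts")
ax1.set_title("%s.%s %s (%s)" % (NETWORK, STATION, CHANNEL, FILTERLABEL))
ax2.specgram(tr.data, NFFT=256, Fs=tr.stats.sampling_rate, cmap='viridis')
ax2.set_xlabel("Time (s)")
ax2.set_ylabel("Frequency (Hz)")
plt.show()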
Ejemplo n.º 25
0
def sdxtoquakeml(sdx_dir, out_xml,
                 time_uncertainties=[0.1, 0.2, 0.5, 0.8, 1.5],
                 catalog_description="", catalog_version="",
                 agency_id="", author="", vel_mod_id=""):
    """
    Convert SDX to QuakeML format using ObsPy inventory structure.
    SDX filename prefix is stored under event description.
    Input parameters:
        - sdx_dir: directory containing sdx files (required)
        - out_xml: Filename of quakeML file (required)
        - time_uncertainties: List containing time uncertainities in seconds
          for mapping from weights 0-4, respectively (optional)
        - catalog_description (optional)
        - cat_agency_id (optional)
        - author (optional)
        - vel_mod_id (optional)
    Output:
        - xml catalog in QuakeML format.
    """

    # Prepare catalog
    cat = Catalog(description=catalog_description,
                  creation_info=CreationInfo(
                      author=author, agency_id=agency_id,
                      version=catalog_version))

    # Read in sdx files in directory, recursively
    files = glob.glob("{:}/**/*.sdx".format(sdx_dir), recursive=True)
    if len(files) == 0:
        print("No SDX files found in path. Exiting")
        return
    for sdx_file_path in files:
        print("Working on ", sdx_file_path.split('/')[-1])

        # Set-up event
        evt_id = (sdx_file_path.split('/')[-1])[:-4]
        event = Event(event_type="earthquake", creation_info=CreationInfo(
            author=author, agency_id=agency_id),
            event_descriptions=[EventDescription(text=evt_id)])

        # Get station details, append to arrays
        sdx_file = open(sdx_file_path, "r")
        stations = []
        for line in sdx_file:
            if line.rstrip() == "station":
                sdxstation = list(islice(sdx_file, 5))
                stations.append([sdxstation[1].split()[0],
                                 float(sdxstation[2].split()[0]),
                                 float(sdxstation[3].split()[0]),
                                 float(sdxstation[4].split()[0])])
        sdx_file.close()

        # Find origin details, append to origin object
        sdx_file = open(sdx_file_path, "r")
        found_origin = False
        for line in sdx_file:
            if line.rstrip() == "origin":
                found_origin = True
                sdxorigin = list(islice(sdx_file, 17))
                orig_time = ("{:}T{:}".
                             format(sdxorigin[1][0:10].replace(".", "-"),
                                    sdxorigin[1][11:23]))
                evt_lat = float(sdxorigin[2].split()[0])
                evt_lon = float(sdxorigin[3].split()[0])
                evt_depth = float(sdxorigin[4].split()[0])
                creation_time = UTCDateTime(
                    "{:}T{:}".format(sdxorigin[16].split()[6][0:10]
                                     .replace(".", "-"),
                                     sdxorigin[16].split()[6][11:23]))
                num_arrivals = int(sdxorigin[12].split()[0])
                num_arrivals_p = (int(sdxorigin[12].split()[0]) -
                                  int(sdxorigin[12].split()[1]))
                min_dist = float(sdxorigin[12].split()[9])
                max_dist = float(sdxorigin[12].split()[10])
                med_dist = float(sdxorigin[12].split()[11])
                max_az_gap = float(sdxorigin[12].split()[6])

                origin = Origin(time=UTCDateTime(orig_time), longitude=evt_lon,
                                latitude=evt_lat, depth=evt_depth*-1000,
                                earth_model_id=vel_mod_id,
                                origin_type="hypocenter",
                                evaluation_mode="manual",
                                evaluation_status="confirmed",
                                method_id=ResourceIdentifier(id="SDX_hypo71"),
                                creation_info=CreationInfo(
                                    creation_time=creation_time, author=author,
                                    agency_id=agency_id),
                                quality=OriginQuality(
                                    associated_phase_count=num_arrivals,
                                    used_phase_count=num_arrivals,
                                    associated_station_count=num_arrivals_p,
                                    used_station_count=num_arrivals_p,
                                    azimuthal_gap=max_az_gap,
                                    minimum_distance=min_dist,
                                    maximum_distance=max_dist,
                                    median_distance=med_dist))
                event.origins.append(origin)

        sdx_file.close()

        # Skip event if no computed origin
        if found_origin is False:
            print("No origin found ... skipping event")
            continue

        # Get pick details, append to pick and arrival objects
        sdx_file = open(sdx_file_path, "r")
        found_pick = False
        for line in sdx_file:
            if line.rstrip() == "pick":
                found_pick = True
                sdxpick = list(islice(sdx_file, 15))
                pick_time = UTCDateTime(
                    "{:}T{:}".format(sdxpick[1][0:10].replace(".", "-"),
                                     sdxpick[1][11:23]))
                network = sdxpick[2].split()[0]
                station = sdxpick[2].split()[1]
                location = sdxpick[2].split()[2]
                if "NOT_SET" in location:
                    location = ""
                channel = sdxpick[2].split()[3]
                onset = sdxpick[8].split()[0]
                if onset == "0":
                    pickonset = "emergent"
                elif onset == "1":
                    pickonset = "impulsive"
                elif onset == "2":
                    pickonset = "questionable"
                phase = sdxpick[9].split()[0]
                polarity = sdxpick[10].split()[0]
                if polarity == "0":
                    pol = "positive"
                elif polarity == "1":
                    pol = "negative"
                elif polarity == "2":
                    pol = "undecidable"
                weight = int(sdxpick[11].split()[0])
                creation_time = UTCDateTime(
                    "{:}T{:}".format(sdxpick[14].split()[6][0:10]
                                     .replace(".", "-"),
                                     sdxpick[14].split()[6][11:23]))
                pick = Pick(time=pick_time,
                            waveform_id=WaveformStreamID(
                                network_code=network, station_code=station,
                                location_code=location, channel_code=channel),
                            time_errors=time_uncertainties[weight],
                            evaluation_mode="manual",
                            evaluation_status="confirmed", onset=pickonset,
                            phase_hint=phase, polarity=pol,
                            method_id=ResourceIdentifier(id="SDX"),
                            creation_info=CreationInfo(
                                creation_time=creation_time))
                event.picks.append(pick)

                # Compute azimuth, distance, append to arrival object
                for i in range(0, len(stations)):
                    if stations[i][0] == station:
                        azimuth = (gps2dist_azimuth(evt_lat, evt_lon,
                                                    stations[i][1],
                                                    stations[i][2])[1])
                        dist_deg = locations2degrees(evt_lat, evt_lon,
                                                     stations[i][1],
                                                     stations[i][2])
                        arrival = Arrival(phase=phase,
                                          pick_id=pick.resource_id,
                                          azimuth=azimuth, distance=dist_deg,
                                          time_weight=1.00)
                        event.origins[0].arrivals.append(arrival)

        # Skip event if no picks
        if found_pick is False:
            print("No picks found ... skipping event")
            continue

        # Set preferred origin and append event to catalogue
        event.preferred_origin_id = event.origins[0].resource_id
        cat.events.append(event)

        sdx_file.close()

    cat.write(out_xml, format="QUAKEML")
model = TauPyModel(model="iasp91")

# to plot up all the stations...
#inventory.plot()

# to get the station coordinates
station_coordinates = []
for network in inventory:
    for station in network:
        station_coordinates.append((network.code, station.code, 
                                    station.latitude, station.longitude, 
                                    station.elevation))


for station in station_coordinates:
    DegDist = locations2degrees(eventLat, eventLon,
                                station[2], station[3])
    StationAziExpec = gps2dist_azimuth(eventLat, eventLon,
                                       station[2], station[3])
    # Only use stations in the 25-90 degree window where direct P exists
    if DegDist > 25 and DegDist < 90:
        arrivals = model.get_travel_times(source_depth_in_km=eventDepth,
                                          distance_in_degree=DegDist,
                                          phase_list=["P"])
        arrTime = eventTime + arrivals[0].time
        bTime = arrTime - 200
        eTime = arrTime + 300
        try:
            st = client.get_waveforms(station[0], station[1], "00", "BH?",
                                      bTime, eTime, attach_response=True)
        except Exception:
            print("No data for station " + station[1])
Ejemplo n.º 27
0
    def plot_windows(self, event, iteration, distance_bins=500,
                     ax=None, show=True):
        """
        Plot all selected windows on a epicentral distance vs duration plot
        with the color encoding the selected channels. This gives a quick
        overview of how well selected the windows for a certain event and
        iteration are.

        :param event: The event.
        :param iteration: The iteration.
        :param distance_bins: The number of bins on the epicentral
            distance axis.
        :param ax: If given, it will be plotted to this ax.
        :param show: If true, ``plt.show()`` will be called before returning.
        :return: The potentially created axes object.
        """
        from obspy.geodetics.base import locations2degrees

        event = self.comm.events.get(event)
        iteration = self.comm.iterations.get(iteration)
        pparam = iteration.get_process_params()
        window_manager = self.comm.windows.get(event, iteration)

        starttime = event["origin_time"]
        duration = (pparam["npts"] - 1) * pparam["dt"]

        # First step is to calculate all epicentral distances.
        stations = copy.deepcopy(self.comm.query.get_all_stations_for_event(
            event["event_name"]))
        for s in stations.values():
            s["epicentral_distance"] = locations2degrees(
                event["latitude"], event["longitude"], s["latitude"],
                s["longitude"])

        # Plot from 0 to however far it goes.
        min_epicentral_distance = 0
        max_epicentral_distance = math.ceil(max(
            _i["epicentral_distance"] for _i in stations.values()))
        epicentral_range = max_epicentral_distance - min_epicentral_distance

        if epicentral_range == 0:
            raise ValueError("All stations are at the same epicentral "
                             "distance; cannot build the distance axis.")

        # Create the image that will represent the pictures in an epicentral
        # distance plot. By default everything is black.
        #
        # First dimension: Epicentral distance.
        # Second dimension: Time.
        # Third dimension: RGB tuple.
        len_time = 1000
        len_dist = distance_bins
        image = np.zeros((len_dist, len_time, 3), dtype=np.uint8)

        # Helper functions calculating the indices.
        def _time_index(value):
            frac = np.clip((value - starttime) / duration, 0, 1)
            return int(round(frac * (len_time - 1)))

        def _space_index(value):
            frac = np.clip(
                (value - min_epicentral_distance) / epicentral_range, 0, 1)
            return int(round(frac * (len_dist - 1)))

        def _color_index(channel):
            _map = {
                "Z": 2,
                "N": 1,
                "E": 0
            }
            channel = channel[-1].upper()
            if channel not in _map:
                raise ValueError("Unknown channel component: %s" % channel)
            return _map[channel]

        for channel in window_manager.list():
            station = ".".join(channel.split(".")[:2])
            for win in window_manager.get(channel):
                image[
                    _space_index(stations[station]["epicentral_distance"]),
                    _time_index(win.starttime):_time_index(win.endtime),
                    _color_index(channel)] = 255

        # From http://colorbrewer2.org/
        color_map = {
            (255, 0, 0): (228, 26, 28),  # red
            (0, 255, 0): (77, 175, 74),  # green
            (0, 0, 255): (55, 126, 184),  # blue
            (255, 0, 255): (152, 78, 163),  # purple
            (0, 255, 255): (255, 127, 0),  # orange
            (255, 255, 0): (255, 255, 51),  # yellow
            (255, 255, 255): (250, 250, 250),  # white
            (0, 0, 0): (50, 50, 50)  # More pleasant gray background
        }

        # Replace colors...fairly complex. Not sure if there is another way...
        red, green, blue = image[:, :, 0], image[:, :, 1], image[:, :, 2]
        for color, replacement in color_map.items():
            image[:, :, :][(red == color[0]) & (green == color[1]) &
                           (blue == color[2])] = replacement

        def _one(i):
            return [_i / 255.0 for _i in i]

        import matplotlib.pylab as plt
        plt.style.use("ggplot")

        artists = [
            plt.Rectangle((0, 1), 1, 1, color=_one(color_map[(0, 0, 255)])),
            plt.Rectangle((0, 1), 1, 1, color=_one(color_map[(0, 255, 0)])),
            plt.Rectangle((0, 1), 1, 1, color=_one(color_map[(255, 0, 0)])),
            plt.Rectangle((0, 1), 1, 1, color=_one(color_map[(0, 255, 255)])),
            plt.Rectangle((0, 1), 1, 1, color=_one(color_map[(255, 0, 255)])),
            plt.Rectangle((0, 1), 1, 1, color=_one(color_map[(255, 255, 0)])),
            plt.Rectangle((0, 1), 1, 1,
                          color=_one(color_map[(255, 255, 255)]))
        ]
        labels = [
            "Z",
            "N",
            "E",
            "Z + N",
            "Z + E",
            "N + E",
            "Z + N + E"
        ]

        if ax is None:
            plt.figure(figsize=(16, 9))
            ax = plt.gca()

        ax.imshow(image, aspect="auto", interpolation="nearest", vmin=0,
                  vmax=255, origin="lower")
        ax.grid()
        ax.set_title("Selected windows for iteration %s and event %s" % (
                     iteration.name, event["event_name"]))

        ax.legend(artists, labels, loc="lower right",
                  title="Selected Components")

        # Set the x-ticks.
        xticks = []
        for time in ax.get_xticks():
            # They are offset by -0.5.
            time += 0.5
            # Convert to actual time
            frac = time / float(len_time)
            time = frac * duration
            xticks.append("%.1f" % time)
        ax.set_xticklabels(xticks)
        ax.set_xlabel("Time since event in seconds")

        yticks = []
        for dist in ax.get_yticks():
            # They are offset by -0.5.
            dist += 0.5
            # Convert to actual epicentral distance.
            frac = dist / float(len_dist)
            dist = min_epicentral_distance + (frac * epicentral_range)
            yticks.append("%.1f" % dist)
        ax.set_yticklabels(yticks)
        ax.set_ylabel("Epicentral distance in degree [Binned in %i distances]"
                      % distance_bins)

        if show:
            plt.tight_layout()
            plt.show()
            plt.close()

        return ax
Ejemplo n.º 28
0
    def get_window_statistics(self, iteration, cache=True):
        """
        Get a dictionary with window statistics for an iteration per event.

        Depending on the size of your inversion and chosen iteration,
        this might take a while...

        :param iteration: The iteration for which to calculate everything.
        :param cache: Use cache (if available). Otherwise cached value will
            be deleted.
        """
        from obspy.geodetics.base import locations2degrees

        it = self.comm.iterations.get(iteration)

        cache_file = os.path.join(self._statistics_cache_folder,
                                  "window_statistics_iteration_%s.json" %
                                  it.name)

        if os.path.exists(cache_file):
            if cache is True:
                try:
                    with open(cache_file) as fh:
                        data = json.load(fh)
                except Exception as e:
                    print("Loading cache failed due to: %s" % str(e))
                else:
                    print("Loading statistics from cache.")
                    return data
            else:
                print("Removing existing cached file ...")
                os.remove(cache_file)

        statistics = {}

        for _i, event in enumerate(list(sorted(it.events.keys()))):
            print("Collecting statistics for event %i of %i ..." % (
                _i + 1, len(it.events)))

            wm = self.get(event=event, iteration=iteration)

            event_obj = self.comm.events.get(event)
            station_details = copy.deepcopy(
                self.comm.query.get_all_stations_for_event(event))

            component_window_count = {"E": 0, "N": 0, "Z": 0}
            component_length_sum = {"E": 0, "N": 0, "Z": 0}
            stations_with_windows_count = 0
            stations_without_windows_count = 0

            stations = {}

            for station in it.events[event]["stations"].keys():
                s = station_details[station]
                stations[station] = s

                s["epicentral_distance"] = locations2degrees(
                    event_obj["latitude"], event_obj["longitude"],
                    s["latitude"], s["longitude"])

                s["windows"] = {"Z": [], "E": [], "N": []}

                wins = wm.get_windows_for_station(station)
                has_windows = False
                for coll in wins:
                    component = coll.channel_id[-1].upper()
                    total_length = sum([_i.length for _i in coll.windows])
                    if not total_length:
                        continue
                    for win in coll.windows:
                        s["windows"][component].append(win.length)
                    has_windows = True
                    component_window_count[component] += 1
                    component_length_sum[component] += total_length
                if has_windows:
                    stations_with_windows_count += 1
                else:
                    stations_without_windows_count += 1

            statistics[event] = {
                "total_station_count": len(it.events[event]["stations"]),
                "stations_with_windows": stations_with_windows_count,
                "stations_without_windows": stations_without_windows_count,
                "stations_with_vertical_windows": component_window_count["Z"],
                "stations_with_north_windows": component_window_count["N"],
                "stations_with_east_windows": component_window_count["E"],
                "total_window_length": sum(component_length_sum.values()),
                "window_length_vertical_components": component_length_sum["Z"],
                "window_length_north_components": component_length_sum["N"],
                "window_length_east_components": component_length_sum["E"],
                "stations": stations
            }

        # Store in cache.
        with open(cache_file, "w") as fh:
            json.dump(statistics, fh)

        return statistics
Ejemplo n.º 29
0
def add_ortho(lats, lons, colors, CClim,
              central_longitude, central_latitude,
              text=None, size=50, marker=['o', 'd'],
              colormap='viridis', fig=None,
              rect=[0.0, 0.0, 1.0, 1.0]):
    if not fig:
        fig = plt.figure()

    proj = ccrs.Orthographic(central_longitude=central_longitude,
                             central_latitude=central_latitude)

    # left, bottom, width, height
    ax = fig.add_axes([rect[0],
                       rect[1] + rect[3] * 0.12,
                       rect[2],
                       rect[3] * 0.85],
                      projection=proj)
    cm_ax = fig.add_axes([rect[0],
                          rect[1],
                          rect[2],
                          rect[3] * 0.08])
    plt.sca(ax)

    # make the map global rather than have it zoom in to
    # the extents of any plotted data
    ax.set_global()

    ax.stock_img()
    ax.coastlines()
    ax.gridlines()

    lats_mark1 = []
    lons_mark1 = []
    colors_mark1 = []
    lats_mark2 = []
    lons_mark2 = []
    colors_mark2 = []

    cmap = get_cmap(colormap)
    cmap.set_under('grey')

    for lon, lat, color in zip(lons, lats, colors):
        if color > CClim:
            lats_mark1.append(lat)
            lons_mark1.append(lon)
            colors_mark1.append(color)
        else:
            lats_mark2.append(lat)
            lons_mark2.append(lon)
            colors_mark2.append(color)

    if len(lons_mark1) > 0:
        scatter = ax.scatter(lons_mark1, lats_mark1, s=size, c=colors_mark1,
                             marker=marker[0],
                             cmap=cmap, vmin=CClim, vmax=1, zorder=10,
                             transform=ccrs.Geodetic())

    if len(lons_mark2) > 0:
        scatter = ax.scatter(lons_mark2, lats_mark2, s=size, c=colors_mark2,
                             marker=marker[1],
                             cmap=cmap, vmin=CClim, vmax=1, zorder=10,
                             transform=ccrs.Geodetic())

    locator = MaxNLocator(5)

    cb = Colorbar(cm_ax, scatter, cmap=cmap,
                  orientation='horizontal',
                  ticks=locator,
                  extend='min')
    cb.set_label('CC')
    # Compat with old matplotlib versions.
    if hasattr(cb, "update_ticks"):
        cb.update_ticks()

    ax.plot(central_longitude, central_latitude, color='red', marker='*',
            markersize=np.sqrt(size))

    if text:
        for lat, lon, txt in zip(lats, lons, text):
            # Avoid plotting invisible texts. They clutter at the origin
            # otherwise
            dist = locations2degrees(lat, lon,
                                     central_latitude, central_longitude)
            if dist < 90:
                plt.text(lon, lat, txt, weight="heavy",
                         transform=ccrs.Geodetic(),
                         color="k", zorder=100,
                         path_effects=[
                             PathEffects.withStroke(linewidth=3,
                                                    foreground="white")])

    return ax
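# Hedged usage sketch (added): synthetic station coordinates and CC values,
# purely illustrative.
if __name__ == "__main__":
    ax = add_ortho(lats=[10.0, -20.0, 45.0], lons=[30.0, 60.0, -120.0],
                   colors=[0.9, 0.4, 0.7], CClim=0.5,
                   central_longitude=0.0, central_latitude=0.0,
                   text=["STA1", "STA2", "STA3"])
    plt.show()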
Ejemplo n.º 30
0
    def writeStations(self):
        '''Write station information to file, which can be loaded as a pandas dataframe'''

        ofname = 'Stations_%s_%s_%s_%s_%s_%s_mag_%s-%s_depth_%s-%s_km.dat' %(self.starttime,self.endtime,self.minlatitude,\
         self.minlongitude,self.maxlatitude,self.maxlongitude,self.minmag,self.maxmag,self.mindepth,self.maxdepth)

        outfile = open(ofname, 'w')

        try:

            for network in self.inventory:

                netname = network.code

                for station in network:

                    code = station.code
                    lat = station.latitude
                    lon = station.longitude
                    ele = station.elevation
                    stdate = station.start_date

                    if self.station_autoselect_flag:
                        #EK added 04/2019 to write only stations that we will
                        #later download
                        cnt = 0

                        for event in self.quake_cat:

                            time = event.origins[0].time
                            evlat = event.origins[0].latitude
                            evlon = event.origins[0].longitude
                            dep = event.origins[0].depth / 1000.
                            mag = event.magnitudes[0].mag

                            ddeg = locations2degrees(evlat, evlon, lat, lon)
                            distance_m, az, baz = gps2dist_azimuth(
                                evlat, evlon, lat, lon)

                            theta = np.arctan2(distance_m, dep * 1000.)

                            if theta <= np.pi / 4:

                                arrivals = self.vmodel.get_travel_times(
                                    source_depth_in_km=dep,
                                    distance_in_degree=ddeg,
                                    phase_list=["s", "S"])

                                if len(arrivals) > 0:
                                    cnt = cnt + 1
                        if cnt > 0:

                            outfile.write(
                                "%s %s %s %s %s %s\n" %
                                (lon, lat, ele, netname, code, stdate))
                    else:
                        outfile.write("%s %s %s %s %s %s\n" %
                                      (lon, lat, ele, netname, code, stdate))

            outfile.close()

        except (AttributeError, TypeError):

            print("Need to run fetchInventory before writing stations")
            sys.exit(1)
axis = [
    math.cos((90 + EQLON) / 180 * PI),
    math.sin((90 + EQLON) / 180 * PI), 0
]
theta = (90 - EQLAT) / 180 * PI
for i in range(len(dstart)):
    dstart[i] = np.dot(rotation_matrix(axis, theta), dstart[i])
    sstart = asSpherical(dstart[i])
    lat = 90 - sstart[1]
    lon = sstart[2]
    dstartt.append([lat, lon])

linepoints = []
for lon in range(-180, 180, 10):
    # Northern hemisphere
    DISTANCE = locations2degrees(EQLAT, EQLON, 0,
                                 lon)  # Station dist in degrees from epicentre
    bestfit = abs(DISTANCE - 140)
    bestloc = [0, lon]
    plotit = False
    for lat in range(0, 91, 1):
        DISTANCE = locations2degrees(
            EQLAT, EQLON, lat, lon)  # Station dist in degrees from epicentre
        if DISTANCE > 139.6 and DISTANCE < 140.4:
            plotit = True
            for latf in range((lat - 1) * 300, (lat + 2) * 300, 1):
                DISTANCE = locations2degrees(EQLAT, EQLON, latf / 300, lon)
                fit = abs(DISTANCE - 140)
                if fit < bestfit:
                    bestfit = fit
                    bestloc = [latf / 300, lon]
    if plotit:
 else:
     eqz = quake['depth']
 eqmag = quake['mag']
 eqloc = quake['place']
 eqtime = quake['time']
 url = "'https://earthquake.usgs.gov/earthquakes/eventpage/" + quake[
     'id'] + "/executive'"
 eqname = "'M" + quake['mag'] + " - " + quake['place'] + "'"
 eqlat = float(quake['latitude'])
 eqlon = float(quake['longitude'])
 eqz = float(quake['depth'])
 #eqtime = "'" + quake['time'][0:10] + " " + quake['time'][11:23] + "'"
 file_stem = "'" + quake['place'].split(
     ", ")[-1] + "-" + quake['time'][0:10] + "'"
 distance = locations2degrees(
     float(quake['latitude']), float(quake['longitude']), STA_LAT,
     STA_LON)  # Station dist in degrees from epicentre
 sta_dist, _, _ = gps2dist_azimuth(
     STA_LAT, STA_LON, float(quake['latitude']),
     float(quake['longitude']))  # Station dist in m from epicentre
 print(quake['mag'] + "\t" + quake['place'] + "\t" +
       str(quake['latitude']) + "\t" + str(quake['longitude']) + "\t" +
       str(quake['depth']) + "\t" + quake['time'][0:10] + " " +
       quake['time'][11:23] + "\t" + str(sta_dist / 1000) + "\t" +
       str(distance) + "\t" +
       "https://earthquake.usgs.gov/earthquakes/eventpage/" + quake['id'] +
       "/executive")
 arrivals = model.get_travel_times(source_depth_in_km=float(quake['depth']),
                                   distance_in_degree=distance)
 first_arrival = float(str(arrivals[0]).split(" ")[4])
 # Read the seismic stream
Ejemplo n.º 33
0
def main(argv):
    '''@package isc2stnxml
       It gathers station information from all STN files provided in the ISC and Engdahl catalogues, assigning the correct network code.
       When a proper network code cannot be identified, the program just guesses it, sorry...
    '''
    inv = read_inventory("IRIS-ALL.xml")

    # unknown stations in Indonesia are usually installed by Potsdam and we assume they have network name GE
    default_net = 'GE'
    ehb1 = read_eng('BMG.STN')
    ehb2 = read_eng('ISC.STN')
    ehb = np.unique(np.vstack((ehb1, ehb2)), axis=0)

    isc1 = read_isc('ehb.stn')
    isc2 = read_isc('iscehb.stn')
    isc = np.unique(np.vstack((isc1, isc2)), axis=0)

    catalogue = []
    our_xml = Inventory(networks=[], source='EHB')

    for i in range(ehb.shape[0]):
        filed = False
        xml = False
        stn_found = isc[isc[:, 0] == ehb[i, 0], :]
        min_dist = 10e10
        if stn_found.shape[0] > 0:
            if stn_found.shape[0] > 1:
                for j in range(stn_found.shape[0]):
                    dist = locations2degrees(float(stn_found[j, 2]),
                                             float(stn_found[j, 3]),
                                             float(ehb[i, 1]),
                                             float(ehb[i, 2]))
                    if dist < min_dist:
                        min_dist = dist
                        record = stn_found[j, :]
            else:
                min_dist = locations2degrees(float(stn_found[0, 2]),
                                             float(stn_found[0, 3]),
                                             float(ehb[i, 1]),
                                             float(ehb[i, 2]))
                record = stn_found[0, :]

#                Now we try to find the same station in XML file
#                if min_dist > 1. or stn_found.shape[0]==0:

        xstn_found = inv.select(station=ehb[i, 0], channel="*HZ")

        if len(stn_found) == 0 and len(xstn_found) == 0:
            # we failed to find the station anywhere, so assign dummy values
            record = [
                ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3], 'Z',
                '1964-1-1 00:00:00', '2599-12-31 23:59:59'
            ]
            min_dist = 0.
            filed = True
        else:
            # if the station is found somewhere, we iterate and see if the XML
            # has data, giving it preference by adding an extra value to the
            # min_dist found in ISC
            if len(xstn_found) > 0:
                #                        print "----------",len(xstn_found)
                #                        print xstn_found[0][0].latitude
                min_dist = min_dist + 0.1
                for j in range(len(xstn_found)):
                    dist = locations2degrees(xstn_found[j][0].latitude,
                                             xstn_found[j][0].longitude,
                                             float(ehb[i, 1]),
                                             float(ehb[i, 2]))
                    if min_dist > dist:
                        min_dist = dist
                        record = xstn_found[j]
                        #                                print record
                        xml = True

# Last defence: if the station has been found but the distance between the
# declared and found locations is more than 1 degree
        if min_dist > 1:
            record = [
                ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3], 'Z',
                '1964-1-1 00:00:00', '2599-12-31 23:59:59'
            ]
            filed = True
        if xml:
            #our_xml.networks.append(record)
            xml = False

        else:
            if filed:

                if len(record[7]) < 5:
                    record[7] = '2599-12-31 23:59:59'
                catalogue.append(record)

            else:

                stn_found = isc[(isc[:, 0] == record[0]) &
                                (isc[:, 1] == record[1]), :]

                for k in range(stn_found.shape[0]):
                    net = Network(code=stn_found[k, 1],
                                  stations=[],
                                  description=' ')
                    if len(stn_found[k, 7]) < 5:
                        stn_found[k, 7] = '2599-12-31 23:59:59'
                    catalogue.append(stn_found[k, :])

    stn_found = np.unique(np.array(catalogue), axis=0)
    if len(stn_found[stn_found == '']) > 0 or len(
            stn_found[stn_found == ' ']) > 0:
        print "Some elements are empty, check the list"

    # We composed our inventory. However, some stations from the ISC list can
    # be left behind, so we check whether any ISC stations were forgotten
    lost = []
    for j in range(isc.shape[0]):
        # is there any common station name?
        common_st = stn_found[isc[j, 0] == stn_found[:, 0]]
        if common_st.shape[0] > 0:
            # is network code the same?
            common_net = common_st[common_st[:, 1] == isc[j, 1]]
            if common_net.shape[0] < 1:
                # ok we found forgotten one, check the XML
                if len(inv.select(station=isc[j, 0], network=isc[j, 1])) <= 0:
                    # Bingo...
                    lost.append(isc[j, :])
        else:
            if len(inv.select(station=isc[j, 0], network=isc[j, 1])) <= 0:
                # Bingo...
                lost.append(isc[j, :])

    stn_found = np.vstack((stn_found, np.array(lost)))

    for k in range(stn_found.shape[0]):

        net = Network(code=stn_found[k, 1], stations=[], description=' ')
        if len(stn_found[k, 7]) < 5:
            stn_found[k, 7] = '2599-12-31 23:59:59'
        catalogue.append(stn_found[k, :])
        sta = Station(code=stn_found[k, 0],
                      creation_date=utcdatetime.UTCDateTime(stn_found[k, 6]),
                      termination_date=utcdatetime.UTCDateTime(stn_found[k, 7]),
                      site=Site(name=' '),
                      latitude=float(stn_found[k, 2]),
                      longitude=float(stn_found[k, 3]),
                      elevation=float(stn_found[k, 4]))

        cha = Channel(code=stn_found[k, 5],
                      depth=0.,
                      azimuth=0.,
                      dip=-90.,
                      location_code='',
                      latitude=float(stn_found[k, 2]),
                      longitude=float(stn_found[k, 3]),
                      elevation=float(stn_found[k, 4]))

        sta.channels.append(cha)
        net.stations.append(sta)
        our_xml.networks.append(net)


#             print 'np',stn_found[k,:]

    our_xml.write("station.xml", format="stationxml", validate=True)
    our_xml.write("station.txt", format="stationtxt")
Ejemplo n.º 34
0
                        endtime=UTCDateTime(2016, 4, 30, 23, 59, 59),
                        latitude=36.80060501882054,
                        longitude=137.6569971141782,
                        mindepth=60,
                        minmagnitude=6,
                        minradius=30,
                        maxradius=90)
print(cat)

# Calculate travel times for accurate picking of onsets from the teleseismic waveforms
p_tttable = []
s_tttable = []
sta_lat, sta_long = 35.5038, 136.7939  # latitude and longitude of specified station (N.TKTH)
model = TauPyModel(model="iasp91")
for i in range(len(cat)):
    distance = locations2degrees(sta_lat, sta_long, cat[i].origins[0].latitude,
                                 cat[i].origins[0].longitude)
    parrivals = model.get_travel_times(
        source_depth_in_km=cat[i].origins[0].depth / 1000,
        distance_in_degree=distance,
        phase_list=['P'])
    sarrivals = model.get_travel_times(
        source_depth_in_km=cat[i].origins[0].depth / 1000,
        distance_in_degree=distance,
        phase_list=['S'])
    p_time = parrivals[0].time
    s_time = sarrivals[0].time
    p_tttable.append(p_time)
    s_tttable.append(s_time)

# Get P and S onset times for each earthquake
p_starttime = []
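# Hedged continuation sketch (added): s_starttime and the loop below are an
# assumption about how the travel-time tables would be combined with origin
# times to get absolute onsets.
s_starttime = []
for i in range(len(cat)):
    p_starttime.append(cat[i].origins[0].time + p_tttable[i])
    s_starttime.append(cat[i].origins[0].time + s_tttable[i])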
Ejemplo n.º 35
0
    def writeEvents(self, centercoords=None):
        '''Write event information to file, which can be loaded as a pandas
        dataframe. Specify centercoords as a list [lon, lat] and the time of
        the first arriving (P) phase will be reported.'''

        ofname = 'Events_%s_%s_%s_%s_%s_%s_mag_%s-%s_depth_%s-%s_km.dat' %(self.starttime,self.endtime,self.minlatitude,\
         self.minlongitude,self.maxlatitude,self.maxlongitude,self.minmag,self.maxmag,self.mindepth,self.maxdepth)

        outfile = open(ofname, 'w')

        if self.quake_cat is None:

            print("Need to call fetchEvents first")
            sys.exit(1)

        if centercoords is None:

            for event in self.quake_cat:

                time = event.origins[0].time
                lat = event.origins[0].latitude
                lon = event.origins[0].longitude
                dep = event.origins[0].depth / 1000.
                mag = event.magnitudes[0].mag

                if self.station_autoselect_flag:

                    cnt = 0

                    for network in self.inventory:
                        for station in network:

                            stlat = station.latitude
                            stlon = station.longitude

                            ddeg = locations2degrees(lat, lon, stlat, stlon)
                            distance_m, az, baz = gps2dist_azimuth(
                                lat, lon, stlat, stlon)

                            theta = np.arctan2(distance_m, dep * 1000.)

                            if theta <= np.pi / 4:

                                arrivals = self.vmodel.get_travel_times(
                                    source_depth_in_km=dep,
                                    distance_in_degree=ddeg,
                                    phase_list=["s", "S"])

                                if len(arrivals) > 0:
                                    cnt = cnt + 1
                    if cnt > 0:
                        outfile.write("%s %s %s %s %s\n" %
                                      (lon, lat, dep, mag, time))
                else:
                    outfile.write("%s %s %s %s %s\n" %
                                  (lon, lat, dep, mag, time))

        #Haven't added the SWW here, so in this case all events will be
        #written to the file; might change in the future if needed

        #In this case, we write the time of the first arriving phase at the
        #stations

        else:

            try:
                clon = centercoords[0]
                clat = centercoords[1]
            except (TypeError, IndexError):
                print("centercoords needs to be entered as list [lon, lat]")
                sys.exit(1)

            for event in self.quake_cat:

                time = event.origins[0].time
                lat = event.origins[0].latitude
                lon = event.origins[0].longitude
                dep = event.origins[0].depth / 1000.0

                try:

                    cdist = locations2degrees(lat, lon, clat, clon)
                    arrivals = self.vmodel.get_travel_times(source_depth_in_km=dep,\
                    distance_in_degree=cdist,phase_list=["p","P"])
                except Exception:
                    continue

                if len(arrivals) > 0:
                    first_phase = arrivals[0].name
                    first_phase_time = time + arrivals[0].time

                else:
                    first_phase = 'NaN'
                    first_phase_time = "NaN"

                mag = event.magnitudes[0].mag

                outfile.write("%s %s %s %s %s %s %s %s\n" %
                              (lon, lat, dep, mag, time, first_phase_time,
                               first_phase, cdist))

        outfile.close()
Ejemplo n.º 36
0
minMag = 6.0


EventCatalog = getEvents4Station(staLat, staLon, begintime, endtime,
                                 minRad, maxRad, minMag)
print("There are " + str(EventCatalog.count()) + " events in the catalog")

#need to get data based on event arrival time
# step 1. calculate station/event distance
# step 2. calc phase arrival time
#dataStart=[]
#dataEnd=[]
for event in EventCatalog:
    evLat = event.origins[0]['latitude']
    evLon = event.origins[0]['longitude']
    evDepth = event.origins[0]['depth'] / 1000.0
    DegDist = locations2degrees(staLat, staLon, evLat, evLon)
    pTime = getPwaveArrival(evDepth, DegDist)
    dataStart = event.origins[0]['time'] + pTime - 10
    dataEnd = event.origins[0]['time'] + pTime + 60
    print(dataStart.year)

    #dataStream=getMSDdata(sta,net,chan,comp,dataStart,dataEnd)
    dataStream = getTR1data(sta, net, chan, comp, dataStart, dataEnd)
    dataStream.plot()

    
##count =0
#for date in dataStart:
#    print(date.year)
#    count =count+1
##put in check for day
Ejemplo n.º 37
0
    def GetData(self,stationdirpath='stations',datadirpath='waveforms',req_type='continuous',\
     chunklength=86400,tracelen=20000, vmodel='ak135'):
        '''Call obspy mass downloader to get waveform data. Chunklength refers to the trace length option
		for a continuous download, tracelen is for an event-based request'''

        #Currently set up to download one day's worth of data in continuous
        #mode and tracelen seconds (20000 by default) in the event-based mode

        self.stationdirpath = stationdirpath
        self.datadirpath = datadirpath

        from obspy.clients.fdsn.mass_downloader import RectangularDomain, CircularDomain,\
        Restrictions, MassDownloader

        if req_type == 'continuous':

            #Get data from all stations within this domain

            domain = RectangularDomain(minlatitude=self.minlatitude,maxlatitude=self.maxlatitude,\
             minlongitude=self.minlongitude,maxlongitude=self.maxlongitude)

            #Download data in daily segments - may want to change

            restrictions = Restrictions(\
                                    starttime=self.starttime,endtime=self.endtime,\
                                    chunklength_in_sec=chunklength,\
                                    channel=self.channel,station=self.station,location="",\
                                    reject_channels_with_gaps=False,\
                                    minimum_length=0.0,minimum_interstation_distance_in_m=100.0)

            #Call mass downloader to get the waveform information

            mdl = MassDownloader(providers=[self.clientname])

            mdl.download(domain,
                         restrictions,
                         mseed_storage=datadirpath,
                         stationxml_storage=stationdirpath)

        elif req_type == 'event':

            if self.quake_cat is None:

                print(
                    "Stop: Must call fetchEvents first to get event catalog to download from"
                )
                sys.exit(1)

            #Add option for non-continuous download - event/station pairing for example

            #Get data for all stations in this domain

            domain = RectangularDomain(minlatitude=self.minlatitude,maxlatitude=self.maxlatitude,\
             minlongitude=self.minlongitude,maxlongitude=self.maxlongitude)

            for event in self.quake_cat:
                cnt = 0.
                print("Downloading data for event %s" % event)

                #For each event, download the waveforms at all stations requested

                origin_time = event.origins[0].time

                vel_model = TauPyModel(model=vmodel)

                #case where we only want to download data for some station-event pairs
                stations_to_exclude = []

                if self.station_autoselect_flag:

                    stations_to_download = []
                    evlat = event.origins[0].latitude
                    evlon = event.origins[0].longitude

                    #EK changes added 04/2019
                    evdep = event.origins[0].depth

                    for network in self.inventory:

                        for station in network:

                            stlat = station.latitude
                            stlon = station.longitude

                            #EK 04/2019
                            #this downloads data within Short Wave Window (SWW), a cone under the station bounded by an angle, here we chose 45 deg
                            #calculate distance between eq and station and azimuth

                            ddeg = locations2degrees(evlat, evlon, stlat,
                                                     stlon)
                            distance_m, az, baz = gps2dist_azimuth(
                                evlat, evlon, stlat, stlon)

                            #calculate proxy for incident angle

                            theta = np.arctan2(distance_m, evdep)

                            if theta <= np.pi / 4:

                                #find if station has needed arrival

                                arrivals = vel_model.get_travel_times(
                                    source_depth_in_km=evdep / 1000.,
                                    distance_in_degree=ddeg,
                                    phase_list=["s", "S"])
                                if len(arrivals) > 0:

                                    #get stations you want to download

                                    stations_to_download.append(station.code)
                                    print(station.code,
                                          'angle = %.2f' % np.rad2deg(theta))
                                    print(arrivals)
                                    cnt = cnt + 1
                                else:
                                    stations_to_exclude.append(station.code)
                            else:

                                if station.code not in stations_to_exclude:
                                    stations_to_exclude.append(station.code)

                    print(
                        "\n-------------\n%g event-station pairs found in SWW\n-------------\n"
                        % cnt)
                    print(
                        "\n-------------\nSelecting just the following stations for download\n-------------\n"
                    )
                    print(stations_to_download)

                    #this approach doesn't work, use exclude_stations flag later
                    #restrictions = Restrictions(starttime=origin_time,endtime=origin_time + tracelen,\
                    #reject_channels_with_gaps=False, minimum_length=0.95, minimum_interstation_distance_in_m=10E3,\
                    #channel=self.channel,location="",network=self.network,station=stations_to_download)

                #case where we have single network

                if self.network:

                    restrictions = Restrictions(starttime=origin_time,endtime=origin_time + tracelen,\
                     reject_channels_with_gaps=False, minimum_length=0.95, minimum_interstation_distance_in_m=10E3,\
                     channel=self.channel,location="",network=self.network,exclude_stations=stations_to_exclude)

                #Case where we want all networks within a region (assumes that we also want all stations unless we have built
                # a stations to exclude list)

                else:

                    restrictions = Restrictions(starttime=origin_time,endtime=origin_time + tracelen,\
                     reject_channels_with_gaps=False, minimum_length=0.95, minimum_interstation_distance_in_m=10E3,\
                     channel=self.channel,exclude_stations=stations_to_exclude)

                mdl = MassDownloader(providers=[self.clientname])

                mdl.download(domain, restrictions, mseed_storage=datadirpath,\
                 stationxml_storage=stationdirpath)
def get_property_times(stla, stlo, evla, evlo, evdp):
    property_times = {
        "p": None,  # either p or P
        "s": None,
        "rayleigh": None,
        "love": None,
        "ss": None,
        "pp": None,
        "sp": None,
        "scs": None,
        "gcarc": None,
        "azimuth": None,
        "stla": stla,
        "stlo": stlo,
    }

    # spherical great-circle distance, since TauP assumes a spherical earth
    gcarc = locations2degrees(stla, stlo, evla, evlo)
    property_times["gcarc"] = gcarc

    # calculate first arrivals
    arrivals = model.get_travel_times(source_depth_in_km=evdp,
                                      distance_in_degree=gcarc,
                                      phase_list=[
                                          "p", "P", "s", "S", "3.5kmps",
                                          "4.0kmps", "sS", "SS", "pP", "PP",
                                          "sP", "SP", "ScS"
                                      ])

    for item in arrivals:
        # find p
        if property_times["p"] is None and item.name in ("p", "P"):
            property_times["p"] = item.time

        # find s
        if property_times["s"] is None and item.name in ("s", "S"):
            property_times["s"] = item.time

        # find surface waves (group-velocity proxies)
        if property_times["rayleigh"] is None and item.name == "3.5kmps":
            property_times["rayleigh"] = item.time
        if property_times["love"] is None and item.name == "4.0kmps":
            property_times["love"] = item.time

        # find pp, ss, sp
        if property_times["pp"] is None and item.name in ("pP", "PP"):
            property_times["pp"] = item.time
        if property_times["ss"] is None and item.name in ("sS", "SS"):
            property_times["ss"] = item.time
        if property_times["sp"] is None and item.name in ("sP", "SP"):
            property_times["sp"] = item.time

        # find scs
        if property_times["scs"] is None and item.name == "ScS":
            property_times["scs"] = item.time

    # get azimuth, from the source to the stations
    _, property_times["azimuth"], _ = gps2dist_azimuth(evla, evlo, stla, stlo)

    # always succeeds
    return property_times
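# Hedged usage sketch (added): the function above relies on a module-level
# TauPyModel named `model`; the coordinates here are illustrative only.
from obspy.taup import TauPyModel
model = TauPyModel(model="iasp91")
times = get_property_times(stla=35.0, stlo=135.0, evla=-3.0, evlo=102.0,
                           evdp=30.0)
print(times["p"], times["s"], times["gcarc"], times["azimuth"])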
    ('pP', 45),
    ('PcP', 80),
    ('PKIKP', 150),
    ('PKiKP', 100),
    ('S', 65),
    ('sP', -30),
    ('ScS', -60),
    ('SKS', -82),
    ('ScP', -40),
    ('Pdiff', -120),
    ('PKP', -160),
    ('SKiKP', -100),
    ('SKP', -140)
])
# Calculated constants
STA_DIST = locations2degrees(EQLAT, EQLON, STA_LAT,
                             STA_LON)  # distance to local station (degrees)
EQLATLON = (EQLAT, EQLON)
BUFFER = 60  # time before and after plot for taper data
START_TIME = UTCDateTime(EQTIME)
END_TIME = START_TIME + DURATION
# Pretty paired colors. Reorder to have saturated colors first and remove
# some colors at the end. This cmap is compatible with obspy taup
cmap = get_cmap('Paired', lut=12)
COLORS = [
    '#%02x%02x%02x' % tuple(int(col * 255) for col in cmap(i)[:3])
    for i in range(12)
]
COLORS = COLORS[1:][::2][:-1] + COLORS[::2][:-1]

#COLORS = [ cm.plasma(x) for x in linspace(0, 0.8, len(PHASES)) ] # colours from 0.8-1.0 are not very visible
# End of parameters to define
Ejemplo n.º 40
0
# pick phase arrivals and write SAC files
cata_xml = obspy.read_events("gcmt_00_15.xml")
for eventss in cata_xml:
  orig = eventss.origins[0]
  event_time = orig.time
  dt = UTCDateTime(event_time)
  yy = str(dt.year)
  jday = str(dt.julday).zfill(3)
  yrday = yy + '.' + jday
  src_path = database + yrday
  
  if os.path.isdir(src_path):
    event_lat = orig.latitude
    event_lon = orig.longitude
    event_dep = orig.depth
    dis_degree = locations2degrees(event_lat, event_lon, sta_lat, sta_lon)
    arrivals = model.get_travel_times(source_depth_in_km=event_dep / 1000.0,
                                      distance_in_degree=dis_degree,
                                      phase_list=ph_list)
    
    if not os.path.isdir(dst_path + yrday):
      shutil.copytree(src_path, dst_path + yrday)
      print("Copied " + src_path + " to " + dst_path + yrday)
      
      os.chdir(dst_path+yrday)
      rm = glob.glob('*.BDH.SAC')
      rm.append('sac.out')
      for f in rm:
        os.remove(dst_path+yrday+'/'+f)
      st = read('*')
      for tr in st:
        sac = SACTrace.from_obspy_trace(tr)
        sac.stla = sta_lat