Example #1
def get_travel_time(sourcelatitude, sourcelongitude, sourcedepthinmeters,
                    receiverlatitude, receiverlongitude,
                    receiverdepthinmeters, phase_name, db_info):
    """
    Fully working travel time callback implementation.
    """
    if receiverdepthinmeters:
        raise ValueError("This travel time implementation cannot calculate "
                         "buried receivers.")

    great_circle_distance = geodetics.locations2degrees(
        sourcelatitude, sourcelongitude, receiverlatitude, receiverlongitude)

    try:
        tts = MODEL.get_travel_times(
            source_depth_in_km=sourcedepthinmeters / 1000.0,
            distance_in_degree=great_circle_distance,
            phase_list=[phase_name])
    except Exception as e:
        raise ValueError(str(e))

    if not tts:
        return None

    # For any phase, return the first time.
    return tts[0].time
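For context, a minimal sketch of how this callback could be wired up; the module-level MODEL and geodetics names are assumptions about the surrounding imports:

from obspy import geodetics
from obspy.taup import TauPyModel

MODEL = TauPyModel(model="ak135")

# P arrival for a 10 km deep source, 30 degrees from a surface receiver.
t = get_travel_time(0.0, 0.0, 10000.0, 0.0, 30.0, 0.0, "P", db_info=None)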
Example #2
def test_build_distance_matrix(spoints):
    obj = SphereDistRel(spoints, normalize_mode="average")
    dist_m = obj._build_distance_matrix()

    d1 = 20
    d2 = locations2degrees(10, 10, 10, -10)
    d3 = locations2degrees(10, 10, -10, -10)

    true_m = np.array([[0, d2, d3, d1],
                       [d2, 0, d1, d3],
                       [d3, d1, 0, d2],
                       [d1, d3, d2, 0]])
    npt.assert_allclose(dist_m, true_m)

    obj.calculate_weight(10.0)
    npt.assert_allclose(obj.points_weights, [1.0, 1.0, 1.0, 1.0])
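For reference, a fixture along these lines would reproduce the distance matrix asserted above; SphereDistRel comes from the package under test, and the SpherePoint container here is hypothetical:

import pytest

@pytest.fixture
def spoints():
    # Four points on a 20-degree square straddling the equator, given as
    # (lat, lon) pairs ordered to match the rows of true_m above.
    # SpherePoint is hypothetical; the real suite defines its own type.
    return [SpherePoint(latitude=10, longitude=10),
            SpherePoint(latitude=10, longitude=-10),
            SpherePoint(latitude=-10, longitude=-10),
            SpherePoint(latitude=-10, longitude=10)]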
Example #3
def calculate_time_phase(event, sta):
    """
    calculate arrival time of the requested phase to use in retrieving
    waveforms.
    :param event:
    :param sta:
    :return:
    """

    ev_lat = event['latitude']
    ev_lon = event['longitude']
    ev_dp = abs(float(event['depth']))
    sta_lat = float(sta[4])
    sta_lon = float(sta[5])
    delta = locations2degrees(ev_lat, ev_lon, sta_lat, sta_lon)
    tt = getTravelTimes(delta, ev_dp)
    phase_list = ['P', 'Pdiff', 'PKIKP']

    time_ph = 0
    flag = False
    for ph in phase_list:
        for arrival in tt:
            if arrival['phase_name'] == ph:
                flag = True
                time_ph = arrival['time']
                break
        if flag:
            print('Phase: %s' % ph)
            break
    t_start = event['t1'] + time_ph
    t_end = event['t2'] + time_ph
    return t_start, t_end
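getTravelTimes belongs to the old obspy.taup interface, which has since been removed; a rough modern equivalent of the lookup above using TauPyModel (note it takes the earliest arrival among the requested phases rather than the first phase in list order) would be:

from obspy.taup import TauPyModel

model = TauPyModel(model='iasp91')
arrivals = model.get_travel_times(source_depth_in_km=ev_dp,
                                  distance_in_degree=delta,
                                  phase_list=['P', 'Pdiff', 'PKIKP'])
time_ph = arrivals[0].time if arrivals else 0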
Example #4
def distance(lat1, lon1, lat2, lon2):
    """
    Given two points location by (latitude, longitude) and return
    the distance on sphere between two points, unit in degree
    :return: distance in degree
    """
    return locations2degrees(lat1, lon1, lat2, lon2)
Example #5
def calculate_time_phase(event, sta, bg_model='iasp91'):
    """
    calculate arrival time of the requested phase
    :param event:
    :param sta:
    :param bg_model:
    :return:
    """
    phase_list = ['P', 'Pdiff', 'PKIKP']

    time_ph = 0
    ev_lat = event['latitude']
    ev_lon = event['longitude']
    evdp = abs(float(event['depth']))
    sta_lat = float(sta[4])
    sta_lon = float(sta[5])
    dist = locations2degrees(ev_lat, ev_lon, sta_lat, sta_lon)

    try:
        from obspy.taup import tau
        tau_bg = tau.TauPyModel(model=bg_model)
    except ImportError:
        tau_bg = False

    if not tau_bg:
        try:
            tt = getTravelTimes(dist, evdp)
            flag = False
            for ph in phase_list:
                for arrival in tt:
                    if arrival['phase_name'] == ph:
                        flag = True
                        time_ph = arrival['time']
                        break
                if flag:
                    # Stop at the first phase found; otherwise a later
                    # phase in the list would overwrite the pick.
                    break
            if not flag:
                time_ph = 0
        except Exception:
            time_ph = 0
    else:
        time_ph = 0
        for ph in phase_list:
            try:
                arrivals = tau_bg.get_travel_times(evdp, dist,
                                                   phase_list=[ph])
            except Exception:
                continue
            if arrivals:
                # Take the first phase in the priority list that arrives.
                time_ph = arrivals[0].time
                break

    t_start = event['t1'] + time_ph
    t_end = event['t2'] + time_ph
    return t_start, t_end
Example #6
def assert_loc_np(lat1, long1, lat2, long2,
                  approx_distance, expected_output_len):
    loc2deg = locations2degrees(np.array(lat1),
                                np.array(long1),
                                np.array(lat2),
                                np.array(long2))
    # 6371 km: Earth radius, to convert the angular distance to km.
    self.assertTrue((np.abs(np.radians(loc2deg) * 6371 -
                            approx_distance) <= 20).all())
    self.assertTrue(np.isscalar(loc2deg)
                    if expected_output_len == 0 else
                    len(loc2deg) == expected_output_len)
Example #7
def waveform_plot(fig,
                  tr_z,
                  tr_n,
                  tr_e,
                  source: Origin,
                  receiver: Station,
                  phases: list = None,
                  position=224):
    """ Waveform plot - animated. """
    ax = fig.add_subplot(position)
    # Normalise to maximum amplitude across traces
    max_amp = max(np.abs(tr_z.data).max(), np.abs(tr_e.data).max(),
                  np.abs(tr_n.data).max())
    assert tr_z.stats.starttime == tr_e.stats.starttime == tr_n.stats.starttime
    assert tr_z.stats.npts == tr_n.stats.npts == tr_e.stats.npts
    times = [(tr_z.stats.starttime + t).datetime
             for t in (np.arange(tr_z.stats.npts) / tr_z.stats.sampling_rate)]
    z, n, e = tr_z.data / max_amp, tr_n.data / max_amp, tr_e.data / max_amp
    # Offset each trace by that max amplitude and plot - turn off y-ticks
    n += e.max() + abs(n.min())
    z += n.max() + abs(z.min())
    # Plot in grey first, then animate in black over the top.
    ax.plot(times, e, color="lightgrey")
    ax.plot(times, n, color="lightgrey")
    ax.plot(times, z, color="lightgrey")
    ax.set_yticks([])
    ax.text(x=times[0], y=e.mean() + 0.2, s=tr_e.id)
    ax.text(x=times[0], y=n.mean() + 0.2, s=tr_n.id)
    ax.text(x=times[0], y=z.mean() + 0.2, s=tr_z.id)
    ax.set_xticklabels(ax.get_xticklabels(), rotation=30, ha="right")

    # Plot arrivals
    phases = phases or DEFAULT_PHASES
    degrees_dist = locations2degrees(receiver.latitude, receiver.longitude,
                                     source.latitude, source.longitude)
    arrivals = MODEL.get_travel_times(source_depth_in_km=source.depth / 1000.0,
                                      distance_in_degree=degrees_dist,
                                      phase_list=phases)
    phase_list_plotted = []
    i = 0
    for arrival in arrivals:
        arrival_time = source.time + arrival.time
        if arrival_time < tr_z.stats.endtime:
            ax.axvline(arrival_time.datetime,
                       color=COLORS[i % len(COLORS)],
                       label=arrival.name)
            phase_list_plotted.append(arrival.name)
            i += 1
    ax.legend()
    return fig, ax, phase_list_plotted
Example #8
def get_travel_times(station, earthquake):
    """
    Calculate travel times for phases using obspy.

    Return a dictionary with phase name as key and arrival time as the value.
    """
    dist = locations2degrees(station[0], station[1],
                             earthquake[0], earthquake[1])
    model = TauPyModel(model='iasp91')
    arrivals = model.get_travel_times(source_depth_in_km=earthquake[2],
                                      distance_in_degree=dist)
    travel_times = {}
    for arrival in arrivals:
        travel_times[arrival.name] = arrival.time
    return travel_times
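A quick usage sketch for the helper above; the coordinates and depth are illustrative:

# station = (latitude, longitude); earthquake = (latitude, longitude, depth_km)
station = (34.95, -106.46)
earthquake = (36.1, -117.85, 10.0)
travel_times = get_travel_times(station, earthquake)
print(travel_times.get('P'), travel_times.get('S'))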
Example #9
    def slowness_correct(self, tr, phase_list):
        '''Functions that corrects the slowness of the incoming P wave from a
        teleseismic earthquake to vertical incidence.
        '''
        # Calculate the slowness of the incoming P wave given station and
        # source locations
        # Calculate distance between source and receiver
        distance = locations2degrees(tr.stats.sac.evla,
                                     tr.stats.sac.evlo,
                                     tr.stats.sac.stla,
                                     tr.stats.sac.stlo)
        # Initialise the earth model
        model = TauPyModel(model="ak135")
        # Get the slowness
        arrivals = model.get_travel_times(tr.stats.sac.evdp/1000,
                                          distance, phase_list=phase_list)
        slowness = arrivals[0].ray_param*(np.pi/19980)  # s/rad to s/km

        # Loop over the uncorrected reflection times of the trace to
        # calculate the corrected reflection times
        corr_rtimes = []
        for rtime in tr.times():
            # Calculate appropriate correction velocity
            if rtime <= 5.0:
                v0 = 4.0
            elif rtime <= 13.0 and rtime > 5.0:
                v0 = 6.0
            else:
                v0 = 8.0
            # Correct the reflection time
            #v0 = 6.0
            corr_rtimes.append(rtime/(1.0-(0.5*(v0**2.0)*slowness**2.0)))

        # Interpolate the rtimes to smooth out sampling rates changes
        #times = np.interp(tr.data,corr_rtimes,tr.data)
        #f = interp1d(tr.data, corr_rtimes)
        #rtimes = f(tr.data)

        # Calculate end point and gap between the new rtimes
        gap = corr_rtimes[1] - corr_rtimes[0]

        # Create a trace with times corresponding to the new rtimes
        tr.stats.sampling_rate = 1.0/gap

        return tr
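Note on units: multiplying by np.pi/19980 divides the ray parameter (s/rad) by an Earth radius of roughly 6360 km. With the conventional 6371 km radius, the same conversion reads:

R_EARTH_KM = 6371.0
# arrival.ray_param is in s/rad; dividing by the radius gives s/km.
slowness_s_per_km = arrivals[0].ray_param / R_EARTH_KM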
Example #10
def extract_distance_info(asdffile, windows):
    stations = []
    for sta in windows:
        stations.append(sta)
    ds = pyasdf.ASDFDataSet(asdffile)
    sta_dict = extract_waveform_stations(ds, stations=stations)

    event = ds.events[0]
    origin = event.preferred_origin()
    event_lat = origin.latitude
    event_lon = origin.longitude
    event_depth = origin.depth / 1000.0

    dists = {}
    for sta_id, sta_loc in sta_dict.items():
        dists[sta_id] = locations2degrees(event_lat, event_lon,
                                          sta_loc[0], sta_loc[1])
    return dists, event_depth
Example #11
def travel_time_calc(evla, evlo, stla, stlo, evdp, bg_model):
    """
    calculate arrival time of different seismic phases
    :param evla:
    :param evlo:
    :param stla:
    :param stlo:
    :param evdp:
    :param bg_model:
    :return:
    """
    # --------------- TAUP
    dist = locations2degrees(evla, evlo, stla, stlo)
    try:
        tt = [_i for _i in getTravelTimes(dist, evdp, bg_model)
              if 'Pdiff' == _i['phase_name']][0]['time']
    except Exception:
        tt = False
    return tt
Example #12
        inventory += obspy.read_inventory(recfile)

    stats = []
    stat_names = []
    # Create list of stations, independent of network
    for network in inventory:
        for stat in network:
            # Remove duplicate stations
            if (stat.code not in stat_names):
                stats.append(stat)
                stat_names.append(stat.code)

# Sort stations by distance to source
stats_sort = sorted(stats, key=lambda stat:
                    locations2degrees(stat.latitude,
                                      stat.longitude,
                                      origin.latitude,
                                      origin.longitude))
nrec = len(stats_sort)

with open('receiver.dat', 'w') as fid:
    fid.write('%d   # nr of receivers\n' % nrec)
    fid.write('Z    # component\n')

    phase_list = args.phases

    for stat in stats_sort:
        distance = locations2degrees(stat.latitude, stat.longitude,
                                     origin.latitude, origin.longitude)
        # print distance
        tt = model.get_travel_times(distance_in_degree=distance,
                                    source_depth_in_km=origin.depth*1e-3,
Example #13
def _validate_and_write_waveforms(st, callback, starttime, endtime, scale,
                                  source, receiver, db, label, format):
    if not label:
        label = ""
    else:
        label += "_"

    for tr in st:
        # Half the filesize but definitely sufficiently accurate.
        tr.data = np.require(tr.data, dtype=np.float32)

    if scale != 1.0:
        for tr in st:
            tr.data *= scale

    # Sanity checks. Raise internal server errors in case something fails.
    # This should not happen and should have been caught before.
    if endtime > st[0].stats.endtime:
        msg = ("Endtime larger than the extracted endtime: endtime=%s, "
               "largest db endtime=%s" % (
                _format_utc_datetime(endtime),
                _format_utc_datetime(st[0].stats.endtime)))
        callback((tornado.web.HTTPError(500, log_message=msg, reason=msg),
                  None))
        return
    if starttime < st[0].stats.starttime - 3600.0:
        msg = ("Starttime more than one hour before the starttime of the "
               "seismograms.")
        callback((tornado.web.HTTPError(500, log_message=msg, reason=msg),
                  None))
        return

    if isinstance(source, FiniteSource):
        mu = None
    else:
        mu = st[0].stats.instaseis.mu

    # Trim, potentially pad with zeroes.
    st.trim(starttime, endtime, pad=True, fill_value=0.0, nearest_sample=False)

    # Checked in another function and just a sanity check.
    assert format in ("miniseed", "saczip")

    if format == "miniseed":
        with io.BytesIO() as fh:
            st.write(fh, format="mseed")
            fh.seek(0, 0)
            binary_data = fh.read()
        callback((binary_data, mu))
    # Write a number of SAC files into an archive.
    elif format == "saczip":
        byte_strings = []
        for tr in st:
            # Write SAC headers.
            tr.stats.sac = obspy.core.AttribDict()
            # Write WGS84 coordinates to the SAC files.
            tr.stats.sac.stla = geocentric_to_elliptic_latitude(
                receiver.latitude)
            tr.stats.sac.stlo = receiver.longitude
            tr.stats.sac.stdp = receiver.depth_in_m
            tr.stats.sac.stel = 0.0
            if isinstance(source, FiniteSource):
                tr.stats.sac.evla = geocentric_to_elliptic_latitude(
                    source.hypocenter_latitude)
                tr.stats.sac.evlo = source.hypocenter_longitude
                tr.stats.sac.evdp = source.hypocenter_depth_in_m
                # Force source has no magnitude.
                if not isinstance(source, ForceSource):
                    tr.stats.sac.mag = source.moment_magnitude
                src_lat = source.hypocenter_latitude
                src_lng = source.hypocenter_longitude
            else:
                tr.stats.sac.evla = geocentric_to_elliptic_latitude(
                    source.latitude)
                tr.stats.sac.evlo = source.longitude
                tr.stats.sac.evdp = source.depth_in_m
                # Force source has no magnitude.
                if not isinstance(source, ForceSource):
                    tr.stats.sac.mag = source.moment_magnitude
                src_lat = source.latitude
                src_lng = source.longitude
            # That's what SPECFEM uses for a moment magnitude...
            tr.stats.sac.imagtyp = 55
            # The event origin time relative to the reference time, which
            # I'll just assume to be the starttime here.
            tr.stats.sac.o = source.origin_time - starttime

            # SAC coordinates are elliptical, thus it only makes sense to
            # use elliptical distances.
            dist_in_m, az, baz = gps2dist_azimuth(
                lat1=tr.stats.sac.evla,
                lon1=tr.stats.sac.evlo,
                lat2=tr.stats.sac.stla,
                lon2=tr.stats.sac.stlo)

            tr.stats.sac.dist = dist_in_m / 1000.0
            tr.stats.sac.az = az
            tr.stats.sac.baz = baz

            # XXX: Is this correct? Maybe better use some function in
            # geographiclib?
            tr.stats.sac.gcarc = locations2degrees(
                lat1=src_lat,
                long1=src_lng,
                lat2=receiver.latitude,
                long2=receiver.longitude)

            # Set two more headers. See #45.
            tr.stats.sac.lpspol = 1
            tr.stats.sac.lcalda = 0

            # Some provenance.
            tr.stats.sac.kuser0 = "InstSeis"
            tr.stats.sac.kuser1 = db.info.velocity_model[:8]
            tr.stats.sac.user0 = scale
            # Prefix version numbers to identify them at a glance.
            tr.stats.sac.kt7 = "A" + db.info.axisem_version[:7]
            tr.stats.sac.kt8 = "I" + __version__[:7]

            # Times have to be set by hand.
            t, _ = utcdatetime_to_sac_nztimes(tr.stats.starttime)
            for key, value in t.items():
                tr.stats.sac[key] = value

            with io.BytesIO() as temp:
                tr.write(temp, format="sac")
                temp.seek(0, 0)
                filename = "%s%s.sac" % (label, tr.id)
                byte_strings.append((filename, temp.read()))
        callback((byte_strings, mu))
Example #14
def dump_picks(event_log,vel_model,gf_list,out_file):
    '''  
    Dump P and S picks to a file
    '''
    
    from obspy.taup import TauPyModel
    from obspy.geodetics.base import gps2dist_azimuth
    from obspy.geodetics import locations2degrees
    from numpy import genfromtxt, zeros, array
    
    
    #Read station locations
    sta = genfromtxt(gf_list, usecols=0, dtype='U')
    lonlat = genfromtxt(gf_list, usecols=[1, 2])
    
    #Load velocity model for ray tracing
    velmod = TauPyModel(vel_model)
    
    # Get hypocenter
    f = open(event_log, 'r')
    loop_go = True
    while loop_go:
        line = f.readline()
        if 'Hypocenter (lon,lat,z[km])' in line:
            s = line.split(':')[-1].replace('(', '').replace(')', '')
            hypo = array(s.split(',')).astype('float')
            loop_go = False

    #compute station to hypo distances
    d = zeros(len(lonlat))
    for k in range(len(lonlat)):
        d[k], az, baz = gps2dist_azimuth(lonlat[k, 1], lonlat[k, 0],
                                         hypo[1], hypo[0])
        d[k] = d[k]/1000
        

    f=open(out_file,'w')
    f.write('# sta,lon,lat,ptime(s),stime(s)\n')
    
    for k in range(len(sta)):
        
        
        # Ray trace
        deg=locations2degrees(hypo[1],hypo[0],lonlat[k,1],lonlat[k,0])
        try:
            arrivals = velmod.get_travel_times(source_depth_in_km=hypo[2],
                                               distance_in_degree=deg,
                                               phase_list=['P', 'Pn', 'S',
                                                           'Sn', 'p', 's'])
        except Exception:
            # Retry with a slightly shallower source if ray tracing fails.
            arrivals = velmod.get_travel_times(source_depth_in_km=hypo[2]-1.056,
                                               distance_in_degree=deg,
                                               phase_list=['P', 'Pn', 'S',
                                                           'Sn', 'p', 's'])

        ptime=1e6
        stime=1e6
            
        #Determine P and S arrivals
        for kphase in range(len(arrivals)):
            if arrivals[kphase].name in ('P', 'p', 'Pn'):
                if arrivals[kphase].time < ptime:
                    ptime = arrivals[kphase].time
            if arrivals[kphase].name in ('S', 's', 'Sn'):
                if arrivals[kphase].time < stime:
                    stime = arrivals[kphase].time
            
        lon=lonlat[k,0]
        lat=lonlat[k,1] 
        station=sta[k]       
        line='%s\t%.4f\t%.4f\t%10.4f\t%10.4f\n' % (station,lon,lat,ptime,stime)
        f.write(line)
        
    f.close()
Example #15
def spatial_stack(filelst, sigma=0.5, weight_threshold=1.2e-4, filt=True, freqmin=0.5, freqmax=4, savepath="SDI/spatial_stack"):

    os.makedirs(savepath, exist_ok=True)

    filelst.sort()
    for file1 in filelst:
        print(file1)

        basename = os.path.basename(file1)

        tr1 = read(file1)[0]
        summ = np.zeros_like(tr1.data)

        lat1 = tr1.stats.sac.stla
        lon1 = tr1.stats.sac.stlo

        sum_weight = 0.0

        for file2 in filelst:
            tr2 = read(file2)[0]

            if tr1.stats.channel != tr2.stats.channel:
                continue

            lat2 = tr2.stats.sac.stla
            lon2 = tr2.stats.sac.stlo

            dist = locations2degrees(lat1=lat1, long1=lon1, lat2=lat2, long2=lon2)

            weight = math.exp(-dist*dist/sigma/sigma)
            if weight < weight_threshold:
                continue

            if filt:
                tr2.filter(type="bandpass", freqmin=freqmin, freqmax=freqmax)

            tr2.normalize()

            try:
                summ += tr2.data*weight
                sum_weight += weight
            except ValueError:
                # Skip traces whose npts does not match tr1; don't count
                # their weight in the normalisation.
                continue


        if sum_weight < 1.5:
            continue

        tr1.data = summ/sum_weight

        tr1.write(filename=savepath+"/"+basename, format="SAC")


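Because the weight is exp(-d**2/sigma**2), weight_threshold implies an effective cutoff radius beyond which a trace cannot enter the stack; a small sketch under the default parameters:

import math

sigma = 0.5
weight_threshold = 1.2e-4
# Distance (degrees) at which the Gaussian weight falls to the threshold.
cutoff_deg = sigma * math.sqrt(-math.log(weight_threshold))
print(round(cutoff_deg, 2))  # ~1.5 degrees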
Example #16
def distaz_query(records, deg=None, km=None, swath=None):
    """ 
    Out-of-database subset based on distances and/or azimuths.
    
    Parameters
    ----------
    records : iterable of objects with lat, lon attribute floats
        Target of the subset.
    deg : list or tuple of numbers, optional
        (centerlat, centerlon, minr, maxr)
        minr, maxr in degrees or None for unconstrained.
    km : list or tuple of numbers, optional
        (centerlat, centerlon, minr, maxr)
        minr, maxr in km or None for unconstrained.
    swath : list or tuple of numbers, optional
        (lat, lon, azimuth, tolerance)
        Azimuth (from North) +/-tolerance from lat,lon point in degrees.

    Returns
    -------
    list
        Subset of supplied records.

    """
    # Initial all-True array to propagate through multiple logical ANDs.
    mask0 = np.ones(len(records), dtype=bool)

    if deg:
        dgen = (geod.locations2degrees(irec.lat, irec.lon, deg[0], deg[1]) \
                for irec in records)
        degrees = np.fromiter(dgen, dtype=float)
        if deg[2] is not None:
            mask0 = np.logical_and(mask0, deg[2] <= degrees)
        if deg[3] is not None:
            mask0 = np.logical_and(mask0, deg[3] >= degrees)


    if km:
        #???: this may be backwards
        mgen = (geod.gps2dist_azimuth(irec.lat, irec.lon, km[0], km[1])[0]
                for irec in records)
        kilometers = np.fromiter(mgen, dtype=float)/1e3
        if km[2] is not None:
            mask0 = np.logical_and(mask0, km[2] <= kilometers)
        if km[3] is not None:
            mask0 = np.logical_and(mask0, km[3] >= kilometers)


    if swath is not None:
        minaz = swath[2] - swath[3]
        maxaz = swath[2] + swath[3]
        #???: this may be backwards
        # Azimuths measured from the swath point, not the km center.
        azgen = (geod.gps2dist_azimuth(irec.lat, irec.lon,
                                       swath[0], swath[1])[1]
                 for irec in records)
        azimuths = np.fromiter(azgen, dtype=float)
        mask0 = np.logical_and(mask0, azimuths >= minaz)
        mask0 = np.logical_and(mask0, azimuths <= maxaz)

    idx = np.nonzero(mask0)[0]
    recs = [records[i] for i in idx]

    return recs
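A usage sketch with lightweight records; the Record type and coordinates are illustrative:

from collections import namedtuple

Record = namedtuple('Record', ['lat', 'lon'])
records = [Record(10.0, 20.0), Record(45.0, -120.0), Record(-30.0, 150.0)]

# Keep records between 5 and 60 degrees of (0, 0).
nearby = distaz_query(records, deg=(0.0, 0.0, 5.0, 60.0))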
Example #17
    def _get_seismograms_sanity_checks(self, source, receiver, components,
                                       kind, dt):
        """
        Common sanity checks for the get_seismograms method. Also parses
        source and receiver objects if necessary.

        :param source: instaseis.Source or instaseis.ForceSource object
        :type source: :class:`instaseis.source.Source` or
            :class:`instaseis.source.ForceSource`
        :param receiver: instaseis.Receiver object
        :type receiver: :class:`instaseis.source.Receiver`
        :param components: a tuple containing any combination of the
            strings ``"Z"``, ``"N"``, ``"E"``, ``"R"``, and ``"T"``
        :param kind: 'displacement', 'velocity' or 'acceleration'
        """
        if dt is not None:
            if dt <= 0.0:
                raise ValueError("dt must be bigger than 0.")
            elif dt > self.info.dt:
                raise ValueError(
                    "The database is sampled with a sample spacing of %.3f "
                    "seconds. You must not pass a 'dt' larger than that as "
                    "that would be a downsampling operation which Instaseis "
                    "does not do." % self.info.dt)

        # Attempt to parse them if the types are not correct.
        if not isinstance(source, Source) and \
                not isinstance(source, ForceSource):
            source = Source.parse(source)
        if not isinstance(receiver, Receiver):
            # This only works in the special case of one station, otherwise
            # it has to be called more than once.
            rec = Receiver.parse(receiver)
            if len(rec) != 1:
                raise ValueError("Receiver object/file contains multiple "
                                 "stations. Please parse outside the "
                                 "get_seismograms() function and call in a "
                                 "loop.")
            receiver = rec[0]

        if kind not in ['displacement', 'velocity', 'acceleration']:
            raise ValueError("unknown kind '%s'" % (kind,))

        for comp in components:
            if comp not in ["N", "E", "Z", "R", "T"]:
                raise ValueError("Invalid component: %s" % comp)

        if self.info.is_reciprocal:
            if receiver.depth_in_m is not None:
                warnings.warn('Receiver depth cannot be changed when reading '
                              'from reciprocal DB. Using depth from the DB.')

            if any(comp in components for comp in ['N', 'E', 'R', 'T']) and \
                    "horizontal" not in self.info.components:
                raise ValueError("vertical component only DB")

            if 'Z' in components and "vertical" not in self.info.components:
                raise ValueError("horizontal component only DB")

        else:
            if source.depth_in_m is not None:
                warnings.warn('Source depth cannot be changed when reading '
                              'from forward DB. Using depth from the DB.')

        # Make sure that the source is within the domain.
        if self.info.is_reciprocal and source.depth_in_m is not None:
            src_radius = self.info.planet_radius - source.depth_in_m
            if src_radius < self.info.min_radius:
                msg = (
                    "Source too deep. Source would be located at a radius of "
                    "%.1f meters. The database supports source radii from "
                    "%.1f to %.1f meters." % (src_radius, self.info.min_radius,
                                              self.info.max_radius))
                raise ValueError(msg)
            elif src_radius > self.info.max_radius:
                msg = (
                    "Source is too shallow. Source would be located at a "
                    "radius of %.1f meters. The database supports source "
                    "radii from %.1f to %.1f meters." % (
                        src_radius, self.info.min_radius,
                        self.info.max_radius))
                raise ValueError(msg)
        elif not self.info.is_reciprocal and receiver.depth_in_m is not None:
            rec_radius = self.info.planet_radius - receiver.depth_in_m
            if rec_radius < self.info.min_radius:
                msg = (
                    "Receiver too deep. Receiver would be located at a radius "
                    "of %.1f meters. The database supports receiver radii "
                    "from %.1f to %.1f meters." % (
                        rec_radius, self.info.min_radius,
                        self.info.max_radius))
                raise ValueError(msg)
            elif rec_radius > self.info.max_radius:
                msg = (
                    "Receiver is too shallow. Receiver would be located at a "
                    "radius of %.1f meters. The database supports receiver "
                    "radii from %.1f to %.1f meters." % (
                        rec_radius, self.info.min_radius,
                        self.info.max_radius))
                raise ValueError(msg)

        d = locations2degrees(source.latitude, source.longitude,
                              receiver.latitude, receiver.longitude)
        if not self.info.min_d <= d <= self.info.max_d:
            raise ValueError(
                'Epicentral distance is %.1f but should be in [%.1f, '
                '%.1f].' % (d, self.info.min_d, self.info.max_d))

        return source, receiver
Example #18
def _distance(lat1, lon1, lat2, lon2):
    """
    The distance (in degrees) between two points on the sphere.
    """
    return locations2degrees(lat1, lon1, lat2, lon2)
Example #19
    def on_stations_listWidget_currentItemChanged(self, current, previous):
        if current is None:
            return

        self._reset_all_plots()

        try:
            wave = self.comm.query.get_matching_waveforms(
                self.current_event, self.current_iteration,
                self.current_station)
        except Exception as e:
            for component in ["Z", "N", "E"]:
                plot_widget = getattr(self.ui, "%s_graph" % component.lower())
                plot_widget.addItem(pg.TextItem(
                    text=str(e), anchor=(0.5, 0.5),
                    color=(200, 0, 0)))
            return

        event = self.comm.events.get(self.current_event)

        great_circle_distance = locations2degrees(
            event["latitude"], event["longitude"],
            wave.coordinates["latitude"], wave.coordinates["longitude"])
        tts = taupy_model.get_travel_times(
            source_depth_in_km=event["depth_in_km"],
            distance_in_degree=great_circle_distance)

        windows_for_station = \
            self.current_window_manager.get_windows_for_station(
                self.current_station)

        for component in ["Z", "N", "E"]:
            plot_widget = getattr(self.ui, "%s_graph" % component.lower())
            data_tr = [tr for tr in wave.data
                       if tr.stats.channel[-1].upper() == component]
            if data_tr:
                tr = data_tr[0]
                plot_widget.data_id = tr.id
                times = tr.times()
                plot_widget.plot(times, tr.data, pen="k")
            else:
                plot_widget.data_id = None
            synth_tr = [_i for _i in wave.synthetics
                        if _i.stats.channel[-1].upper() == component]
            if synth_tr:
                tr = synth_tr[0]
                times = tr.times()
                plot_widget.plot(times, tr.data, pen="r")

            if data_tr or synth_tr:
                for tt in tts:
                    if tt.time >= times[-1]:
                        continue
                    if tt.name[0].lower() == "p":
                        pen = "#008c2866"
                    else:
                        pen = "#95000066"
                    plot_widget.addLine(x=tt.time, pen=pen, z=-10)

            plot_widget.autoRange()

            window = [_i for _i in windows_for_station
                      if _i.channel_id[-1].upper() == component]
            if window:
                plot_widget.windows = window[0]
                for win in window[0].windows:
                    WindowLinearRegionItem(win, event, parent=plot_widget)

        self._update_raypath(wave.coordinates)
Example #20
def arc_available(input_dics, event, target_path):
    """
    check the availability of ArcLink stations
    :param input_dics:
    :param event:
    :param target_path:
    :return:
    """
    print("check the availability: ArcLink")

    client_arclink = Client_arclink(user=input_dics['username_arclink'],
                                    host=input_dics['host_arclink'],
                                    port=input_dics['port_arclink'],
                                    password=input_dics['password_arclink'],
                                    timeout=input_dics['arc_avai_timeout'])

    if hasattr(client_arclink, 'get_inventory'):
        arclink_get_inventory = client_arclink.get_inventory
    elif hasattr(client_arclink, 'getInventory'):
        arclink_get_inventory = client_arclink.getInventory

    sta_arc = []
    nets_req = [x.strip() for x in input_dics['net'].split(',')]
    stas_req = [x.strip() for x in input_dics['sta'].split(',')]
    locs_req = [x.strip() for x in input_dics['loc'].split(',')]
    chas_req = [x.strip() for x in input_dics['cha'].split(',')]
    for net_req in nets_req:
        for sta_req in stas_req:
            for loc_req in locs_req:
                for cha_req in chas_req:
                    try:
                        inventories = arclink_get_inventory(
                            network=net_req,
                            station=sta_req,
                            location=loc_req,
                            channel=cha_req,
                            starttime=UTCDateTime(event['t1']),
                            endtime=UTCDateTime(event['t2']),
                            min_latitude=input_dics['mlat_rbb'],
                            max_latitude=input_dics['Mlat_rbb'],
                            min_longitude=input_dics['mlon_rbb'],
                            max_longitude=input_dics['Mlon_rbb'])

                        for inv_key in inventories.keys():
                            netsta = inv_key.split('.')
                            if len(netsta) == 4:
                                sta = '%s.%s' % (netsta[0], netsta[1])
                                if not inventories[sta]['depth']:
                                    inventories[sta]['depth'] = 0.0
                                st_id = '%s_%s_%s_%s' % (netsta[0],
                                                         netsta[1],
                                                         netsta[2],
                                                         netsta[3])
                                sta_arc.append([netsta[0], netsta[1],
                                                netsta[2], netsta[3],
                                                inventories[sta]['latitude'],
                                                inventories[sta]['longitude'],
                                                inventories[sta]['elevation'],
                                                inventories[sta]['depth'],
                                                'ARCLINK', st_id, 'NA', 'NA'])
                        if input_dics['lon_cba'] and input_dics['lat_cba']:
                            index_rm = []
                            lat1 = float(input_dics['lat_cba'])
                            lon1 = float(input_dics['lon_cba'])
                            for ai in range(len(sta_arc)):
                                dist = locations2degrees(lat1, lon1,
                                                         float(sta_arc[ai][4]),
                                                         float(sta_arc[ai][5]))
                                if not input_dics['mr_cba'] <= dist <= \
                                        input_dics['Mr_cba']:
                                    index_rm.append(ai)
                            index_rm.sort(reverse=True)
                            # Delete from the end so earlier indices
                            # stay valid.
                            for ri in index_rm:
                                del sta_arc[ri]

                    except Exception as error:
                        exc_file = open(os.path.join(target_path, 'info',
                                                     'exception'), 'at+')
                        ee = 'availability -- arclink -- %s\n' % error
                        exc_file.writelines(ee)
                        exc_file.close()
                        print('ERROR: %s' % ee)
                        return []

    if len(sta_arc) == 0:
        sta_arc.append([])
    sta_arc.sort()
    return sta_arc
Example #21
def sachdr2assoc(header, pickmap=None):
    """
    Takes a sac header dictionary, and produces a list of up to 10
    Assoc instances. Header->phase mappings follow SAC2000, i.e.:

    * t0: P
    * t1: Pn
    * t2: Pg
    * t3: S
    * t4: Sn
    * t5: Sg
    * t6: Lg
    * t7: LR
    * t8: Rg
    * t9: pP

    An alternate mapping for some or all picks can be supplied, however,
    as a dictionary of strings in the above form.

    Note: arid values will not be filled in, so do:
    >>> for assoc in kbio.tables['assoc']:
    ...     assoc.arid = lastarid + 1
    ...     lastarid += 1

    """
    pick2phase = {'t0': 'P', 't1': 'Pn', 't2': 'Pg', 't3': 'S',
                  't4': 'Sn', 't5': 'Sg', 't6': 'Lg', 't7': 'LR', 't8': 'Rg',
                  't9': 'pP'}

    # overwrite defaults with supplied map
    if pickmap:
        pick2phase.update(pickmap)

    # geographic relations
    # obspy.read tries to calculate these values if lcalda is True and the
    # needed header info is there, so we only need to try if lcalda is False.
    # XXX: I just calculate it if no values are currently filled in.
    sac_assoc = [('az', 'esaz'),
                 ('baz', 'seaz'),
                 ('gcarc', 'delta')]

    assocdict = AttribDict()
    for hdr, col in sac_assoc:
        val = header.get(hdr, None)
        assocdict[col] = val if val != SACDEFAULT[hdr] else None

    # Overwrite if any are None (the dict itself is never empty, so test
    # the values rather than the dict's truthiness).
    if any(assocdict[col] is None for _, col in sac_assoc):
        try:
            delta = geod.locations2degrees(header['stla'], header['stlo'],
                                           header['evla'], header['evlo'])
            m, seaz, esaz = geod.gps2dist_azimuth(header['stla'],
                                                  header['stlo'],
                                                  header['evla'],
                                                  header['evlo'])
            assocdict['esaz'] = esaz
            assocdict['seaz'] = seaz
            assocdict['delta'] = delta
        except (ValueError, TypeError):
            # some sac header values are None
            pass

    if header.get('kstnm', None):
        assocdict['sta'] = header['kstnm']

    orid = header.get('norid', None)
    assocdict['orid'] = orid if orid != SACDEFAULT['norid'] else None

    # now, do the phase arrival mappings
    # for each pick in hdr, make a separate dictionary containing assocdict plus
    # the new phase info.
    assocs = []
    for key in pick2phase:
        kkey = 'k' + key
        # if there's a value in t[0-9]
        if header.get(key, None) not in (SACDEFAULT[key], None):
            # if the phase name kt[0-9] is null
            if header[kkey] == SACDEFAULT[kkey]:
                # take it from the map
                iassoc = {'phase': pick2phase[key]}
            else:
                # take it directly
                iassoc = {'phase': header[kkey]}

            iassoc.update(assocdict)
            assocs.append(iassoc)

    return assocs
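A sketch of feeding this function a SAC-style header dict; the values are made up, and SACDEFAULT is the module-level defaults table referenced above:

header = {'kstnm': 'ANMO', 'stla': 34.95, 'stlo': -106.46,
          'evla': 36.1, 'evlo': -117.85,
          't0': 12.3, 'kt0': SACDEFAULT['kt0'],  # unnamed pick -> mapped 'P'
          'norid': 1}
assocs = sachdr2assoc(header)  # one Assoc dict with phase 'P'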
Example #22
    def update(self, force=False):

        try:
            self._plot_receiver()
            self._plot_event()
        except AttributeError:
            return

        if (
            not bool(self.ui.auto_update_check_box.checkState())
            and self.ui.finsource_tab.currentIndex() == 1
            and not force
            and self.st_copy is None
        ):
            return

        components = ["z", "n", "e"]
        components_map = {0: ("Z", "N", "E"), 1: ("Z", "R", "T")}

        components_choice = int(self.ui.components_combo.currentIndex())

        label_map = {
            0: {"z": "vertical", "n": "north", "e": "east"},
            1: {"z": "vertical", "n": "radial", "e": "transverse"},
        }

        for component in components:
            p = getattr(self.ui, "%s_graph" % component)
            p.setTitle(label_map[components_choice][component].capitalize() + " component")

        if self.ui.finsource_tab.currentIndex() == 0:
            src_latitude = self.source.latitude
            src_longitude = self.source.longitude
            src_depth_in_m = self.source.depth_in_m
        else:
            src_latitude = self.finite_source.hypocenter_latitude
            src_longitude = self.finite_source.hypocenter_longitude
            src_depth_in_m = self.finite_source.hypocenter_depth_in_m

        rec = self.receiver
        try:
            # Grab resampling settings from the UI.
            if bool(self.ui.resample_check_box.checkState()):
                dt = float(self.ui.resample_factor.value())
                dt = self.instaseis_db.info.dt / dt
            else:
                dt = None
            if self.ui.finsource_tab.currentIndex() == 0:
                st = self.instaseis_db.get_seismograms(
                    source=self.source, receiver=self.receiver, dt=dt, components=components_map[components_choice]
                )
            elif (
                not bool(self.ui.auto_update_check_box.checkState())
                and self.ui.finsource_tab.currentIndex() == 1
                and not force
            ):
                st = self.st_copy.copy()
            else:
                prog_diag = QtGui.QProgressDialog("Calculating", "Cancel", 0, len(self.finite_source), self)
                prog_diag.setWindowModality(QtCore.Qt.WindowModal)
                prog_diag.setMinimumDuration(0)

                def get_prog_fct():
                    def set_value(value, count):
                        prog_diag.setValue(value)
                        if prog_diag.wasCanceled():
                            return True

                    return set_value

                prog_diag.setValue(0)
                st = self.instaseis_db.get_seismograms_finite_source(
                    sources=self.finite_source,
                    receiver=self.receiver,
                    dt=dt,
                    components=("Z", "N", "E"),
                    progress_callback=get_prog_fct(),
                )
                prog_diag.setValue(len(self.finite_source))
                if not st:
                    return

                baz = geodetics.gps2dist_azimuth(
                    self.finite_source.CMT.latitude, self.finite_source.CMT.longitude, rec.latitude, rec.longitude
                )[2]
                self.st_copy = st.copy()
                st.rotate("NE->RT", baz)
                st += self.st_copy
                self.st_copy = st.copy()

            if self.ui.finsource_tab.currentIndex() == 1 and bool(self.ui.plot_CMT_check_box.checkState()):
                st_cmt = self.instaseis_db.get_seismograms(
                    source=self.finite_source.CMT,
                    receiver=self.receiver,
                    dt=dt,
                    components=components_map[components_choice],
                    reconvolve_stf=True,
                    remove_source_shift=False,
                )
            else:
                st_cmt = None

            # check filter values from the UI
            zp = bool(self.ui.zero_phase_check_box.checkState())
            if bool(self.ui.lowpass_check_box.checkState()):
                try:
                    freq = 1.0 / float(self.ui.lowpass_period.value())
                    st.filter("lowpass", freq=freq, zerophase=zp)
                    if st_cmt is not None:
                        st_cmt.filter("lowpass", freq=freq, zerophase=zp)
                except ZeroDivisionError:
                    # this happens when typing in the lowpass_period box
                    pass

            if bool(self.ui.highpass_check_box.checkState()):
                try:
                    freq = 1.0 / float(self.ui.highpass_period.value())
                    st.filter("highpass", freq=freq, zerophase=zp)
                    if st_cmt is not None:
                        st_cmt.filter("highpass", freq=freq, zerophase=zp)
                except ZeroDivisionError:
                    # this happens when typing in the highpass_period box
                    pass

        except AttributeError:
            return

        if bool(self.ui.tt_times.checkState()):
            great_circle_distance = geodetics.locations2degrees(
                src_latitude, src_longitude, rec.latitude, rec.longitude
            )
            self.tts = tau_model.get_travel_times(
                source_depth_in_km=src_depth_in_m / 1000.0, distance_in_degree=great_circle_distance
            )

        for ic, component in enumerate(components):
            plot_widget = getattr(self.ui, "%s_graph" % component.lower())
            plot_widget.clear()
            tr = st.select(component=components_map[components_choice][ic])[0]
            times = tr.times()
            plot_widget.plot(times, tr.data, pen="k")
            plot_widget.ptp = tr.data.ptp()
            if st_cmt is not None:
                tr = st_cmt.select(component=components_map[components_choice][ic])[0]
                times = tr.times()
                plot_widget.plot(times, tr.data, pen="r")

            if bool(self.ui.tt_times.checkState()):
                tts = []
                for tt in self.tts:
                    if tt.time >= times[-1]:
                        continue
                    tts.append(tt)
                    if tt.name[0].lower() == "p":
                        pen = "#008c2866"
                    else:
                        pen = "#95000066"
                    plot_widget.addLine(x=tt.time, pen=pen, z=-10)
                self.tts = tts
        self.set_info()
Example #23
def f_vespagram_theoretical_arrivals(st, origin, smin, smax, ssteps, baz, winlen):
    '''
    Plots the F-statistic vespagram for a seismic array over a given
    slowness range, for a single backazimuth. Also plots theoretical
    arrival times and slownesses for each phase.

    The F-statistic of the beam, calculated over a time window (length
    winlen) around each time step for each slowness, is plotted as a
    function of time (in s) and slowness (in s/km).

    Parameters
    ----------
    st : ObsPy Stream object
        Stream of SAC format seismograms for the seismic array, length
        K = no. of stations in array
    origin : ObsPy Origin object
        Origin of the event in question. Should contain the origin time
        of the earthquake and, if necessary, the depth and location.
    smin : float
        Minimum magnitude of slowness vector, in s/km
    smax : float
        Maximum magnitude of slowness vector, in s/km
    ssteps : int
        Number of steps between smin and smax for which to calculate the
        vespagram
    baz : float
        Backazimuth of slowness vector (i.e. angle from North back to the
        epicentre of the event)
    winlen : int
        Length of Hann window over which to calculate the power.
    '''
    
    starttime = st[0].stats.starttime
    tt_model = TauPyModel()
    
    # Arrivals are calculated from the information in origin.
    # Distance in degrees from source to receiver.
    delta = locations2degrees(origin.latitude, origin.longitude,
                              st[0].stats.sac.stla, st[0].stats.sac.stlo)
    arrivals = tt_model.get_travel_times(origin.depth/1000., delta)
    
    arrival_names = [arrival.name for arrival in arrivals]
    arrival_times = [origin.time + arrival.time - starttime for arrival in arrivals]
    arrival_slowness = [arrival.ray_param_sec_degree/G_KM_DEG for arrival in arrivals]
    
    plt.figure(figsize=(16, 8))

    vespagram = np.array([f_vespa(st, s, baz, winlen) for s in np.linspace(smin, smax, ssteps)])
    label = 'F'
    timestring = str(st[0].stats.starttime.datetime)
    title = timestring + ": " + label + " Vespagram"
    
    plt.contourf(st[0].times(), np.linspace(smin, smax, ssteps), vespagram[:, :])
    
    cb = plt.colorbar()
    cb.set_label(label)
    
    # Plot predicted arrivals
    plt.scatter(arrival_times, arrival_slowness, c='cyan', s=200, marker='+')
    
    plt.xlabel("Time (s)")
    plt.xlim(min(st[0].times()), max(st[0].times()))
    plt.ylim(smin, smax)
    
    # Thanks, Stack Overflow: http://stackoverflow.com/questions/5147112/matplotlib-how-to-put-individual-tags-for-a-scatter-plot
    for label, x, y in zip(arrival_names, arrival_times, arrival_slowness):
        plt.annotate(label, xy=(x, y), xytext=(-20, 20), textcoords='offset points', ha='right', va='bottom', 
                     bbox=dict(boxstyle='round,pad=0.5', fc='yellow', alpha=0.5), 
                     arrowprops=dict(arrowstyle='->', connectionstyle='arc3,rad=0'))
        
    plt.ylabel("Slowness (s / km)")
    plt.title(title)
Example #24
def select_windows(data_trace, synthetic_trace, event_latitude,
                   event_longitude, event_depth_in_km,
                   station_latitude, station_longitude, minimum_period,
                   maximum_period,
                   min_cc=0.10, max_noise=0.10, max_noise_window=0.4,
                   min_velocity=2.4, threshold_shift=0.30,
                   threshold_correlation=0.75, min_length_period=1.5,
                   min_peaks_troughs=2, max_energy_ratio=10.0,
                   min_envelope_similarity=0.2,
                   verbose=False, plot=False):
    """
    Window selection algorithm for picking windows suitable for misfit
    calculation based on phase differences.

    Returns a list of windows which might be empty due to various reasons.

    This function is really long and does a lot of things. For a more
    detailed description, please see the LASIF paper.

    :param data_trace: The data trace.
    :type data_trace: :class:`~obspy.core.trace.Trace`
    :param synthetic_trace: The synthetic trace.
    :type synthetic_trace: :class:`~obspy.core.trace.Trace`
    :param event_latitude: The event latitude.
    :type event_latitude: float
    :param event_longitude: The event longitude.
    :type event_longitude: float
    :param event_depth_in_km: The event depth in km.
    :type event_depth_in_km: float
    :param station_latitude: The station latitude.
    :type station_latitude: float
    :param station_longitude: The station longitude.
    :type station_longitude: float
    :param minimum_period: The minimum period of the data in seconds.
    :type minimum_period: float
    :param maximum_period: The maximum period of the data in seconds.
    :type maximum_period: float
    :param min_cc: Minimum normalised correlation coefficient of the
        complete traces.
    :type min_cc: float
    :param max_noise: Maximum relative noise level for the whole trace.
        Measured from maximum amplitudes before and after the first arrival.
    :type max_noise: float
    :param max_noise_window: Maximum relative noise level for individual
        windows.
    :type max_noise_window: float
    :param min_velocity: All arrivals later than those corresponding to the
        threshold velocity [km/s] will be excluded.
    :type min_velocity: float
    :param threshold_shift: Maximum allowable time shift within a window,
        as a fraction of the minimum period.
    :type threshold_shift: float
    :param threshold_correlation: Minimum normalised correlation coefficient
        within a window.
    :type threshold_correlation: float
    :param min_length_period: Minimum length of the time windows relative to
        the minimum period.
    :type min_length_period: float
    :param min_peaks_troughs: Minimum number of extrema in an individual
        time window (excluding the edges).
    :type min_peaks_troughs: float
    :param max_energy_ratio: Maximum energy ratio between data and
        synthetics within a time window. Don't make this too small!
    :type max_energy_ratio: float
    :param min_envelope_similarity: The minimum similarity of the envelopes of
        both data and synthetics. This essentially assures that the
        amplitudes of data and synthetics cannot diverge too much within a
        window. It is a bit like the inverse of the ratio of both envelopes,
        so a value of 0.2 makes sure neither amplitude can be more than 5
        times larger than the other.
    :type min_envelope_similarity: float
    :param verbose: No output by default.
    :type verbose: bool
    :param plot: Create a plot of the algorithm while it does its work.
    :type plot: bool
    """
    # Shortcuts to frequently accessed variables.
    data_starttime = data_trace.stats.starttime
    data_delta = data_trace.stats.delta
    dt = data_trace.stats.delta
    npts = data_trace.stats.npts
    synth = synthetic_trace.data
    data = data_trace.data
    times = data_trace.times()

    # Fill cache if necessary.
    if not TAUPY_MODEL_CACHE:
        from obspy.taup import TauPyModel  # NOQA
        TAUPY_MODEL_CACHE["model"] = TauPyModel("AK135")
    model = TAUPY_MODEL_CACHE["model"]

    # -------------------------------------------------------------------------
    # Geographical calculations and the time of the first arrival.
    # -------------------------------------------------------------------------
    dist_in_deg = geodetics.locations2degrees(station_latitude,
                                              station_longitude,
                                              event_latitude, event_longitude)
    dist_in_km = geodetics.calc_vincenty_inverse(
        station_latitude, station_longitude, event_latitude,
        event_longitude)[0] / 1000.0

    # Get only a couple of P phases which should be the first arrival
    # for every epicentral distance. It's quite a bit faster than calculating
    # the arrival times for every phase.
    # Assumes the first sample is the centroid time of the event.
    tts = model.get_travel_times(source_depth_in_km=event_depth_in_km,
                                 distance_in_degree=dist_in_deg,
                                 phase_list=["ttp"])
    # Sort just as a safety measure.
    tts = sorted(tts, key=lambda x: x.time)
    first_tt_arrival = tts[0].time

    # -------------------------------------------------------------------------
    # Window settings
    # -------------------------------------------------------------------------
    # Number of samples in the sliding window. Currently, the length of the
    # window is set to a multiple of the dominant period of the synthetics.
    # Make sure it is an uneven number; just to have a trivial midpoint
    # definition and one sample does not matter much in any case.
    window_length = int(round(float(2 * minimum_period) / dt))
    if not window_length % 2:
        window_length += 1

    # Use a Hanning window. No particular reason for it, but it's a
    # well-behaved window and has nice spectral properties.
    taper = np.hanning(window_length)

    # =========================================================================
    # check if whole seismograms are sufficiently correlated and estimate
    # noise level
    # =========================================================================

    # Overall Correlation coefficient.
    norm = np.sqrt(np.sum(data ** 2)) * np.sqrt(np.sum(synth ** 2))
    cc = np.sum(data * synth) / norm
    if verbose:
        _log_window_selection(data_trace.id,
                              "Correlation Coefficient: %.4f" % cc)

    # Estimate noise level from waveforms prior to the first arrival.
    idx_end = int(np.ceil((first_tt_arrival - 0.5 * minimum_period) / dt))
    idx_end = max(10, idx_end)
    idx_start = int(np.ceil((first_tt_arrival - 2.5 * minimum_period) / dt))
    idx_start = max(10, idx_start)

    if idx_start >= idx_end:
        idx_start = max(0, idx_end - 10)

    abs_data = np.abs(data)
    noise_absolute = abs_data[idx_start:idx_end].max()
    noise_relative = noise_absolute / abs_data.max()

    if verbose:
        _log_window_selection(data_trace.id,
                              "Absolute Noise Level: %e" % noise_absolute)
        _log_window_selection(data_trace.id,
                              "Relative Noise Level: %e" % noise_relative)

    # Basic global rejection criteria.
    accept_traces = True
    if (cc < min_cc) and (noise_relative > max_noise / 3.0):
        msg = "Correlation %.4f is below threshold of %.4f" % (cc, min_cc)
        if verbose:
            _log_window_selection(data_trace.id, msg)
        accept_traces = msg

    if noise_relative > max_noise:
        msg = "Noise level %.3f is above threshold of %.3f" % (
            noise_relative, max_noise)
        if verbose:
            _log_window_selection(data_trace.id, msg)
        accept_traces = msg

    # Calculate the envelope of both data and synthetics. This is to make sure
    # that the amplitude of both is not too different over time and is
    # used as another selector. Only calculated if the trace is generally
    # accepted as it is fairly slow.
    if accept_traces is True:
        data_env = obspy.signal.filter.envelope(data)
        synth_env = obspy.signal.filter.envelope(synth)

    # -------------------------------------------------------------------------
    # Initial Plot setup.
    # -------------------------------------------------------------------------
    # All the plot calls are interleaved. I realize this is really ugly but
    # the alternative would be to either have two functions (one with plots,
    # one without) or split the plotting function in various subfunctions,
    # neither of which are acceptable in my opinion. The impact on
    # performance is minimal if plotting is turned off: all imports are lazy
    # and a couple of conditionals are cheap.
    if plot:
        import matplotlib.pylab as plt  # NOQA
        import matplotlib.patheffects as PathEffects  # NOQA

        if accept_traces is True:
            plt.figure(figsize=(18, 12))
            plt.subplots_adjust(left=0.05, bottom=0.05, right=0.98, top=0.95,
                                wspace=None, hspace=0.0)
            grid = (31, 1)

            # Axes showing the data.
            data_plot = plt.subplot2grid(grid, (0, 0), rowspan=8)
        else:
            # Only show a single axis if the traces are not accepted.
            plt.figure(figsize=(18, 3))

        # Plot envelopes if needed.
        if accept_traces is True:
            plt.plot(times, data_env, color="black", alpha=0.5, lw=0.4,
                     label="data envelope")
            plt.plot(synthetic_trace.times(), synth_env, color="#e41a1c",
                     alpha=0.4, lw=0.5, label="synthetics envelope")

        plt.plot(times, data, color="black", label="data", lw=1.5)
        plt.plot(synthetic_trace.times(), synth, color="#e41a1c",
                 label="synthetics",  lw=1.5)

        # Symmetric around y axis.
        middle = data.mean()
        d_max, d_min = data.max(), data.min()
        r = max(d_max - middle, middle - d_min) * 1.1
        ylim = (middle - r, middle + r)
        xlim = (times[0], times[-1])
        plt.ylim(*ylim)
        plt.xlim(*xlim)

        offset = (xlim[1] - xlim[0]) * 0.005
        plt.vlines(first_tt_arrival, ylim[0], ylim[1], colors="#ff7f00", lw=2)
        plt.text(first_tt_arrival + offset,
                 ylim[1] - (ylim[1] - ylim[0]) * 0.02,
                 "first arrival", verticalalignment="top",
                 horizontalalignment="left", color="#ee6e00",
                 path_effects=[
                     PathEffects.withStroke(linewidth=3, foreground="white")])

        plt.vlines(first_tt_arrival - minimum_period / 2.0, ylim[0], ylim[1],
                   colors="#ff7f00", lw=2)
        plt.text(first_tt_arrival - minimum_period / 2.0 - offset,
                 ylim[0] + (ylim[1] - ylim[0]) * 0.02,
                 "first arrival - min period / 2", verticalalignment="bottom",
                 horizontalalignment="right", color="#ee6e00",
                 path_effects=[
                     PathEffects.withStroke(linewidth=3, foreground="white")])

        for velocity in [6, 5, 4, 3, min_velocity]:
            tt = dist_in_km / velocity
            plt.vlines(tt, ylim[0], ylim[1], colors="gray", lw=2)
            if velocity == min_velocity:
                hal = "right"
                o_s = -1.0 * offset
            else:
                hal = "left"
                o_s = offset
            plt.text(tt + o_s, ylim[0] + (ylim[1] - ylim[0]) * 0.02,
                     str(velocity) + " km/s", verticalalignment="bottom",
                     horizontalalignment=hal, color="0.15")
        plt.vlines(dist_in_km / min_velocity + minimum_period / 2.0,
                   ylim[0], ylim[1], colors="gray", lw=2)
        plt.text(dist_in_km / min_velocity + minimum_period / 2.0 - offset,
                 ylim[1] - (ylim[1] - ylim[0]) * 0.02,
                 "min surface velocity + min period / 2",
                 verticalalignment="top",
                 horizontalalignment="right", color="0.15", path_effects=[
                     PathEffects.withStroke(linewidth=3, foreground="white")])

        plt.hlines(noise_absolute, xlim[0], xlim[1], linestyle="--",
                   color="gray")
        plt.hlines(-noise_absolute, xlim[0], xlim[1], linestyle="--",
                   color="gray")
        plt.text(offset, noise_absolute + (ylim[1] - ylim[0]) * 0.01,
                 "noise level", verticalalignment="bottom",
                 horizontalalignment="left", color="0.15",
                 path_effects=[
                     PathEffects.withStroke(linewidth=3, foreground="white")])
        plt.legend(loc="lower right", fancybox=True, framealpha=0.5,
                   fontsize="small")
        plt.gca().xaxis.set_ticklabels([])

        # Plot the basic global information.
        ax = plt.gca()
        txt = (
            "Total CC Coeff: %.4f\nAbsolute Noise: %e\nRelative Noise: %.3f"
            % (cc, noise_absolute, noise_relative))
        ax.text(0.01, 0.95, txt, transform=ax.transAxes,
                fontdict=dict(fontsize="small", ha='left', va='top'),
                bbox=dict(boxstyle="round", fc="w", alpha=0.8))
        plt.suptitle("Channel %s" % data_trace.id, fontsize="larger")

        # Show the plot and return early if the traces were rejected.
        if accept_traces is not True:
            txt = "Rejected: %s" % accept_traces
            ax.text(0.99, 0.95, txt, transform=ax.transAxes,
                    fontdict=dict(fontsize="small", ha='right', va='top'),
                    bbox=dict(boxstyle="round", fc="red", alpha=1.0))
            plt.show()
    if accept_traces is not True:
        return []

    # Initialise masked arrays. The mask will be set to True where no
    # windows are chosen.
    time_windows = np.ma.ones(npts)
    time_windows.mask = False
    if plot:
        old_time_windows = time_windows.copy()

    # Elimination Stage 1: Eliminate everything earlier than half a period
    # before the first theoretical arrival and everything later than half a
    # period after the slowest expected surface-wave arrival.
    min_idx = int((first_tt_arrival - (minimum_period / 2.0)) / dt)
    max_idx = int(math.ceil((
        dist_in_km / min_velocity + minimum_period / 2.0) / dt))
    time_windows.mask[:min_idx + 1] = True
    time_windows.mask[max_idx:] = True
    if plot:
        plt.subplot2grid(grid, (8, 0), rowspan=1)
        _plot_mask(time_windows, old_time_windows,
                   name="TRAVELTIME ELIMINATION")
        old_time_windows = time_windows.copy()

    # -------------------------------------------------------------------------
    # Compute sliding time shifts and correlation coefficients for time
    # frames that passed the traveltime elimination stage.
    # -------------------------------------------------------------------------
    # Allocate arrays to collect the time dependent values.
    sliding_time_shift = np.ma.zeros(npts, dtype="float32")
    sliding_time_shift.mask = True
    max_cc_coeff = np.ma.zeros(npts, dtype="float32")
    max_cc_coeff.mask = True

    for start_idx, end_idx, midpoint_idx in _window_generator(npts,
                                                              window_length):
        if not min_idx < midpoint_idx < max_idx:
            continue

        # Slice windows. Create a copy to be able to taper without affecting
        # the original time series.
        data_window = data[start_idx: end_idx].copy() * taper
        synthetic_window = \
            synth[start_idx: end_idx].copy() * taper

        # Elimination Stage 2: Skip windows that have essentially no energy
        # to avoid instabilities. No windows can be picked in these.
        if synthetic_window.ptp() < synth.ptp() * 0.001:
            time_windows.mask[midpoint_idx] = True
            continue

        # Calculate the time shift, defined as the number of timesteps the
        # synthetics have to be shifted to align with the data. A value of
        # 2, for instance, means the data lags the synthetics by 2
        # timesteps.
        cc = np.correlate(data_window, synthetic_window, mode="full")

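        # np.correlate in "full" mode returns lags running from
        # -(window_length - 1) to +(window_length - 1); subtracting
        # (window_length - 1) from the argmax turns the array index into a
        # signed lag in samples.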
        time_shift = cc.argmax() - window_length + 1
        # Express the time shift in fraction of the minimum period.
        sliding_time_shift[midpoint_idx] = (time_shift * dt) / minimum_period

        # Normalized cross correlation.
        max_cc_value = cc.max() / np.sqrt((synthetic_window ** 2).sum() *
                                          (data_window ** 2).sum())
        max_cc_coeff[midpoint_idx] = max_cc_value

    if plot:
        plt.subplot2grid(grid, (9, 0), rowspan=1)
        _plot_mask(time_windows, old_time_windows,
                   name="NO ENERGY IN CC WINDOW")
        # Axes with the CC coeffs
        plt.subplot2grid(grid, (15, 0), rowspan=4)
        plt.hlines(0, xlim[0], xlim[1], color="lightgray")
        plt.hlines(-threshold_shift, xlim[0], xlim[1], color="gray",
                   linestyle="--")
        plt.hlines(threshold_shift, xlim[0], xlim[1], color="gray",
                   linestyle="--")
        plt.text(5, -threshold_shift - (2) * 0.03,
                 "threshold", verticalalignment="top",
                 horizontalalignment="left", color="0.15",
                 path_effects=[
                     PathEffects.withStroke(linewidth=3, foreground="white")])
        plt.plot(times, sliding_time_shift, color="#377eb8",
                 label="Time shift in fraction of minimum period", lw=1.5)
        plt.yticks([-0.75, 0, 0.75])
        plt.xticks([300, 600, 900, 1200, 1500, 1800])
        plt.ylim(-1.0, 1.0)
        plt.xlim(xlim)
        plt.gca().xaxis.set_ticklabels([])
        plt.legend(loc="lower right", fancybox=True, framealpha=0.5,
                   fontsize="small")

        plt.subplot2grid(grid, (10, 0), rowspan=4)
        plt.hlines(threshold_correlation, xlim[0], xlim[1], color="0.15",
                   linestyle="--")
        plt.hlines(1, xlim[0], xlim[1], color="lightgray")
        plt.hlines(0, xlim[0], xlim[1], color="lightgray")
        plt.text(5, threshold_correlation + (1.4) * 0.01,
                 "threshold", verticalalignment="bottom",
                 horizontalalignment="left", color="0.15",
                 path_effects=[
                     PathEffects.withStroke(linewidth=3, foreground="white")])
        plt.plot(times, max_cc_coeff, color="#4daf4a",
                 label="Maximum CC coefficient", lw=1.5)
        plt.ylim(-0.2, 1.2)
        plt.yticks([0, 0.5, 1])
        plt.xticks([300, 600, 900, 1200, 1500, 1800])
        plt.xlim(xlim)
        plt.gca().xaxis.set_ticklabels([])
        plt.legend(loc="lower right", fancybox=True, framealpha=0.5,
                   fontsize="small")

    # Elimination Stage 3: Mask all areas where the normalized cross
    # correlation coefficient is below threshold_correlation.
    if plot:
        old_time_windows = time_windows.copy()
    time_windows.mask[max_cc_coeff < threshold_correlation] = True
    if plot:
        plt.subplot2grid(grid, (14, 0), rowspan=1)
        _plot_mask(time_windows, old_time_windows,
                   name="CORRELATION COEFF THRESHOLD ELIMINATION")

    # Elimination Stage 4: Mask everything with an absolute travel time
    # shift of more than threshold_shift times the dominant period.
    if plot:
        old_time_windows = time_windows.copy()
    time_windows.mask[np.ma.abs(sliding_time_shift) > threshold_shift] = True
    if plot:
        plt.subplot2grid(grid, (19, 0), rowspan=1)
        _plot_mask(time_windows, old_time_windows,
                   name="TIME SHIFT THRESHOLD ELIMINATION")

    # Elimination Stage 5: Mark the area around every "travel time shift
    # jump" (based on the traveltime time difference) negative. The width of
    # the area is currently chosen to be a tenth of a dominant period to
    # each side.
    if plot:
        old_time_windows = time_windows.copy()
    sample_buffer = int(np.ceil(minimum_period / dt * 0.1))
    indices = np.ma.where(np.ma.abs(np.ma.diff(sliding_time_shift)) > 0.1)[0]
    for index in indices:
        time_windows.mask[index - sample_buffer: index + sample_buffer] = True
    if plot:
        plt.subplot2grid(grid, (20, 0), rowspan=1)
        _plot_mask(time_windows, old_time_windows,
                   name="TIME SHIFT JUMPS ELIMINATION")

    # Clip both envelopes to avoid dividing by very small numbers below.
    stacked = np.vstack([
        np.ma.clip(synth_env, synth_env.max() * min_envelope_similarity * 0.5,
                   synth_env.max()),
        np.ma.clip(data_env, data_env.max() * min_envelope_similarity * 0.5,
                   data_env.max())])
    # Ratio.
    ratio = stacked.min(axis=0) / stacked.max(axis=0)
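    # The ratio is close to 1 where data and synthetic envelopes have
    # similar amplitudes and drops towards the clip floor where one of them
    # dominates.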

    # Elimination Stage 6: Make sure the amplitudes of both don't vary too
    # much.
    if plot:
        old_time_windows = time_windows.copy()
    time_windows.mask[ratio < min_envelope_similarity] = True
    if plot:
        plt.subplot2grid(grid, (25, 0), rowspan=1)
        _plot_mask(time_windows, old_time_windows,
                   name="ENVELOPE AMPLITUDE SIMILARITY ELIMINATION")

    if plot:
        plt.subplot2grid(grid, (21, 0), rowspan=4)
        plt.hlines(min_envelope_similarity, xlim[0], xlim[1], color="gray",
                   linestyle="--")
        plt.text(5, min_envelope_similarity + (2) * 0.03,
                 "threshold", verticalalignment="bottom",
                 horizontalalignment="left", color="0.15",
                 path_effects=[
                 PathEffects.withStroke(linewidth=3, foreground="white")])
        plt.plot(times, ratio, color="#9B59B6",
                 label="Envelope amplitude similarity", lw=1.5)
        plt.yticks([0, 0.2, 0.4, 0.6, 0.8, 1.0])
        plt.ylim(0.05, 1.05)
        plt.xticks([300, 600, 900, 1200, 1500, 1800])
        plt.xlim(xlim)
        plt.gca().xaxis.set_ticklabels([])
        plt.legend(loc="lower right", fancybox=True, framealpha=0.5,
                   fontsize="small")

    # First minimum window length elimination stage. This is cheap and if
    # not done it can easily destabilize the peak-and-trough marching stage
    # which would then have to deal with way more edge cases.
    if plot:
        old_time_windows = time_windows.copy()
    min_length = \
        min(minimum_period / dt * min_length_period, maximum_period / dt)
    for i in flatnotmasked_contiguous(time_windows):
        # Throw away all windows shorter than min_length_period times the
        # dominant period.
        if (i.stop - i.start) < min_length:
            time_windows.mask[i.start: i.stop] = True
    if plot:
        plt.subplot2grid(grid, (26, 0), rowspan=1)
        _plot_mask(time_windows, old_time_windows,
                   name="MINIMUM WINDOW LENGTH ELIMINATION 1")

    # -------------------------------------------------------------------------
    # Peak and trough marching algorithm
    # -------------------------------------------------------------------------
    final_windows = []
    for i in flatnotmasked_contiguous(time_windows):
        # Cut respective windows.
        window_npts = i.stop - i.start
        synthetic_window = synth[i.start: i.stop]
        data_window = data[i.start: i.stop]

        # Find extrema in the data and the synthetics.
        data_p, data_t = find_local_extrema(data_window)
        synth_p, synth_t = find_local_extrema(synthetic_window)

        window_mask = np.ones(window_npts, dtype="bool")

        closest_peaks = find_closest(data_p, synth_p)
        diffs = np.diff(closest_peaks)

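        # Wherever the closest-peak assignment advances by exactly one
        # (diffs == 1), data and synthetic extrema pair up one-to-one;
        # unmask the span between the neighbouring synthetic peaks.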
        for idx in np.where(diffs == 1)[0]:
            if idx > 0:
                start = synth_p[idx - 1]
            else:
                start = 0
            if idx < (len(synth_p) - 1):
                end = synth_p[idx + 1]
            else:
                end = -1
            window_mask[start: end] = False

        closest_troughs = find_closest(data_t, synth_t)
        diffs = np.diff(closest_troughs)

        for idx in np.where(diffs == 1)[0]:
            if idx > 0:
                start = synth_t[idx - 1]
            else:
                start = 0
            if idx < (len(synth_t) - 1):
                end = synth_t[idx + 1]
            else:
                end = -1
            window_mask[start: end] = False

        window_mask = np.ma.masked_array(window_mask,
                                         mask=window_mask)

        if window_mask.mask.all():
            continue

        for j in flatnotmasked_contiguous(window_mask):
            final_windows.append((i.start + j.start, i.start + j.stop))

    if plot:
        old_time_windows = time_windows.copy()
    time_windows.mask[:] = True
    for start, stop in final_windows:
        time_windows.mask[start:stop] = False
    if plot:
        plt.subplot2grid(grid, (27, 0), rowspan=1)
        _plot_mask(time_windows, old_time_windows,
                   name="PEAK AND TROUGH MARCHING ELIMINATION")

    # Loop through all the time windows, remove windows not satisfying the
    # minimum number of peaks and troughs per window. Acts mainly as a
    # safety guard.
    old_time_windows = time_windows.copy()
    for i in flatnotmasked_contiguous(old_time_windows):
        synthetic_window = synth[i.start: i.stop]
        data_window = data[i.start: i.stop]
        data_p, data_t = find_local_extrema(data_window)
        synth_p, synth_t = find_local_extrema(synthetic_window)
        if np.min([len(synth_p), len(synth_t), len(data_p), len(data_t)]) < \
                min_peaks_troughs:
            time_windows.mask[i.start: i.stop] = True
    if plot:
        plt.subplot2grid(grid, (28, 0), rowspan=1)
        _plot_mask(time_windows, old_time_windows,
                   name="PEAK/TROUGH COUNT ELIMINATION")

    # Second minimum window length elimination stage.
    if plot:
        old_time_windows = time_windows.copy()
    min_length = \
        min(minimum_period / dt * min_length_period, maximum_period / dt)
    for i in flatnotmasked_contiguous(time_windows):
        # Throw away all windows shorter than min_length_period times the
        # dominant period.
        if (i.stop - i.start) < min_length:
            time_windows.mask[i.start: i.stop] = True
    if plot:
        plt.subplot2grid(grid, (29, 0), rowspan=1)
        _plot_mask(time_windows, old_time_windows,
                   name="MINIMUM WINDOW LENGTH ELIMINATION 2")

    # Final elimination stage: reject windows whose data and synthetic
    # energies differ too much or whose amplitudes never rise above the
    # noise level.
    final_windows = []
    for j in flatnotmasked_contiguous(time_windows):
        # Again assert a certain minimal length.
        if (j.stop - j.start) < min_length:
            continue

        # Compare the energy in the data window and the synthetic window.
        data_energy = (data[j.start: j.stop] ** 2).sum()
        synth_energy = (synth[j.start: j.stop] ** 2).sum()
        energies = sorted([data_energy, synth_energy])
        if energies[1] > max_energy_ratio * energies[0]:
            if verbose:
                _log_window_selection(
                    data_trace.id,
                    "Deselecting window due to energy ratio between "
                    "data and synthetics.")
            continue

        # Check that the amplitudes in the data rise above the noise level.
        if noise_absolute / data[j.start: j.stop].ptp() > \
                max_noise_window:
            if verbose:
                _log_window_selection(
                    data_trace.id,
                    "Deselecting window because its amplitudes do not rise "
                    "above the noise level.")
            continue
        final_windows.append((j.start, j.stop))

    if plot:
        old_time_windows = time_windows.copy()
    time_windows.mask[:] = True
    for start, stop in final_windows:
        time_windows.mask[start:stop] = False

    if plot:
        plt.subplot2grid(grid, (30, 0), rowspan=1)
        _plot_mask(time_windows, old_time_windows,
                   name="LITTLE ENERGY ELIMINATION")

    if verbose:
        _log_window_selection(
            data_trace.id,
            "Done, Selected %i window(s)" % len(final_windows))

    # Final step is to convert the index value windows to actual times.
    windows = []
    for start, stop in final_windows:
        start = data_starttime + start * data_delta
        stop = data_starttime + stop * data_delta
        windows.append((start, stop))

    if plot:
        # Plot the final windows to the data axes.
        import matplotlib.transforms as mtransforms  # NOQA
        ax = data_plot
        trans = mtransforms.blended_transform_factory(ax.transData,
                                                      ax.transAxes)
        for start, stop in final_windows:
            ax.fill_between([start * data_delta, stop * data_delta], 0, 1,
                            facecolor="#CDDC39", alpha=0.5, transform=trans)

        plt.show()

    return windows
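A minimal, self-contained sketch (not part of the code above) of the sliding cross-correlation measurement the function relies on: np.correlate in "full" mode yields the lag at which two windows align best, and normalizing the peak by the windows' energies gives the maximum correlation coefficient. The pulse shapes and numbers are made up for illustration.

import numpy as np

dt = 0.5                                    # sample spacing in seconds
t = np.arange(0.0, 100.0, dt)
synth = np.exp(-((t - 50.0) / 2.0) ** 2)    # pulse centred at 50 s
data = np.exp(-((t - 53.0) / 2.0) ** 2)     # same pulse, arriving 3 s later

cc = np.correlate(data, synth, mode="full")
# Convert the argmax index into a signed lag: positive means the feature
# appears later in `data` than in `synth`.
shift = (cc.argmax() - (len(data) - 1)) * dt
max_cc = cc.max() / np.sqrt((data ** 2).sum() * (synth ** 2).sum())
print(shift, max_cc)  # -> 3.0 and a value close to 1.0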
Example #25
0
    def test_iris_example_queries_station(self):
        """
        Tests the (sometimes modified) example queries given on the IRIS
        web page.

        This test used to download files but that is almost impossible to
        keep up to date - thus it is now a bit smarter and tests the
        returned inventory in different ways.
        """
        client = self.client

        # Radial query.
        inv = client.get_stations(latitude=-56.1, longitude=-26.7,
                                  maxradius=15)
        self.assertGreater(len(inv.networks), 0)  # at least one network
        for net in inv:
            self.assertGreater(len(net.stations), 0)  # at least one station
            for sta in net:
                dist = locations2degrees(sta.latitude, sta.longitude,
                                         -56.1, -26.7)
                # small tolerance for WGS84.
                self.assertGreater(15.1, dist, "%s.%s" % (net.code,
                                                          sta.code))

        # Misc query.
        inv = client.get_stations(
            startafter=UTCDateTime("2003-01-07"),
            endbefore=UTCDateTime("2011-02-07"), minlatitude=15,
            maxlatitude=55, minlongitude=170, maxlongitude=-170, network="IM")
        self.assertGreater(len(inv.networks), 0)  # at least one network
        for net in inv:
            self.assertGreater(len(net.stations), 0)  # at least one station
            for sta in net:
                msg = "%s.%s" % (net.code, sta.code)
                self.assertGreater(sta.start_date, UTCDateTime("2003-01-07"),
                                   msg)
                if sta.end_date is not None:
                    self.assertGreater(UTCDateTime("2011-02-07"), sta.end_date,
                                       msg)
                self.assertGreater(sta.latitude, 14.9, msg)
                self.assertGreater(55.1, sta.latitude, msg)
                self.assertFalse(-170.1 <= sta.longitude <= 170.1, msg)
                self.assertEqual(net.code, "IM", msg)

        # Simple query
        inv = client.get_stations(
            starttime=UTCDateTime("2000-01-01"),
            endtime=UTCDateTime("2001-01-01"), net="IU", sta="ANMO")
        self.assertGreater(len(inv.networks), 0)  # at least one network
        for net in inv:
            self.assertGreater(len(net.stations), 0)  # at least one station
            for sta in net:
                self.assertGreater(UTCDateTime("2001-01-01"), sta.start_date)
                if sta.end_date is not None:
                    self.assertGreater(sta.end_date, UTCDateTime("2000-01-01"))
                self.assertEqual(net.code, "IU")
                self.assertEqual(sta.code, "ANMO")

        # Station wildcard query.
        inv = client.get_stations(
            starttime=UTCDateTime("2000-01-01"),
            endtime=UTCDateTime("2002-01-01"), network="IU", sta="A*",
            location="00")
        self.assertGreater(len(inv.networks), 0)  # at least one network
        for net in inv:
            self.assertGreater(len(net.stations), 0)  # at least one station
            for sta in net:
                self.assertGreater(UTCDateTime("2002-01-01"), sta.start_date)
                if sta.end_date is not None:
                    self.assertGreater(sta.end_date, UTCDateTime("2000-01-01"))
                self.assertEqual(net.code, "IU")
                self.assertTrue(sta.code.startswith("A"))
Example #26
0
        # Nested helper inside a test method (see Example #28): self refers
        # to the enclosing unittest.TestCase.
        def assertLoc(lat1, long1, lat2, long2, approx_distance):
            self.assertTrue(abs(math.radians(locations2degrees(
                lat1, long1, lat2, long2)) * 6371 - approx_distance) <= 20)
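The conversion behind this assertion deserves spelling out: an angular separation in degrees becomes a great-circle distance in kilometres via radians times the mean Earth radius of roughly 6371 km. A small stand-alone sketch, using the Nashville/Los Angeles coordinates that also appear in Example #28:

import math
from obspy.geodetics import locations2degrees

deg = locations2degrees(36.12, -86.67, 33.94, -118.40)
km = math.radians(deg) * 6371.0  # degrees -> radians -> kilometres
print(km)                        # roughly 2890 km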
Example #27
0
def data_request(client_name, cat_client_name, start, end, minmag, net=None,
                 scode="*", channels="*", minlat=None, maxlat=None,
                 minlon=None, maxlon=None, station_minlat=None,
                 station_maxlat=None, station_minlon=None,
                 station_maxlon=None, mindepth=None, maxdepth=None,
                 radialcenterlat=None, radialcenterlon=None, minrad=None,
                 maxrad=None, station_radialcenterlat=None,
                 station_radialcenterlon=None, station_minrad=None,
                 station_maxrad=None, azimuth=None, baz=False,
                 t_before_first_arrival=1, t_after_first_arrival=9,
                 savefile=False, file_format='SAC'):
	"""
	Searches in a given Database for seismic data. Restrictions in terms of starttime, endtime, network etc can be made.
	If data is found it returns a stream variable, with the waveforms, an inventory with all station and network information
	and a catalog with the event information.

	:param client_name: Name of the desired FDSN client; for a list of all clients see:
		                https://docs.obspy.org/tutorial/code_snippets/retrieving_data_from_datacenters.html
	:type  client_name: string

	:param cat_client_name: Name of the event catalog client, e.g. 'globalcmt'
	:type  cat_client_name: string

	:param start, end: starttime and endtime of the search window
	:type  start, end: UTCDateTime

	:param minmag: Minimum magnitude of event
	:type  minmag: float

	:param net: Network code for which to search data for
	:type  net: string

	:param scode: Station code for which to search data for
	:type  scode: string

	:param channels: Used channels of stations 
	:type  channels: string

	:param minlat, maxlat, minlon, maxlon: Coordinate window of interest for events
	:type : float

	:param mindepth, maxdepth: Depth range of events in km
	:type : float

	:param radialcenterlat, radialcenterlon: Center coordinates for a radial event search
	:type : float

	:param minrad, maxrad: Minimum and maximum radii for the radial event search
	:type : float

	:param azimuth: Desired azimuth range between event and station in degrees, as a list [minimum azimuth, maximum azimuth]
	:type  azimuth: list

	:param baz: Desired back-azimuth range between event and station in degrees, as a list [minimum back azimuth, maximum back azimuth]
	:type  baz: list

	:param t_before_first_arrival, t_after_first_arrival: Length of the seismograms: minutes before and
															minutes after the first arrival.
	:type  t_before_first_arrival, t_after_first_arrival: float, int

	:param savefile: If True, stream, inventory and catalog will be saved locally in the current directory.
	:type  savefile: bool

	:param file_format: File format of the data; for supported formats see: https://docs.obspy.org/packages/autogen/obspy.core.stream.Stream.write.html#obspy.core.stream.Stream.write
	:type  file_format: string

	:returns: list_of_stream, inventory, catalog
	:rtype: list, obspy.Inventory, obspy.Catalog



	### Example 1 ###

	from obspy import UTCDateTime
	from sipy.util.data_request import data_request

	start = UTCDateTime(2010,1,1,0,0)
	end = UTCDateTime(2010,12,31,0,0)
	minmag = 8
	station = '034A'
	list_of_stream, inventory, cat = data_request('IRIS', 'IRIS', start, end, minmag, net='TA', scode=station)
	
	st = list_of_stream[0]
	st = st.select(channel='BHZ')
	st.normalize()
	inv = inventory[0]

	st.plot()
	inv.plot()
	cat.plot()

	### Example 2 ###

	from obspy import UTCDateTime
	from sipy.util.data_request import data_request

	start = UTCDateTime(2010,1,1,0,0)
	end = UTCDateTime(2010,12,31,0,0)
	minmag = 8
	station = '034A'
	client = 'IRIS'
	cat_client = 'globalcmt'
	list_of_stream, inventory, cat = data_request(client, cat_client, start, end, minmag, net='TA', scode=station)
	
	st = list_of_stream[0]
	st = st.select(channel='BHZ')
	st.normalize()
	inv = inventory[0]

	st.plot()
	inv.plot()
	cat.plot()

	"""

	stream = Stream()
	streamall = []

	# Catalog search: the GCMT catalog is fetched with a separate, urllib
	# based helper; everything else goes through the FDSN client.

	if cat_client_name == 'globalcmt':
		catalog = request_gcmt(starttime=start, endtime=end, minmagnitude=minmag, mindepth=mindepth, maxdepth=maxdepth, minlatitude=minlat, maxlatitude=maxlat, minlongitude=minlon, maxlongitude=maxlon)
		client = Client(client_name)
	else:	
		client = Client(client_name)
		try:
			catalog = client.get_events(starttime=start, endtime=end, minmagnitude=minmag, mindepth=mindepth, maxdepth=maxdepth, latitude=radialcenterlat, longitude=radialcenterlon, minradius=minrad, maxradius=maxrad,minlatitude=minlat, maxlatitude=maxlat, minlongitude=minlon, maxlongitude=maxlon)

		except Exception:
			print("No events found for given parameters.")
			return

	print("Following events found: \n")
	print(catalog)
	m = TauPyModel(model="ak135")
	Plist = ["P", "Pdiff", "p"]
	for event in catalog:
		print("\n")
		print("########################################")
		print("Looking for available data for event: \n")
		print(event.short_str())
		print("\n")

		origin_t = event.origins[0].time
		station_stime = UTCDateTime(origin_t - 3600*24)
		station_etime = UTCDateTime(origin_t + 3600*24)

		try:
			inventory = client.get_stations(network=net, station=scode, level="station", starttime=station_stime, endtime=station_etime,
			 								minlatitude=station_minlat, maxlatitude=station_maxlat, minlongitude=station_minlon, maxlongitude=station_maxlon,
			 								latitude=station_radialcenterlat, longitude=station_radialcenterlon, minradius=station_minrad, maxradius=station_maxrad)
			print("Inventory found.")
		except Exception:
			print("No Inventory found for given parameters")
			return
		
		for network in inventory:

			elat = event.origins[0].latitude
			elon = event.origins[0].longitude
			depth = event.origins[0].depth / 1000.0

			array_fits = True
			if azimuth or baz:
				cog = center_of_gravity(network)
				slat = cog['latitude']
				slon = cog['longitude']
				epidist = locations2degrees(slat, slon, elat, elon)
				arrivaltime = m.get_travel_times(source_depth_in_km=depth, distance_in_degree=epidist,
				                                 phase_list=Plist)

				P_arrival_time = arrivaltime[0]

				Ptime = P_arrival_time.time
				tstart = UTCDateTime(event.origins[0].time + Ptime - t_before_first_arrival * 60)
				tend = UTCDateTime(event.origins[0].time + Ptime + t_after_first_arrival * 60)


				center = geometrical_center(inventory)
				clat = center['latitude']
				clon = center['longitude']
				if azimuth:
					print("Looking for events in the azimuth range of %f to %f" % (azimuth[0], azimuth[1]))
					center_az = gps2dist_azimuth(clat, clon, elat, elon)[1]
					# Out of bounds means outside [azimuth[0], azimuth[1]]
					# (assuming azimuth[0] <= azimuth[1]).
					if center_az > azimuth[1] or center_az < azimuth[0]:
						print("Geometrical center of array out of azimuth bounds, \nchecking if single stations fit")
						array_fits = False

				elif baz:
					print("Looking for events in the back azimuth range of %f to %f" % (baz[0], baz[1]))
					center_baz = gps2dist_azimuth(clat, clon, elat, elon)[2]
					if center_baz > baz[1] or center_baz < baz[0]:
						print("Geometrical center of array out of back azimuth bounds, \nchecking if single stations fit")
						array_fits = False

			# If array fits to azimuth/back azimuth or no azimuth/back azimuth is given
			no_of_stations = 0
			if array_fits:

				for station in network:

					epidist = locations2degrees(station.latitude, station.longitude, elat, elon)
					arrivaltime = m.get_travel_times(source_depth_in_km=depth, distance_in_degree=epidist,
					                                 phase_list=Plist)

					P_arrival_time = arrivaltime[0]

					Ptime = P_arrival_time.time
					tstart = UTCDateTime(event.origins[0].time + Ptime - t_before_first_arrival * 60)
					tend = UTCDateTime(event.origins[0].time + Ptime + t_after_first_arrival * 60)

					try:
						streamreq = client.get_waveforms(network=network.code, station=station.code, location='*', channel=channels, starttime=tstart, endtime=tend, attach_response=True)
						no_of_stations += 1
						print("Downloaded data for %i of %i available stations!" % (no_of_stations, network.selected_number_of_stations), end='\r')
						sys.stdout.flush()
						stream += streamreq
						try:
							if inventory_used:
								inventory_used += client.get_stations(network=net, station=scode, level="station", starttime=station_stime, endtime=station_etime, minlatitude=station_minlat, maxlatitude=station_maxlat, minlongitude=station_minlon, maxlongitude=station_maxlon, latitude=station_radialcenterlat, longitude=station_radialcenterlon, minradius=station_minrad, maxradius=station_maxrad)
						except NameError:
							# First successful download: create the inventory
							# of actually used stations.
							inventory_used = client.get_stations(network=net, station=scode, level="station", starttime=station_stime, endtime=station_etime, minlatitude=station_minlat, maxlatitude=station_maxlat, minlongitude=station_minlon, maxlongitude=station_maxlon, latitude=station_radialcenterlat, longitude=station_radialcenterlon, minradius=station_minrad, maxradius=station_maxrad)
					except Exception:
						continue


			# If not checking each station individually.
			else:
				for station in network:
					epidist = locations2degrees(station.latitude, station.longitude, elat, elon)
					arrivaltime = m.get_travel_times(source_depth_in_km=depth, distance_in_degree=epidist,
					                                 phase_list=Plist)


					P_arrival_time = arrivaltime[0]

					Ptime = P_arrival_time.time
					tstart = UTCDateTime(event.origins[0].time + Ptime - t_before_first_arrival * 60)
					tend = UTCDateTime(event.origins[0].time + Ptime + t_after_first_arrival * 60)

					fit = False
					if azimuth:
						stat_az = gps2dist_azimuth(station.latitude, station.longitude, elat, elon)[1]
						if azimuth[0] <= stat_az <= azimuth[1]:
							fit = True
					elif baz:
						stat_baz = gps2dist_azimuth(station.latitude, station.longitude, elat, elon)[2]
						if baz[0] <= stat_baz <= baz[1]:
							fit = True
					if fit:
						try:
							streamreq = client.get_waveforms(network=network.code, station=station.code, location='*', channel=channels, starttime=tstart, endtime=tend, attach_response=True)
							no_of_stations += 1
							print("Downloaded data for %i of %i available stations!" % (no_of_stations, network.selected_number_of_stations), end='\r')
							sys.stdout.flush()
							stream += streamreq
							try:
								if inventory_used:
									inventory_used += client.get_stations(network=net, station=scode, level="station", starttime=station_stime, endtime=station_etime, minlatitude=station_minlat, maxlatitude=station_maxlat, minlongitude=station_minlon, maxlongitude=station_maxlon, latitude=station_radialcenterlat, longitude=station_radialcenterlon, minradius=station_minrad, maxradius=station_maxrad)
							except NameError:
								inventory_used = client.get_stations(network=net, station=scode, level="station", starttime=station_stime, endtime=station_etime, minlatitude=station_minlat, maxlatitude=station_maxlat, minlongitude=station_minlon, maxlongitude=station_maxlon, latitude=station_radialcenterlat, longitude=station_radialcenterlon, minradius=station_minrad, maxradius=station_maxrad)
						except Exception:
							continue

		try:
			if invall:
				invall += inventory
		except NameError:
			invall = inventory

		attach_network_to_traces(stream, inventory)
		attach_coordinates_to_traces(stream, inventory, event)
		streamall.append(stream)
		stream = Stream()

	if savefile:
		stname = str(origin_t).split('.')[0] + ".MSEED"
		invname = stname + "_inv.xml"
		catname = stname + "_cat.xml"
		# stream is reset after every event, so write the stream of the
		# last processed event.
		streamall[-1].write(stname, format=file_format)
		inventory.write(invname, format="STATIONXML")
		catalog.write(catname, format="QUAKEML")

	plt.ion()
	#invall.plot()
	#catalog.plot()
	plt.ioff()
	inventory = invall
	list_of_stream = streamall
	return list_of_stream, inventory, catalog
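One caveat with the corrected range checks above: they assume azimuth[0] <= azimuth[1]. Azimuths wrap at 360 degrees, so a window such as [350, 10] needs special handling. A hypothetical helper (azimuth_in_range is not part of the original code) sketching one way to treat the wrap-around:

def azimuth_in_range(az, az_min, az_max):
    """Return True if az (degrees) lies within [az_min, az_max],
    treating the interval as wrapping at 360 degrees."""
    az, az_min, az_max = az % 360.0, az_min % 360.0, az_max % 360.0
    if az_min <= az_max:
        return az_min <= az <= az_max
    # The interval crosses north, e.g. [350, 10].
    return az >= az_min or az <= az_max

With such a helper, the in-range checks above reduce to lines like: if not azimuth_in_range(center_az, azimuth[0], azimuth[1]): array_fits = False.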
Example #28
0
    def test_locations2degrees(self):
        """
        Test the location 2 degree conversion.
        """
        # Inline method to avoid messy code.
        def assert_loc(lat1, long1, lat2, long2, approx_distance):
            self.assertTrue(abs(math.radians(locations2degrees(
                lat1, long1, lat2, long2)) * 6371 - approx_distance) <= 20)

        # Approximate values from the Great Circle Calculator:
        #   http://williams.best.vwh.net/gccalc.htm

        # Random location.
        assert_loc(36.12, -86.67, 33.94, -118.40, 2893)
        # Test several combinations of quadrants.
        assert_loc(11.11, 22.22, 33.33, 44.44, 3346)
        assert_loc(-11.11, -22.22, -33.33, -44.44, 3346)
        assert_loc(11.11, 22.22, -33.33, -44.44, 8596)
        assert_loc(-11.11, -22.22, 33.33, 44.44, 8596)
        assert_loc(11.11, -22.22, 33.33, -44.44, 3346)
        assert_loc(-11.11, 22.22, 33.33, 44.44, 5454)
        assert_loc(11.11, -22.22, 33.33, 44.44, 7177)
        assert_loc(11.11, 22.22, -33.33, 44.44, 5454)
        assert_loc(11.11, 22.22, 33.33, -44.44, 7177)
        # Test some extreme values.
        assert_loc(90, 0, 0, 0, 10018)
        assert_loc(180, 0, 0, 0, 20004)
        assert_loc(0, 90, 0, 0, 10018)
        assert_loc(0, 180, 0, 0, 20004)
        assert_loc(0, 0, 90, 0, 10018)
        assert_loc(0, 0, 180, 0, 20004)
        assert_loc(0, 0, 0, 90, 10018)
        assert_loc(0, 0, 0, 180, 20004)
        assert_loc(11, 55, 11, 55, 0)

        # test numpy inputs:
        # Inline method to avoid messy code.
        def assert_loc_np(lat1, long1, lat2, long2,
                          approx_distance, expected_output_len):
            loc2deg = locations2degrees(np.array(lat1),
                                        np.array(long1),
                                        np.array(lat2),
                                        np.array(long2))
            self.assertTrue((np.abs(np.radians(loc2deg) * 6371 -
                                    approx_distance) <= 20).all())
            self.assertTrue(np.isscalar(loc2deg)
                            if expected_output_len == 0 else
                            len(loc2deg) == expected_output_len)

        # Test just with random location (combining scalars and arrays).
        assert_loc_np(36.12, -86.67, 33.94, -118.40, 2893, 0)
        assert_loc_np([36.12, 36.12], -86.67, 33.94, -118.40,
                      2893, 2)
        assert_loc_np(36.12, [-86.67, -86.67], 33.94, -118.40,
                      2893, 2)
        assert_loc_np(36.12, -86.67, [33.94, 33.94], -118.40,
                      2893, 2)
        assert_loc_np(36.12, -86.67, 33.94, [-118.40, -118.40],
                      2893, 2)
        assert_loc_np([36.12, 36.12], [-86.67, -86.67], 33.94, -118.40,
                      2893, 2)
        assert_loc_np([36.12, 36.12], -86.67, [33.94, 33.94], -118.40,
                      2893, 2)
        assert_loc_np([36.12, 36.12], -86.67, 33.94, [-118.40, -118.40],
                      2893, 2)
        assert_loc_np([36.12, 36.12], [-86.67, -86.67], [33.94, 33.94],
                      -118.40, 2893, 2)
        assert_loc_np([36.12, 36.12], -86.67, [33.94, 33.94],
                      [-118.40, -118.40], 2893, 2)
        assert_loc_np(36.12, [-86.67, -86.67], [33.94, 33.94],
                      [-118.40, -118.40], 2893, 2)
        assert_loc_np([36.12, 36.12], [-86.67, -86.67], [33.94, 33.94],
                      [-118.40, -118.40], 2893, 2)

        # test numpy broadcasting (bad shapes)
        with self.assertRaises(ValueError):
            locations2degrees(1, 2, [3, 4], [5, 6, 7])
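For reference, a short usage sketch of the broadcasting behaviour these tests pin down: scalar inputs yield a scalar, arrays broadcast element-wise against scalars, and incompatible shapes raise a ValueError.

import numpy as np
from obspy.geodetics import locations2degrees

print(locations2degrees(36.12, -86.67, 33.94, -118.40))   # scalar result
print(locations2degrees(np.array([36.12, 0.0]), -86.67,
                        33.94, -118.40))                   # length-2 array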