Example #1
 def _azimuth(lat1, lon1, lat2, lon2):
     """
     Azimuth (in degrees) from point 1 to point 2.
     """
     _, azi, _ = gps2dist_azimuth(lat1, lon1, lat2, lon2)
     return azi
 def set_metadata(self):
     """
     Set the metadata for the station
     """
     stats = self.stream[0].stats
     self._starttime = stats.starttime
     self._station_code = stats.station
     if 'coordinates' not in stats:
         self._elevation = np.nan
         self._coordinates = (np.nan, np.nan)
         return
     lat = stats.coordinates.latitude
     lon = stats.coordinates.longitude
     if 'elevation' not in stats.coordinates or np.isnan(stats.coordinates.elevation):
         elev = 0
     else:
         elev = stats.coordinates.elevation
     self._elevation = elev
     self._coordinates = (lat, lon)
     if self.event is not None:
         event = self.event
         dist, _, _ = gps2dist_azimuth(lat, lon,
                                       event.latitude,
                                       event.longitude)
         self._epicentral_distance = dist / 1000
         if event.depth is not None:
             self._hypocentral_distance = distance(lat, lon, elev / 1000,
                                                   event.latitude,
                                                   event.longitude,
                                                   event.depth / 1000)
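For context, the bare ObsPy call that both helpers above wrap can be exercised on its own; the coordinates below are arbitrary placeholders, not values from this example.

from obspy.geodetics.base import gps2dist_azimuth

# hypothetical station and epicenter coordinates (decimal degrees)
sta_lat, sta_lon = 34.05, -118.25
evt_lat, evt_lon = 36.10, -117.80

# gps2dist_azimuth returns (distance in m, azimuth 1->2 in deg, azimuth 2->1 in deg)
dist_m, az, baz = gps2dist_azimuth(sta_lat, sta_lon, evt_lat, evt_lon)
print("epicentral distance: %.1f km" % (dist_m / 1000.0))
print("azimuth station->event: %.1f deg, back azimuth: %.1f deg" % (az, baz))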
Example #3
def pgd_regression(home,project_name,run_name,run_number,norm=2):
    '''
    Regress for PGD scaling law
    '''
    
    from numpy import genfromtxt,array,zeros,log10,expand_dims,ones,diag,c_
    from obspy.geodetics.base import gps2dist_azimuth
    #from l1 import l1
    #from cvxopt import matrix 
    from scipy.linalg import norm as vecnorm
    from numpy.linalg import lstsq
     
    # Read summary file
    summary_file=home+project_name+'/output/waveforms/'+run_name+'.'+run_number+'/_summary.'+run_name+'.'+run_number+'.txt'
    lonlat=genfromtxt(summary_file,usecols=[1,2])
    pgd=genfromtxt(summary_file,usecols=[6])*100
    # Get hypocenter or centroid
    event_log=home+project_name+'/output/ruptures/'+run_name+'.'+run_number+'.log'
    f=open(event_log,'r')
    loop_go=True
    while loop_go:
        line=f.readline()
        if 'Centroid (lon,lat,z[km])' in line:                
            s=line.split(':')[-1].replace('(','')
            s=s.replace(')','')
            hypo=array(s.split(',')).astype('float')
            loop_go=False       
        if 'Actual magnitude' in line:
            Mw=float(line.split(':')[-1].split(' ')[-1])
    #compute station to hypo distances
    d=zeros(len(lonlat))
    for k in range(len(lonlat)):
        d[k],az,baz=gps2dist_azimuth(lonlat[k,1],lonlat[k,0],hypo[1],hypo[0])
        d[k]=d[k]/1000
        
    #Run regression
    #W=ones(len(d))/vecnorm(log10(d))
    W=ones(len(d))
    #Define regression quantities
    dist=log10(d)
    data=log10(pgd)
    #make matrix of event weights
    W=diag(W)
    #Make matrix of data weights
    iall=ones((len(d),1))
    Mw_all=Mw*ones(len(d))
    G=c_[iall,expand_dims(Mw_all,1)*iall,expand_dims(Mw_all*dist,1)]
    #Run regression
    # log(PGD)=A+B*Mw+C*Mw*log(R)
    if norm==2:
        coefficients=lstsq(G,data)[0]
        A=coefficients[0] ; B=coefficients[1] ; C=coefficients[2]
    elif norm==1:
        # L1 regression needs the cvxopt matrix and l1 solver imports commented out above
        P=matrix(W.dot(G))
        q=matrix(W.dot(data))
        coefficients=array(l1(P,q))
        A=coefficients[0] ; B=coefficients[1] ; C=coefficients[2]
    
    return A,B,C
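A self-contained sketch of the L2 branch of this regression, log10(PGD) = A + B*Mw + C*Mw*log10(R); the distances and PGD values below are made up purely for illustration.

import numpy as np

# fabricated example data: hypocentral distances (km), PGD (cm), one event magnitude
d = np.array([25.0, 60.0, 120.0, 300.0])
pgd = np.array([12.0, 5.5, 2.4, 0.8])
Mw = 7.0

dist = np.log10(d)
data = np.log10(pgd)
Mw_all = Mw * np.ones(len(d))

# design matrix for log10(PGD) = A + B*Mw + C*Mw*log10(R)
G = np.c_[np.ones(len(d)), Mw_all, Mw_all * dist]
A, B, C = np.linalg.lstsq(G, data, rcond=None)[0]
print(A, B, C)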
Example #4
    def _check_catalog(self,time,lat,lon,timewindow,distwindow):
        """Check the GeoNet website to find the GeoNet ID for our event.

        :param time:
          Datetime of origin.
        :param lat:
          Latitude of origin.
        :param lon:
          Longitude of origin.
        :param timewindow:
          Time search window (seconds).  Events will be searched within time +/- timewindow.
        :param distwindow:
          Search radius, in km.
        :returns:
          Tuple of Event ID, Event Time.
        """
        stime = time - timedelta(seconds=NZCATWINDOW)
        etime = time + timedelta(seconds=NZCATWINDOW)
        url = CATBASE.replace('[START]',stime.strftime(TIMEFMT))
        url = url.replace('[END]',etime.strftime(TIMEFMT))
        try:
            fh = urllib.request.urlopen(url)
            data = fh.read().decode('utf-8')
            fh.close()
            lines = data.split('\n')
            vectors = []
            eidlist = []
            etimelist = []
            for line in lines[1:]:
                if not len(line.strip()):
                    break
                #time is column 2, longitude is column 4, latitude is column 5
                parts = line.split(',')
                eid = parts[0]
                etime = datetime.strptime(parts[2][0:19],TIMEFMT)
                elat = float(parts[5])
                elon = float(parts[4])
                if etime > time:
                    dt = etime - time
                else:
                    dt = time - etime
                nsecs = dt.days*86400 + dt.seconds
                dd,az1,az2 = gps2dist_azimuth(lat,lon,elat,elon)
                dd = dd/1000.0
                if nsecs <= timewindow and dd < distwindow:
                    vectors.append(np.sqrt(nsecs**2+dd**2))
                    eidlist.append(eid)
                    etimelist.append(etime)
            if len(vectors):
                idx = vectors.index(min(vectors))
                return (eidlist[idx],etimelist[idx])
        except Exception as msg:
            raise Exception('Could not access the GeoNet website - got error "%s"' % str(msg))
        return (None,None)
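The matching rule above collapses the time misfit (seconds) and distance misfit (km) into one number and keeps the closest candidate; a stand-alone sketch with fabricated candidate events:

import numpy as np

# fabricated candidates: (event id, |dt| in seconds, distance in km)
candidates = [('geonet-a', 4.0, 12.0), ('geonet-b', 20.0, 3.0), ('geonet-c', 2.0, 60.0)]
timewindow, distwindow = 30.0, 50.0

vectors, eidlist = [], []
for eid, nsecs, dd in candidates:
    if nsecs <= timewindow and dd < distwindow:
        vectors.append(np.sqrt(nsecs ** 2 + dd ** 2))
        eidlist.append(eid)

best = eidlist[vectors.index(min(vectors))] if vectors else None
print(best)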
Example #5
    def traceToAmps(self,traces=None,periods=[0.3,1.0,3.0]):
        """Convert a set of traces to peak ground motions, return as a DataFrame.

        :param traces:
          List of Obspy Trace objects.  Can be velocity or acceleration data.
        :param periods:
          Sequence of spectral periods at which pseudo-spectral peaks should be computed.
        :returns:
          DataFrame containing the following columns:
            - netid
            - name
            - code
            - loc
            - lat
            - lon
            - dist
            - source
            - insttype
            - commtype
            - intensity
            and then a number of intensity measure types, typically including:
            - pga
            - pgv
            - psa03
            - psa10
            - psa30
            and possibly a number of other pseudo-spectral periods.
        """
        pcolumns = [get_period_name(p) for p in periods]
        columns = ['netid','name','code','loc','lat','lon','dist','source','insttype','commtype','intensity','pga','pgv'] + pcolumns
        df = pd.DataFrame(data=None,columns=columns)
        for trace in traces:
            row = {}
            stationdict = self._getStationMetadata(trace)
            trace = self._calibrateTrace(trace)
            peaks = self._get_peaks(trace,periods)
            row['netid'] = stationdict['netid']
            row['name'] = stationdict['name']
            row['code'] = stationdict['code']
            row['channel'] = stationdict['channel']
            row['loc'] = stationdict['loc']
            row['lat'] = stationdict['lat']
            row['lon'] = stationdict['lon']
            row['dist'] = gps2dist_azimuth(self._origin['lat'],self._origin['lon'],row['lat'],row['lon'])[0]/1000.0
            row['source'] = stationdict['source']
            row['insttype'] = stationdict['insttype']
            row['commtype'] = stationdict['commtype']
            row['intensity'] = ''
            for key,value in peaks.items():
                row[key] = value
            df = df.append(row,ignore_index=True)
        return df
Example #6
def coords2azbazinc(stream, origin):
    """
    Returns azimuth, backazimuth and incidence angle from station coordinates
    given in first trace of stream and from event location specified in origin
    dictionary.
    """
    sta_coords = stream[0].stats.coordinates
    dist, bazim, azim = gps2dist_azimuth(sta_coords.latitude,
            sta_coords.longitude, float(origin.latitude),
            float(origin.longitude))
    elev_diff = sta_coords.elevation - float(origin.depth)
    inci = math.atan2(dist, elev_diff) * 180.0 / math.pi
    return azim, bazim, inci
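A stand-alone sketch of the same geometry without a Stream; station elevation and event depth are placeholders, assumed to both be in metres so the units match:

import math
from obspy.geodetics.base import gps2dist_azimuth

sta_lat, sta_lon, sta_elev = 47.0, 8.0, 500.0       # hypothetical station (elevation in m)
evt_lat, evt_lon, evt_depth = 46.5, 8.5, 10000.0    # hypothetical event (depth in m)

dist, bazim, azim = gps2dist_azimuth(sta_lat, sta_lon, evt_lat, evt_lon)
elev_diff = sta_elev - evt_depth
inci = math.atan2(dist, elev_diff) * 180.0 / math.pi   # 90 deg would be a horizontal ray
print(azim, bazim, inci)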
def get_nga_record_sequence_no(st, eq_name, distance_tolerance=50):
    """
    Returns the associated NGA record sequence number for a given StationStream.

    Args:
        st (gmprocess.stationstream.StationStream):
            Station stream to get record sequence number for.
        eq_name (str):
            Earthquake name for finding NGA record sequence numbers. Must
            match a value in the 'Earthquake Name' column of the file
            gmprocess/data/nga_w2_selected.csv.
        distance_tolerance (float):
            Distance tolerance (in meters) between StationStream location
            coordinates and the NGA location coordinates.
            Default is 50 meters.

    Returns:
        int: Matching record sequence number from NGA flatfile. Returns
            numpy.nan if record sequence number is not found.

    """

    df_nga = pd.read_csv(pkg_resources.resource_filename(
        'gmprocess', os.path.join('data', 'nga_w2_selected.csv')))

    nga_event = df_nga.loc[df_nga['Earthquake Name'] == eq_name]

    lat = st[0].stats.coordinates.latitude
    lon = st[0].stats.coordinates.longitude

    matched_records_nos = []
    for record_idx, record in nga_event.iterrows():
        dist = gps2dist_azimuth(
            lat, lon, record['Station Latitude'],
            record['Station Longitude'])[0]
        if dist < distance_tolerance:
            matched_records_nos.append(record['Record Sequence Number'])

    if len(matched_records_nos) > 1:
        logging.warning('Found multiple matching records.')
        return np.nan
    elif len(matched_records_nos) < 1:
        logging.warning('Did not find any matching records.')
        return np.nan
    else:
        return matched_records_nos[0]
Example #8
def combinations_in_dist_range(comb_list,stations,lle_df,min_distance,max_distance) :

    """Filters station combination list within a distance range
    :type stations: list with items of the form 'NET.STATION'
    :param stations: stations to be used if present
    :type lle_df: :class:`~pandas.DataFrame`
    :param lle_df: The dataframe that has as index the stations name and 3 columns
        lat, lon and ele.
    :type min_distance: float
    :param min_distance: minimum distance separation (km) of station combination to accept
    :type max_distance: float
    :param max_distance: maximum distance separation (km) of station combination to accept
    :rtype: list
    :return: list containing two list [[IDs of first trace],[IDs of second trace]]
    """


    # Find locations from coordinates file for station set defined in parameter file
    lats,lons=[],[]
    for s in stations :
        row=lle_df.loc[s+".*.*"]
        lats.append(row['latitude'])
        lons.append(row['longitude'])
    m_lat,m_lon=np.array(lats),np.array(lons)

    # Distance matrix to calculate separations (in km)
    size = (len(m_lat),len(m_lon))
    distance_matrix = np.zeros(size)
    for idx in range(len(m_lat)):
        for idy in range(len(m_lat)):
            dist,_,_ = gps2dist_azimuth(m_lat[idx], m_lon[idx], m_lat[idy], m_lon[idy])
            distance_matrix[idx,idy] = dist/1000
    distance_df=pd.DataFrame(data=distance_matrix,index=pd.Index(stations),
                                columns=pd.Index(stations))

    # For each field of initial combination list, get station names, find separation
    # from distance matrix, apply filter test and pass to output
    first,second=comb_list[0],comb_list[1]
    filt_first,filt_second=[],[]
    for i in range(0,len(first)) :
        i1,i2=".".join(first[i].split('.')[0:2]),".".join(second[i].split('.')[0:2])
        separation=distance_df.loc[i2].loc[i1]
        if ((separation > float(min_distance)) and (separation < float(max_distance))):
            filt_first.append(first[i])
            filt_second.append(second[i])
    return [filt_first,filt_second]
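A trimmed sketch of the pairwise station-separation matrix built above, using a few hard-coded placeholder stations instead of a coordinates DataFrame:

import numpy as np
import pandas as pd
from obspy.geodetics.base import gps2dist_azimuth

stations = ['XX.STA1', 'XX.STA2', 'XX.STA3']    # placeholder station IDs
lats = np.array([45.0, 45.5, 46.2])
lons = np.array([10.0, 10.8, 11.5])

n = len(stations)
distance_matrix = np.zeros((n, n))
for i in range(n):
    for j in range(n):
        dist, _, _ = gps2dist_azimuth(lats[i], lons[i], lats[j], lons[j])
        distance_matrix[i, j] = dist / 1000.0   # separations in km

distance_df = pd.DataFrame(distance_matrix, index=stations, columns=stations)
print(distance_df.round(1))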
Example #9
def src2sta(station_file,source,output_coordinates=False):
    '''
    Compute source-to-station distances and azimuths for all station/source pairs.

    IN:
        station_file: Path to station file (lon and lat in columns 1 and 2)
        source: numpy 1d array with source info read from file (lon and lat in fields 1 and 2)
        output_coordinates: if True, also return the station lon and lat arrays
    OUT:
        d - distances vector in km
        az - azimuth from source to station in degrees
    '''
    
    from numpy import genfromtxt,zeros,array
    from obspy.geodetics.base import gps2dist_azimuth
    
    
    #Read station file
    #staname=genfromtxt(home+station_file,dtype="S6",usecols=0)
    x=genfromtxt(station_file,dtype="f8",usecols=1)
    y=genfromtxt(station_file,dtype="f8",usecols=2)
    if x.shape==() or y.shape==(): #Single station file
        x=array([x])
        y=array([y])
    d=zeros(x.shape)
    az=zeros(x.shape)
    baz=zeros(x.shape)
    xs=source[1]
    ys=source[2]
    for k in range(len(x)):
        d[k],az[k],baz[k]=gps2dist_azimuth(ys,xs,y[k],x[k])
    d=d/1000
    
    if output_coordinates==True:
        return d,az,x,y
    else:
        return d,az
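A hypothetical call of src2sta as defined above, writing a tiny throwaway station file first; the station names, coordinates, and source array are made up:

import numpy as np

# fake station file: name, lon, lat (columns 0, 1, 2 as src2sta expects)
with open('stations_example.txt', 'w') as f:
    f.write('STA1 -70.10 -33.40\n')
    f.write('STA2 -70.80 -32.90\n')

# fake source: fields 1 and 2 are lon and lat, matching the function above
source = np.array([0.0, -70.5, -33.0, 10.0])
d, az = src2sta('stations_example.txt', source)
print(d, az)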
Example #10
def src2sta(station_file, source, output_coordinates=False):
    '''
    Compute source-to-station distances and azimuths for all station/source pairs.

    IN:
        station_file: Path to station file (lon and lat in columns 1 and 2)
        source: numpy 1d array with source info read from file (lon and lat in fields 1 and 2)
        output_coordinates: if True, also return the station lon and lat arrays
    OUT:
        d - distances vector in km
        az - azimuth from source to station in degrees
    '''

    from numpy import genfromtxt, zeros, array
    from obspy.geodetics.base import gps2dist_azimuth

    #Read station file
    #staname=genfromtxt(home+station_file,dtype="S6",usecols=0)
    x = genfromtxt(station_file, dtype="f8", usecols=1)
    y = genfromtxt(station_file, dtype="f8", usecols=2)
    if x.shape == () or y.shape == ():  #Single station file
        x = array([x])
        y = array([y])
    d = zeros(x.shape)
    az = zeros(x.shape)
    baz = zeros(x.shape)
    xs = source[1]
    ys = source[2]
    for k in range(len(x)):
        d[k], az[k], baz[k] = gps2dist_azimuth(ys, xs, y[k], x[k])
    d = d / 1000

    if output_coordinates == True:
        return d, az, x, y
    else:
        return d, az
Example #11
            def process_function(st, inv):
                # some stations may have multiple location codes or use HH channels (this should be avoided in the future)
                st = filter_st(st)

                # overlap the previous trace
                status_code = check_st_numberlap(st)
                if (status_code == -1):
                    return
                elif (status_code == 0):
                    pass
                elif (status_code == 1):
                    # merge may have problems (e.g. sampling rates are not equal)
                    try:
                        st.merge(method=1,
                                 fill_value=0,
                                 interpolation_samples=0)
                    except:  # pylint: disable=bare-except
                        return
                else:
                    raise Exception("unknown status code")

                status_code = check_time(st, event_time, waveform_length)
                if (status_code == 0):
                    pass
                elif (status_code == -1):
                    return
                else:
                    raise Exception("unknown status code")
                # trim will automatically use starttime if starttime>eventtime
                st.trim(event_time, event_time + waveform_length)

                st.detrend("demean")
                st.detrend("linear")
                st.taper(max_percentage=0.05, type="hann")

                # st.remove_response(output="DISP", pre_filt=pre_filt, zero_mean=False,
                #                    taper=False, inventory=inv, water_level=None)
                # here we should use PZ files to remove the response.
                st = remove_response(
                    st,
                    pre_filt=pre_filt,  # pylint: disable=cell-var-from-loop
                    inv=inv)  # pylint: disable=bare-except

                # the same of removing response with sac
                st.detrend("demean")
                st.detrend("linear")

                st.interpolate(sampling_rate=sampling_rate)

                station_latitude = inv[0][0].latitude
                station_longitude = inv[0][0].longitude

                # baz is calculated using station and event's location
                # for cea stations, we can directly add an angle to it
                _, baz, _ = gps2dist_azimuth(station_latitude,
                                             station_longitude, event_latitude,
                                             event_longitude)

                network = inv.get_contents()['networks'][0]
                if (correct_cea and (network in CEA_NETWORKS)):
                    baz = func_correct_cea(baz, inv, event_time,
                                           correction_data)
                if (baz == None):
                    return

                # we have to limit baz to be in [0,360)
                baz = np.mod(baz, 360)

                components = [tr.stats.channel[-1] for tr in st]
                if "N" in components and "E" in components:
                    # there may be some problem in rotating (time span is not equal for three channels)
                    try:
                        st.rotate(method="NE->RT", back_azimuth=baz)
                    except:  # pylint: disable=bare-except
                        return
                else:
                    return

                # bandpass filter
                st.filter(
                    "bandpass",
                    freqmin=1.0 / max_period,  # pylint: disable=cell-var-from-loop
                    freqmax=1.0 / min_period,
                    corners=2,
                    zerophase=True)  # pylint: disable=cell-var-from-loop

                # Convert to single precision to save space.
                for tr in st:
                    tr.data = np.require(tr.data, dtype="float32")

                return st
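A minimal sketch of the back-azimuth rotation step above, applied to synthetic arrays with obspy.signal.rotate.rotate_ne_rt instead of a full Stream; the station and event coordinates are placeholders:

import numpy as np
from obspy.geodetics.base import gps2dist_azimuth
from obspy.signal.rotate import rotate_ne_rt

sta_lat, sta_lon = 30.0, 103.0     # hypothetical station
evt_lat, evt_lon = 28.0, 105.0     # hypothetical event

# back azimuth from station towards the event, kept within [0, 360)
_, baz, _ = gps2dist_azimuth(sta_lat, sta_lon, evt_lat, evt_lon)
baz = np.mod(baz, 360.0)

n = np.random.randn(1000)          # fake north component
e = np.random.randn(1000)          # fake east component
r, t = rotate_ne_rt(n, e, baz)     # radial and transverse components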
Example #12
def multiple_filter(tr,nbands,bandwidth,fmin,fmax,trim=True,**kwargs):
    '''
    not sure which bandwidth to use, but 0.05 seems to work best
    '''

    t_start = kwargs.get('t_start',50)
    time_len = kwargs.get('time_len',500)
    constant_period_band = kwargs.get('constant_period_band',False)
    virt_src = kwargs.get('virt_src','NA')
    rec_name = kwargs.get('rec_name','NA')
    #period_bandwidth = kwargs.get('period_bandwidth',20.0)

    tr.trim(tr.stats.starttime+t_start,tr.stats.starttime+time_len)
    #tr.normalize()

    if constant_period_band:
        T_center = np.linspace(1./fmax,1./fmin,nbands)
        f_center = 1./T_center
    else:
        f_center = np.linspace(fmin,fmax,nbands)

    x_s = []
    y_s = []
    z_s = []
    max_val = []

    try:
       dist = tr.stats.sac['dist']
    except KeyError:
       #dist = tr.stats.sac['gcarc']*111.19
       distaz = gps2dist_azimuth(tr.stats.sac['evla'],tr.stats.sac['evlo'],
               tr.stats.sac['stla'],tr.stats.sac['stlo'])
       dist = distaz[0]/1000.

    samprate = tr.stats.sampling_rate
    for f in f_center:
        tr_new = tr.copy()
        freqmin = f-(bandwidth/2.0)
        freqmax = f+(bandwidth/2.0)

        if freqmin < 0.002:
           freqmin = 0.002
           print('jollygood!')

        print('Freqs:', freqmin, freqmax, 'Periods:', 1./freqmax, 1./freqmin)

        if freqmin > 0.0:
           #tr_new.filter('bandpass',freqmin=f-(bandwidth/2.0),freqmax=f+(bandwidth/2.0),
           #        corners=4,zerophase=True)
           tr_new.filter('bandpass',freqmin=freqmin,freqmax=freqmax,
                   corners=4,zerophase=True)
        else:
           tr_new.filter('lowpass',freq=freqmax)

        data_envelope = obspy.signal.filter.envelope(tr_new.data)

        #plt.plot(data_envelope)
        #plt.plot(tr_new.data)
        #plt.show()

        #data_envelope /= np.max(data_envelope)

        t = np.arange(t_start, tr.stats.npts / samprate, 1 / samprate)
        veloc = dist/t
        x = np.zeros(len(t))
        x[:] = 1./f

        #plt.plot(t,data_envelope,'r')
        #plt.plot(t,tr_new.data,'k')
        #plt.show()

        for i in range(0,len(x)):
            x_s.append(x[i])
            y_s.append(veloc[i])
            z_s.append(data_envelope[i])

        max_i = np.argmax(data_envelope)
        max_val.append(veloc[max_i])
        #max_val.append(np.max(data_envelope))

    points = (x_s,y_s)
    data = z_s
    x_axis = np.linspace(5,40,100)
    y_axis = np.linspace(2.5,4.0,100)
    grid_x,grid_y = np.meshgrid(x_axis,y_axis)
    grid_z = griddata(points, data, (grid_x,grid_y))

    #plt.scatter(x_s,y_s,c=z_s,edgecolor='none',s=50)
    #plt.scatter(x_s,y_s,c=z_s,edgecolor='none',s=50,vmin=0.0,vmax=0.3)

    plt.pcolormesh(x_axis,y_axis,grid_z)
    plt.xlabel('period (s)')
    plt.ylabel('velocity (km/s)')
    plt.colorbar()
    #plt.scatter(1./f_center,max_val,marker='*',s=75,c='w')


    #group dispersion maximum
    plt.scatter(1./f_center,max_val,marker='*',s=75,c='w')
    plt.xlim([5,40])
    plt.ylim([2.5,4.0])

    print(len(max_val))
    print(len(x_s))
    print(len(y_s))
    plt.title(virt_src+' '+rec_name)
    plt.savefig('{}_{}_groupveloc.pdf'.format(virt_src,rec_name),format='pdf')
    plt.show()
# to plot up all the stations...
#inventory.plot()

# to get the station coordinates
station_coordinates = []
for network in inventory:
    for station in network:
        station_coordinates.append((network.code, station.code, 
                                    station.latitude, station.longitude, 
                                    station.elevation))


for station in station_coordinates:        
    DegDist = locations2degrees(eventLat, eventLon,
                                station[2], station[3])
    StationAziExpec = gps2dist_azimuth(eventLat, eventLon,
                                       station[2], station[3])
    arrivals = model.get_travel_times(source_depth_in_km = eventDepth,
                                      distance_in_degree=DegDist,
                                      phase_list = ["P"])
    if DegDist > 25 and DegDist < 90:

        arrTime=eventTime + arrivals[0].time    
        bTime=arrTime-200
        eTime=arrTime+300
        try:
            st = client.get_waveforms(station[0],station[1],"00","BH?",
                                      bTime,eTime,attach_response=True)
        except:
            print("No data for station "+station[1])
            continue        
# Break up the stream into traces to remove the gain, taper, demean, filter,etc
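The distance and P travel-time lookup from the loop above can be tried in isolation; the event and station coordinates below are placeholders:

from obspy.geodetics import gps2dist_azimuth, locations2degrees
from obspy.taup import TauPyModel

eventLat, eventLon, eventDepth = -20.0, -70.0, 35.0   # placeholder event (depth in km)
staLat, staLon = 35.0, -106.0                         # placeholder station

DegDist = locations2degrees(eventLat, eventLon, staLat, staLon)
expectedAzi = gps2dist_azimuth(eventLat, eventLon, staLat, staLon)[1]

model = TauPyModel(model="iasp91")
arrivals = model.get_travel_times(source_depth_in_km=eventDepth,
                                  distance_in_degree=DegDist,
                                  phase_list=["P"])
if 25 < DegDist < 90 and arrivals:
    print("P arrives %.1f s after origin, azimuth %.1f deg" % (arrivals[0].time, expectedAzi))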
Example #14
    def GetData(self,stationdirpath='stations',datadirpath='waveforms',req_type='continuous',\
     chunklength=86400,tracelen=20000, vmodel='ak135'):
        '''Call obspy mass downloader to get waveform data. Chunklength refers to the trace length option
        for a continuous download, tracelen is for an event-based request'''

        #Currently set up to download one day worth of data in the continuous mode, 2000 seconds
        #in the event-based mode

        self.stationdirpath = stationdirpath
        self.datadirpath = datadirpath

        from obspy.clients.fdsn.mass_downloader import RectangularDomain, CircularDomain,\
        Restrictions, MassDownloader

        if req_type == 'continuous':

            #Get data from all stations within this domain

            domain = RectangularDomain(minlatitude=self.minlatitude,maxlatitude=self.maxlatitude,\
             minlongitude=self.minlongitude,maxlongitude=self.maxlongitude)

            #Download data in daily segments - may want to change

            restrictions = Restrictions(\
                                    starttime=self.starttime,endtime=self.endtime,\
                                    chunklength_in_sec=chunklength,\
                                    channel=self.channel,station=self.station,location="",\
                                    reject_channels_with_gaps=False,\
                                    minimum_length=0.0,minimum_interstation_distance_in_m=100.0)

            #Call mass downloader to get the waveform information

            mdl = MassDownloader(providers=[self.clientname])

            mdl.download(domain,
                         restrictions,
                         mseed_storage=datadirpath,
                         stationxml_storage=stationdirpath)

        elif req_type == 'event':

            if self.quake_cat == None:

                print(
                    "Stop: Must call fetchEvents first to get event catalog to download from"
                )
                sys.exit(1)

            #Add option for non-continuous download - event/station pairing for example

            #Get data for all stations in this domain

            domain = RectangularDomain(minlatitude=self.minlatitude,maxlatitude=self.maxlatitude,\
             minlongitude=self.minlongitude,maxlongitude=self.maxlongitude)

            for event in self.quake_cat:
                cnt = 0.
                print("Downloading data for event %s" % event)

                #For each event, download the waveforms at all stations requested

                origin_time = event.origins[0].time

                vel_model = TauPyModel(model=vmodel)

                #case where we only want to download data for some station-event pairs
                stations_to_exclude = []

                if self.station_autoselect_flag == True:

                    stations_to_download = []
                    evlat = event.origins[0].latitude
                    evlon = event.origins[0].longitude

                    #EK changes added 04/2019
                    evdep = event.origins[0].depth

                    for network in self.inventory:

                        for station in network:

                            stlat = station.latitude
                            stlon = station.longitude

                            #EK 04/2019
                            #this downloads data within Short Wave Window (SWW), a cone under the station bounded by an angle, here we chose 45 deg
                            #calculate distance between eq and station and azimuth

                            ddeg = locations2degrees(evlat, evlon, stlat,
                                                     stlon)
                            distance_m, az, baz = gps2dist_azimuth(
                                evlat, evlon, stlat, stlon)

                            #calculate proxy for incident angle

                            theta = np.arctan2(distance_m, evdep)

                            if theta <= np.pi / 4:

                                #find if station has needed arrival

                                arrivals = vel_model.get_travel_times(
                                    source_depth_in_km=evdep / 1000.,
                                    distance_in_degree=ddeg,
                                    phase_list=["s", "S"])
                                if len(arrivals) > 0:

                                    #get stations you want to download

                                    stations_to_download.append(station.code)
                                    print(station.code,
                                          'angle = %.2f' % np.rad2deg(theta))
                                    print(arrivals)
                                    cnt = cnt + 1
                                else:
                                    stations_to_exclude.append(station.code)
                            else:

                                if station.code not in stations_to_exclude:
                                    stations_to_exclude.append(station.code)

                    print(
                        "\n-------------\n%g event-station pairs found in SWW\n-------------\n"
                        % cnt)
                    print(
                        "\n-------------\nSelecting just the following stations for download\n-------------\n"
                    )
                    print(stations_to_download)

                    #this approach doesn't work, use exclude_stations flag later
                    #restrictions = Restrictions(starttime=origin_time,endtime=origin_time + tracelen,\
                    #reject_channels_with_gaps=False, minimum_length=0.95, minimum_interstation_distance_in_m=10E3,\
                    #channel=self.channel,location="",network=self.network,station=stations_to_download)

                #case where we have single network

                if self.network:

                    restrictions = Restrictions(starttime=origin_time,endtime=origin_time + tracelen,\
                     reject_channels_with_gaps=False, minimum_length=0.95, minimum_interstation_distance_in_m=10E3,\
                     channel=self.channel,location="",network=self.network,exclude_stations=stations_to_exclude)

                #Case where we want all networks within a region (assumes that we also want all stations unless we have built
                # a stations to exclude list)

                else:

                    restrictions = Restrictions(starttime=origin_time,endtime=origin_time + tracelen,\
                     reject_channels_with_gaps=False, minimum_length=0.95, minimum_interstation_distance_in_m=10E3,\
                     channel=self.channel,exclude_stations=stations_to_exclude)

                mdl = MassDownloader(providers=[self.clientname])

                mdl.download(domain, restrictions, mseed_storage=datadirpath,\
                 stationxml_storage=stationdirpath)
Example #15
    def writeEvents(self, centercoords=None):
        '''Write event information to a file, which can be loaded as a pandas dataframe.
        If centercoords is specified as a list [lon,lat], the time of the first (P) arrival
        will also be reported'''

        ofname = 'Events_%s_%s_%s_%s_%s_%s_mag_%s-%s_depth_%s-%s_km.dat' %(self.starttime,self.endtime,self.minlatitude,\
         self.minlongitude,self.maxlatitude,self.maxlongitude,self.minmag,self.maxmag,self.mindepth,self.maxdepth)

        outfile = open(ofname, 'w')

        if self.quake_cat == None:

            print("Need to call fetchEvents first")
            sys.exit(1)

        if centercoords == None:

            for event in self.quake_cat:

                time = event.origins[0].time
                lat = event.origins[0].latitude
                lon = event.origins[0].longitude
                dep = event.origins[0].depth / 1000.
                mag = event.magnitudes[0].mag

                if self.station_autoselect_flag == True:

                    cnt = 0

                    for network in self.inventory:
                        for station in network:

                            stlat = station.latitude
                            stlon = station.longitude

                            ddeg = locations2degrees(lat, lon, stlat, stlon)
                            distance_m, az, baz = gps2dist_azimuth(
                                lat, lon, stlat, stlon)

                            theta = np.arctan2(distance_m, dep * 1000.)

                            if theta <= np.pi / 4:

                                arrivals = self.vmodel.get_travel_times(
                                    source_depth_in_km=dep,
                                    distance_in_degree=ddeg,
                                    phase_list=["s", "S"])

                                if len(arrivals) > 0:
                                    cnt = cnt + 1
                    if cnt > 0:
                        outfile.write("%s %s %s %s %s\n" %
                                      (lon, lat, dep, mag, time))
                else:
                    outfile.write("%s %s %s %s %s\n" %
                                  (lon, lat, dep, mag, time))

        #haven't added the SWW here, so in this case all events will be written to the file, might change in the future if needed

        #In this case, we write the time of the first arriving phase at the stations

        else:

            try:
                clon = centercoords[1]
                clat = centercoords[0]
            except:
                print("Centercoors needs to be entered as list [lon,lat]")
                sys.exit(1)

            for event in self.quake_cat:

                time = event.origins[0].time
                lat = event.origins[0].latitude
                lon = event.origins[0].longitude
                dep = event.origins[0].depth / 1000.0

                try:

                    cdist = locations2degrees(lat, lon, clat, clon)
                    arrivals = self.vmodel.get_travel_times(source_depth_in_km=dep,\
                    distance_in_degree=cdist,phase_list=["p","P"])
                except:
                    continue

                if len(arrivals) > 0:
                    first_phase = arrivals[0].name
                    first_phase_time = time + arrivals[0].time

                else:
                    first_phase = 'NaN'
                    first_phase_time = "NaN"

                mag = event.magnitudes[0].mag

                outfile.write("%s %s %s %s %s %s %s %s\n" %
                              (lon, lat, dep, mag, time, first_phase_time,
                               first_phase, cdist))

        outfile.close()
Example #16
    def compute(self):
        ### Compute change status value for each point of the grid ###

        BETAS = []  # list of status values for the points of the grid
        for j in range(len(self.x)):  # iterate the grid

            info = []  # compute dist of events from current point
            for i in range(len(self.lon)):
                info.append([
                    self.lat[i], self.lon[i], self.time[i],
                    base.gps2dist_azimuth(self.y[j], self.x[j], self.lat[i],
                                          self.lon[i])[0]
                ])
            info = sorted(
                info, key=getKey
            )  # sort events with increasing distance from the current point

            cluster = []  # times of events selected to compute the beta value of the current point
            i = 0
            while info[i][3] < self.spacemin:  # keep events within 50 km radius
                cluster.append(info[i][2])
                i = i + 1

            if len(cluster) < self.nbmin:  # if not enough events

                spmin = self.spacemin + 10000  # extend area
                # increase radius until either there are enough events or the max radius is reached
                while len(cluster) < self.nbmin and spmin < self.spacemax:
                    while info[i][3] < spmin:
                        cluster.append(info[i][2])
                        i += 1
                    spmin += 10000

                if spmin >= self.spacemax:  # maximum radius reached before minimum nb reached
                    BETAS.append(np.nan)

                else:  # minimum number reached
                    beta = evaluate_beta(self.changetime, sorted(cluster),
                                         self.Dt2, self.Dt1)[0]
                    print(beta)
                    if beta >= 2:
                        betainf = evaluate_beta(self.changetime,
                                                sorted(cluster), self.Dt2,
                                                self.Dt1)[1]
                        BETAS.append(betainf)  # significant increase
                        print(betainf)
                    elif -2 < beta < 2:
                        BETAS.append(0)  # insignificant change

                    elif -2 >= beta:
                        betasup = evaluate_beta(self.changetime,
                                                sorted(cluster), self.Dt2,
                                                self.Dt1)[2]
                        BETAS.append(betasup)  # significant decrease

            else:  # enough events in the minimum radius
                beta = evaluate_beta(self.changetime, sorted(cluster),
                                     self.Dt2, self.Dt1)[0]
                print(beta)
                if beta >= 2:
                    betainf = evaluate_beta(self.changetime, sorted(cluster),
                                            self.Dt2, self.Dt1)[1]
                    BETAS.append(betainf)  # significant increase
                    print(betainf)
                elif -2 < beta < 2:
                    BETAS.append(0)  # insignificant change

                elif -2 >= beta:
                    betasup = evaluate_beta(self.changetime, sorted(cluster),
                                            self.Dt2, self.Dt1)[2]
                    BETAS.append(betasup)  # significant decrease

        # write results on a file
        file = open('./USED_DATA_FILES/' + str(self.infile)[18:-4] +
                    'beta.txt', 'w')  # removing the first '['character
        file.write(str(self.x)[1:])
        file.write(str(self.y))
        file.write(str(BETAS)[:-1])  # removing the last ']' character
        file.close()
        return
Example #17
def pretty_plot(st, stack, eve, not_used, comp, inv, paramdic, debug=False):
    st2 = st.select(component=comp)
    diss = []
    # compute distances
    for tr in st2:
        coors = inv.get_coordinates(tr.id[:-1] + 'Z')
        (dis, azi, bazi) = gps2dist_azimuth(coors['latitude'],
                                            coors['longitude'],
                                            eve.origins[0].latitude,
                                            eve.origins[0].longitude)
        disdeg = kilometer2degrees(dis / 1000.)
        diss.append(disdeg)
    if debug:
        print(diss)
    mdiss = min(diss)
    Mdiss = max(diss)
    ptp = np.ptp(stack)
    ran = 0.3 * (Mdiss - mdiss) * ptp
    fig = plt.figure(1, figsize=(12, 12))
    tithand = st[0].stats.network + ' ' + paramdic['phase'] + '-Wave '
    if comp == 'R':
        tithand += ' Radial '
    elif comp == 'Z':
        tithand += ' Vertical '
    elif comp == 'T':
        tithand += ' Transverse '
    tithand += str(eve['origins'][0]['time'].year) + ' '
    tithand += str(eve['origins'][0]['time'].julday) + ' '
    tithand += str(eve['origins'][0]['time'].hour).zfill(2) + ':' + str(
        eve['origins'][0]['time'].minute).zfill(2)
    mag = eve.magnitudes[0].mag
    magstr = eve.magnitudes[0].magnitude_type
    if 'Lg' in magstr:
        magstr = 'mb_{Lg}'
    gmax, gmin = -100., 500.
    tithand += ' $' + magstr + '$=' + str(mag)
    plt.title(tithand)
    for pair in zip(diss, st2):
        t = pair[1].times()
        if max(pair[1].data / ran + pair[0]) > gmax:
            gmax = max(pair[1].data / ran + pair[0])
        if min(pair[1].data / ran + pair[0]) < gmin:
            gmin = min(pair[1].data / ran + pair[0])
        p = plt.plot(t, pair[1].data / ran + pair[0])
        plt.text(min(t) + 1.,
                 pair[0] - +.2, (pair[1].id)[:-4].replace('.', ' '),
                 color=p[0].get_color())
        plt.plot(t, stack / ran + pair[0], color='k', alpha=0.5, linewidth=3)
    plt.plot([10., 10.], [0., 2 * Mdiss + ran], color='k', linewidth=3)
    plt.ylim((gmin - 0.02 * gmin, gmax + 0.02 * gmax))
    plt.xlim((min(t), max(t)))
    plt.xlabel('Time (s)')
    plt.ylabel('Distance (deg)')
    if not os.path.exists(st[0].stats.network + '_results'):
        os.mkdir(st[0].stats.network + '_results')

    plt.savefig(st[0].stats.network + '_results/' + st[0].stats.network + '_' +
                comp + '_' + str(eve['origins'][0]['time'].year) +
                str(eve['origins'][0]['time'].julday) + '_' +
                str(eve['origins'][0]['time'].hour).zfill(2) +
                str(eve['origins'][0]['time'].minute).zfill(2) + '.png',
                format='PNG',
                dpi=400)

    #plt.show()
    plt.clf()
    plt.close()
    return
Example #18
def fit_spectra(st, origin, kappa=0.035):
    """
    Fit spectra by varying stress drop; kappa is held fixed at the given value.

    Args:
        st (StationStream):
            Stream of data.
        origin (dict):
             Dictionary with the following keys:
              - eventid
              - magnitude
              - time (UTCDateTime object)
              - lon
              - lat
              - depth
        kappa (float):
            Site diminution factor (sec). Typical values are about 0.03-0.04 for
            active crustal regions and about 0.006 for stable continental
            regions.

    Returns:
        StationStream with fitted spectra parameters.
    """
    for tr in st:
        # Only do this for horizontal channels for which the smoothed spectra
        # has been computed.
        if ('Z' not in tr.stats['channel'].upper()) & \
                tr.hasParameter('smooth_signal_spectrum'):
            event_mag = origin['magnitude']
            event_lon = origin['lon']
            event_lat = origin['lat']
            dist = gps2dist_azimuth(
                lat1=event_lat,
                lon1=event_lon,
                lat2=tr.stats['coordinates']['latitude'],
                lon2=tr.stats['coordinates']['longitude']
            )[0] * M_TO_KM

            # Use the smoothed spectra for fitting
            smooth_signal_dict = tr.getParameter('smooth_signal_spectrum')
            freq = np.array(smooth_signal_dict['freq'])
            obs_spec = np.array(smooth_signal_dict['spec'])

            # Loop over trial stress drops and compute the RMS misfit
            # of the spectra
            rms = []
            rms_stress = []
            rms_f0 = []
            for i in range(len(TRIAL_STRESS_DROPS)):
                # Pick min f for cost function that is slightly less than
                # corner frequency.
                f0 = brune_f0(event_mag, TRIAL_STRESS_DROPS[i])
                fmin = FMIN_FAC * f0
                fmax = -np.log(FMAX_FAC)/np.pi/kappa

                rms_f0.append(f0)
                mod_spec = model(
                    freq, dist, kappa,
                    event_mag, TRIAL_STRESS_DROPS[i]
                )

                # Compute rms fit in log space, append to list
                log_residuals = (
                    np.log(obs_spec[(freq >= fmin) & (freq <= fmax)]) -
                    np.log(mod_spec[(freq >= fmin) & (freq <= fmax)])
                )
                rms.append(np.sqrt(np.mean((log_residuals)**2)))

                # Track the stress drop value
                rms_stress.append(TRIAL_STRESS_DROPS[i])

            # Find the stress drop with the best fit
            idx = np.where(rms == np.nanmin(rms))[0][0]
            fit_spectra_dict = {
                'stress_drop': rms_stress[idx],
                'epi_dist': dist,
                'kappa': kappa,
                'magnitude': event_mag,
                'f0': rms_f0[idx]
            }
            tr.setParameter('fit_spectra', fit_spectra_dict)

    return st
def signal_end(st, event_time, event_lon, event_lat, event_mag,
               method=None, vmin=None, floor=None,
               model=None, epsilon=2.0):
    """
    Estimate end of signal by using a model of the 5-95% significant
    duration, and adding this value to the "signal_split" time. This probably
    only works well when the split is estimated with a p-wave picker since
    the velocity method often ends up with split times that are well before
    signal actually starts.

    Args:
        st (StationStream):
            Stream of data.
        event_time (UTCDateTime):
            Event origin time.
        event_mag (float):
            Event magnitude.
        event_lon (float):
            Event longitude.
        event_lat (float):
            Event latitude.
        method (str):
            Method for estimating signal end time. Either 'velocity'
            or 'model'.
        vmin (float):
            Velocity (km/s) for estimating end of signal. Only used if
            method="velocity".
        floor (float):
            Minimum duration (sec) applied along with vmin.
        model (str):
            Short name of duration model to use. Must be defined in the
            gmprocess/data/modules.yml file.
        epsilon (float):
            Number of standard deviations; if epsilon is 1.0, then the signal
            window duration is the mean Ds + 1 standard deviation. Only used
            for method="model".

    Returns:
        trace with stats dict updated to include a
        stats['processing_parameters']['signal_end'] dictionary.

    """
    # Load openquake stuff if method="model"
    if method == "model":
        mod_file = pkg_resources.resource_filename(
            'gmprocess', os.path.join('data', 'modules.yml'))
        with open(mod_file, 'r') as f:
            mods = yaml.load(f)

        # Import module
        cname, mpath = mods['modules'][model]
        dmodel = getattr(import_module(mpath), cname)()

        # Set some "conservative" inputs (in that they will tend to give
        # larger durations).
        sctx = SitesContext()
        sctx.vs30 = np.array([180.0])
        sctx.z1pt0 = np.array([0.51])
        rctx = RuptureContext()
        rctx.mag = event_mag
        rctx.rake = -90.0
        dur_imt = imt.from_string('RSD595')
        stddev_types = [const.StdDev.INTRA_EVENT]

    for tr in st:
        if not tr.hasParameter('signal_split'):
            continue
        if method == "velocity":
            if vmin is None:
                raise ValueError('Must specify vmin if method is "velocity".')
            if floor is None:
                raise ValueError('Must specify floor if method is "velocity".')
            epi_dist = gps2dist_azimuth(
                lat1=event_lat,
                lon1=event_lon,
                lat2=tr.stats['coordinates']['latitude'],
                lon2=tr.stats['coordinates']['longitude'])[0] / 1000.0
            end_time = event_time + max(floor, epi_dist / vmin)
        elif method == "model":
            if model is None:
                raise ValueError('Must specify model if method is "model".')
            epi_dist = gps2dist_azimuth(
                lat1=event_lat,
                lon1=event_lon,
                lat2=tr.stats['coordinates']['latitude'],
                lon2=tr.stats['coordinates']['longitude'])[0] / 1000.0
            dctx = DistancesContext()
            # Repi >= Rrup, so substitution here should be conservative
            # (leading to larger durations).
            dctx.rrup = np.array([epi_dist])
            lnmu, lnstd = dmodel.get_mean_and_stddevs(
                sctx, rctx, dctx, dur_imt, stddev_types)
            duration = np.exp(lnmu + epsilon * lnstd[0])
            # Get split time
            split_time = tr.getParameter('signal_split')['split_time']
            end_time = split_time + float(duration)
        else:
            raise ValueError('method must be either "velocity" or "model".')
        # Update trace params
        end_params = {
            'end_time': end_time,
            'method': method,
            'vsplit': vmin,
            'floor': floor,
            'model': model,
            'epsilon': epsilon
        }
        tr.setParameter('signal_end', end_params)

    return st
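A compact sketch of the "velocity" branch above, where the signal window ends once a phase travelling at vmin would have passed the station, subject to a minimum floor; every value below is a placeholder:

from obspy import UTCDateTime
from obspy.geodetics.base import gps2dist_azimuth

event_time = UTCDateTime("2020-01-01T00:00:00")
event_lat, event_lon = 38.0, -122.0    # placeholder origin
sta_lat, sta_lon = 37.5, -121.0        # placeholder station
vmin, floor = 1.0, 120.0               # km/s and minimum window length (s)

epi_dist = gps2dist_azimuth(event_lat, event_lon, sta_lat, sta_lon)[0] / 1000.0
end_time = event_time + max(floor, epi_dist / vmin)
print(end_time)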
Example #20
def list_duplicates(catalog,
                    dirname,
                    timewindow=2,
                    distwindow=15,
                    magwindow=None,
                    minmag=-5,
                    locfilter=None):
    """Make a list of possible duplicate events."""
    catalog.loc[:, 'convtime'] = [
        ' '.join(x.split('T')) for x in catalog['time'].tolist()
    ]
    catalog.loc[:, 'convtime'] = catalog['convtime'].astype('datetime64[ns]')
    catalog = catalog[catalog['mag'] >= minmag]
    if locfilter:
        catalog = catalog[catalog['place'].str.contains(locfilter, na=False)]
    cat = catalog[[
        'time', 'convtime', 'id', 'latitude', 'longitude', 'depth', 'mag'
    ]].copy()
    cat.loc[:, 'time'] = [qcu.to_epoch(x) for x in cat['time']]

    duplines1 = [('Possible duplicates using %ss time threshold and %skm '
                  'distance threshold\n') % (timewindow, distwindow),
                 '***********************\n'
                 'date time id latitude longitude depth magnitude '
                 '(distance) (Δ time) (Δ magnitude)\n']
    duplines2 = [('\n\nPossible duplicates using 16s time threshold and 100km '
                  'distance threshold\n'), '***********************\n'
                 'date time id latitude longitude depth magnitude '
                 '(distance) (Δ time) (Δ magnitude)\n']
    sep = '-----------------------\n'

    thresh1dupes, thresh2dupes = 0, 0
    for event in cat.itertuples():

        trimdf = cat[cat['convtime'].between(event.convtime,
                                             event.convtime +
                                             pd.Timedelta(seconds=16),
                                             inclusive=False)]

        if len(trimdf) != 0:
            for tevent in trimdf.itertuples():
                dist = gps2dist_azimuth(event.latitude, event.longitude,
                                        tevent.latitude,
                                        tevent.longitude)[0] / 1000.
                if dist < 100:
                    dtime = (event.convtime - tevent.convtime).total_seconds()
                    dmag = event.mag - tevent.mag
                    diffs = map('{:.2f}'.format, [dist, dtime, dmag])

                    dupline1 = ' '.join([str(x) for x in event[1:]]) + ' ' +\
                               ' '.join(diffs) + '\n'
                    dupline2 = ' '.join([str(x) for x in tevent[1:]]) + '\n'
                    duplines2.extend((sep, dupline1, dupline2))

                    thresh2dupes += 1

                    if (dist < distwindow) and (abs(dtime) < timewindow):
                        duplines1.extend((sep, dupline1, dupline2))
                        thresh1dupes += 1

            continue

    with open('%s_duplicates.txt' % dirname, 'w') as dupfile:
        for dupline in duplines1:
            dupfile.write(dupline)
        for dupline in duplines2:
            dupfile.write(dupline)

    return thresh1dupes, thresh2dupes
Example #21
def Acces_Blindtest_check():
    BLINDTEST_MSEED = '/home/nienke/Documents/Applied_geophysics/Thesis/anaconda/Database/data_Nienke/M5.0_3914855_deg_2019-09-22.mseed'
    BLINDTEST_XML = BLINDTEST_MSEED.replace(".mseed", ".xml")

    # Initiate Parameters:
    get_parameters = Get_Paramters()
    PRIOR = get_parameters.get_prior()
    VALUES = get_parameters.specifications()
    VALUES['npts'] = 2000
    VALUES[
        'directory'] = '/home/nienke/Documents/Applied_geophysics/Thesis/anaconda/Blindtest/check_waveforms'
    VALUES['blind'] = True

    # st = read(VALUES['directory'] + '/bw.mseed')
    # st_reject = read(VALUES['directory'] + '/bw_reject.mseed')

    # Initiate the databases from instaseis:
    db = instaseis.open_db(PRIOR['VELOC'])
    tr_obs = obspy.read(BLINDTEST_MSEED)
    # tr_obs.plot(outfile=VALUES['directory'] + '/Observed')
    tr_obs.integrate()
    tr_obs.plot(outfile=VALUES['directory'] + '/Observed_integrated')
    source = instaseis.Source.parse(BLINDTEST_XML)
    blindtest = Blindtest()
    events = blindtest.get_events(BLINDTEST_XML)
    # get_parameters.get_prior_blindtest(events[0])
    time, depth, la_s, lo_s = blindtest.get_pref_origin(events[0])

    dist, az, baz = gps2dist_azimuth(lat1=la_s,
                                     lon1=lo_s,
                                     lat2=PRIOR['la_r'],
                                     lon2=PRIOR['lo_r'],
                                     a=PRIOR['radius'],
                                     f=0)
    epi = kilometer2degrees(dist, radius=PRIOR['radius'])
    PRIOR['az'] = az
    PRIOR['baz'] = baz
    PRIOR['epi']['range_min'] = epi - 5
    PRIOR['epi']['range_max'] = epi + 5
    PRIOR['epi']['spread'] = 1
    PRIOR['depth']['range_min'] = depth - 10000
    PRIOR['depth']['range_max'] = depth + 10000
    PRIOR['network'] = tr_obs.traces[0].meta.network
    PRIOR['location'] = tr_obs.traces[0].meta.location
    PRIOR['station'] = tr_obs.traces[0].meta.station
    est_noise = Create_observed(PRIOR, db)
    create = Source_code(PRIOR['VELOC_taup'])
    traces_obs, p_obs, s_obs, p_time_obs, s_time_obs = create.get_window_obspy(
        tr_obs, epi, depth, time, VALUES['npts'])
    PRIOR['var_est'] = est_noise.get_var_data(p_time_obs, tr_obs)
    obs_time = Create_observed(PRIOR, db)
    time_at_receiver = obs_time.get_receiver_time(epi, depth, time)
    plt.figure()

    catalog_path = '/home/nienke/Documents/Applied_geophysics/Thesis/anaconda/Additional_scripts/MQScatalog_withFrequencies/MQS_absolute_withFrequencyInfo.xml'

    events_catalog = blindtest.get_events(catalog_path)

    for v in events_catalog:
        t, d, lat_ev, lo_ev = blindtest.get_pref_origin(v)
        if time.date == t.date:
            Pick_event = v
            break
    PRIOR['M0'] = blindtest.get_pref_scalarmoment(Pick_event)
    picks_surface = get_phase_picks(Pick_event, pick_type='surface')
    R_env_obs, L_env_obs = blindtest.pick_sw(tr_obs,
                                             picks_surface,
                                             epi,
                                             PRIOR,
                                             VALUES['npts'],
                                             VALUES['directory'],
                                             plot_modus=True)

    start_sample = create_starting_sample()
    strike = 243.423396191
    dip = 34.436087773
    rake = 164.912874159

    from Seismogram import Seismogram
    from Misfit import Misfit
    misfit = Misfit(VALUES['directory'])
    seis = Seismogram(PRIOR, db)
    epi = epi - 3
    depth = depth

    # ---------------------------------------------------------------------------------------------------------------  #
    dict = geo.Geodesic(a=PRIOR['radius'], f=0).ArcDirect(lat1=PRIOR['la_r'],
                                                          lon1=PRIOR['lo_r'],
                                                          azi1=PRIOR['baz'],
                                                          a12=epi,
                                                          outmask=1929)
    d_syn, traces_syn, sources = seis.get_seis_manual(la_s=dict['lat2'],
                                                      lo_s=dict['lon2'],
                                                      depth=depth,
                                                      strike=strike,
                                                      dip=dip,
                                                      rake=rake,
                                                      time=time,
                                                      M0=PRIOR['M0'],
                                                      sdr=VALUES['sdr'])

    R_env_syn, L_env_syn = blindtest.pick_sw(traces_syn,
                                             picks_surface,
                                             epi,
                                             PRIOR,
                                             VALUES['npts'],
                                             VALUES['directory'],
                                             plot_modus=False)

    traces_syn.plot(outfile=VALUES['directory'] + '/syntethic')
    total_syn, p_syn, s_syn, p_time_syn, s_time_syn = create.get_window_obspy(
        traces_syn, epi, depth, time, VALUES['npts'])

    ax1 = plt.subplot2grid((5, 1), (0, 0))
    ax1.plot(zero_to_nan(p_syn.traces[0].data), c='r', linewidth=0.3)
    ax1.plot(zero_to_nan(p_obs.traces[0].data),
             c='k',
             linestyle=':',
             linewidth=0.3)
    plt.tight_layout()
    ax2 = plt.subplot2grid((5, 1), (1, 0))
    ax2.plot(zero_to_nan(p_syn.traces[1].data), c='r', linewidth=0.3)
    ax2.plot(zero_to_nan(p_obs.traces[1].data),
             c='k',
             linestyle=':',
             linewidth=0.3)
    plt.tight_layout()
    ax3 = plt.subplot2grid((5, 1), (2, 0))
    ax3.plot(zero_to_nan(s_syn.traces[0].data), c='r', linewidth=0.3)
    ax3.plot(zero_to_nan(s_obs.traces[0].data), c='k', linewidth=0.3)
    plt.tight_layout()
    ax4 = plt.subplot2grid((5, 1), (3, 0))
    ax4.plot(zero_to_nan(s_syn.traces[1].data), c='r', linewidth=0.3)
    ax4.plot(zero_to_nan(s_obs.traces[1].data), c='k', linewidth=0.3)
    plt.tight_layout()
    ax5 = plt.subplot2grid((5, 1), (4, 0))
    ax5.plot(zero_to_nan(s_syn.traces[2].data), c='r', linewidth=0.3)
    ax5.plot(zero_to_nan(s_obs.traces[2].data), c='k', linewidth=0.3)
    plt.tight_layout()

    plt.savefig(VALUES['directory'] + '/%.2f_%.2f.pdf' % (epi, depth))
    plt.close()

    # time =

    ax1 = plt.subplot2grid((3, 1), (0, 0))
    ax1.plot(zero_to_nan(total_syn.traces[0].data), c='r', linewidth=0.5)
    ax1.plot(zero_to_nan(traces_obs.traces[0].data),
             c='k',
             linestyle=':',
             linewidth=0.5)
    ax1.set_title('SYNTHETIC: epi = %.2f  REAL: epi = %.2f (depth fixed)' %
                  (epi, epi + 3))
    plt.tight_layout()
    ax2 = plt.subplot2grid((3, 1), (1, 0))
    ax2.plot(zero_to_nan(total_syn.traces[1].data), c='r', linewidth=0.5)
    ax2.plot(zero_to_nan(traces_obs.traces[1].data),
             c='k',
             linestyle=':',
             linewidth=0.5)
    plt.tight_layout()
    ax3 = plt.subplot2grid((3, 1), (2, 0))
    ax3.plot(zero_to_nan(total_syn.traces[2].data), c='r', linewidth=0.5)
    ax3.plot(zero_to_nan(traces_obs.traces[2].data),
             c='k',
             linestyle=':',
             linewidth=0.5)
    plt.tight_layout()

    plt.savefig(VALUES['directory'] + '/PS_%.2f_%.2f.pdf' % (epi, depth))
    plt.close()

    Xi_bw_new, time_shift_new, amplitude = misfit.CC_stream(
        p_obs, p_syn, s_obs, s_syn, p_time_obs, p_time_syn)
    s_z_new = 0.1 * Xi_bw_new[0]
    s_r_new = 0.1 * Xi_bw_new[1]
    s_t_new = 1 * Xi_bw_new[2]
    p_z_new = 5 * Xi_bw_new[3]
    p_r_new = 5 * Xi_bw_new[4]
    bw_new = s_z_new + s_r_new + s_t_new + p_z_new + p_r_new
    Xi_R_new = misfit.SW_L2(R_env_obs, R_env_syn, PRIOR['var_est'], amplitude)
    Xi_L_new = misfit.SW_L2(L_env_obs, L_env_syn, PRIOR['var_est'], amplitude)

    R_dict_new = {}
    rw_new = 0
    for j, v in enumerate(Xi_R_new):
        R_dict_new.update({'R_%i_new' % j: v})
        rw_new += v

    L_dict_new = {}
    lw_new = 0
    for j, v in enumerate(Xi_L_new):
        L_dict_new.update({'L_%i_new' % j: v})
        lw_new += v
    Xi_new = bw_new + rw_new + lw_new
    a = 1
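# Illustrative sketch (not part of the original script): the block above sums
# weighted body-wave cross-correlation misfits (S_Z, S_R, S_T, P_Z, P_R) with
# the Rayleigh/Love envelope misfits into a single value. The weights
# (0.1, 0.1, 1, 5, 5) are copied from the code above; the helper name and the
# example values are hypothetical.
def combine_misfits(xi_bw, xi_R, xi_L, weights=(0.1, 0.1, 1.0, 5.0, 5.0)):
    """Weighted sum of body-wave misfits plus surface-wave envelope misfits."""
    bw = sum(w * x for w, x in zip(weights, xi_bw))  # order: S_Z, S_R, S_T, P_Z, P_R
    return bw + sum(xi_R) + sum(xi_L)

# Example: combine_misfits([0.2, 0.3, 0.1, 0.05, 0.04], [0.5, 0.6], [0.4])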
Exemplo n.º 22
0
    def _load_events(self):
        self._load_events_helper()
        cache = {}
        notFound = defaultdict(int)
        oEvents = []
        missingStations = defaultdict(int)
        for e in self.eventList:
            if (e.preferred_origin and len(e.preferred_origin.arrival_list)):
                cullList = []
                for a in e.preferred_origin.arrival_list:
                    if (len(a.net)): continue

                    seedid = '%s.%s.%s.%s' % (a.net, a.sta, a.loc, a.cha)
                    newCode = None
                    if (seedid not in cache):
                        sc = a.sta
                        lonlat = self.isc_coords_dict[sc]
                        if (len(lonlat) == 0):
                            cullList.append(a)
                            continue
                        # end if

                        r = self.fdsn_inventory.getClosestStations(lonlat[0],
                                                                   lonlat[1],
                                                                   maxdist=1e3)
                        #if(a.sta=='KUM'): print a.net, a.sta, a.loc, a.cha, r
                        if (not r):
                            notFound[sc] += 1
                        else:
                            for cr in r[0]:
                                c = cr.split('.')[0]
                                newCode = c
                            # end for
                        # end if

                        if (newCode):
                            cache[seedid] = newCode
                        # end if
                    else:
                        newCode = cache[seedid]
                    # end if

                    if (newCode):
                        #print a.net, newCode
                        a.net = newCode

                        sc = self.fdsn_inventory.t[a.net][a.sta]
                        if (type(sc) == defaultdict):
                            cullList.append(a)
                            continue
                        # end if
                        da = gps2dist_azimuth(e.preferred_origin.lat,
                                              e.preferred_origin.lon, sc[1],
                                              sc[0])
                        dist = kilometers2degrees(da[0] / 1e3)
                        if (np.fabs(a.distance - dist) > 0.5):
                            cullList.append(a)
                        # end if
                    # end if
                # end for
                for c in cullList:
                    e.preferred_origin.arrival_list.remove(c)
            # end if

            # Create obspy event object
            ci = OCreationInfo(author='GA',
                               creation_time=UTCDateTime(),
                               agency_id='GA-iteration-1')
            oid = self.get_id()
            origin = OOrigin(resource_id=OResourceIdentifier(id=oid),
                             time=UTCDateTime(e.preferred_origin.utctime),
                             longitude=e.preferred_origin.lon,
                             latitude=e.preferred_origin.lat,
                             depth=e.preferred_origin.depthkm * 1e3,
                             method_id=OResourceIdentifier(id='unknown'),
                             earth_model_id=OResourceIdentifier(id='iasp91'),
                             evaluation_mode='automatic',
                             creation_info=ci)
            magnitude = OMagnitude(
                resource_id=OResourceIdentifier(id=self.get_id()),
                mag=e.preferred_magnitude.magnitude_value,
                magnitude_type=e.preferred_magnitude.magnitude_type,
                origin_id=OResourceIdentifier(id=oid),
                creation_info=ci)
            event = OEvent(resource_id=OResourceIdentifier(id=self.get_id()),
                           creation_info=ci,
                           event_type='earthquake')
            event.origins = [origin]
            event.magnitudes = [magnitude]
            event.preferred_magnitude_id = magnitude.resource_id
            event.preferred_origin_id = origin.resource_id

            # Insert old picks
            for a in e.preferred_origin.arrival_list:
                if (type(self.fdsn_inventory.t[a.net][a.sta]) == defaultdict):
                    missingStations[a.net + '.' + a.sta] += 1
                    continue
                # end if
                oldPick = OPick(
                    resource_id=OResourceIdentifier(id=self.get_id()),
                    time=UTCDateTime(a.utctime),
                    waveform_id=OWaveformStreamID(network_code=a.net,
                                                  station_code=a.sta,
                                                  channel_code=a.cha),
                    method_id=OResourceIdentifier('unknown'),
                    phase_hint=a.phase,
                    evaluation_mode='automatic',
                    creation_info=ci)

                oldArr = OArrival(resource_id=OResourceIdentifier(
                    id=oldPick.resource_id.id + "#"),
                                  pick_id=oldPick.resource_id,
                                  phase=oldPick.phase_hint,
                                  distance=a.distance,
                                  earth_model_id=OResourceIdentifier(
                                      'quakeml:ga.gov.au/earthmodel/iasp91'),
                                  creation_info=ci)

                event.picks.append(oldPick)
                event.preferred_origin().arrivals.append(oldArr)
            # end for

            # Insert our picks
            opList = self.our_picks.picks[e.public_id]
            if (len(opList)):
                for op in opList:
                    if (type(self.fdsn_inventory.t[op[1]][op[2]]) ==
                            defaultdict):
                        missingStations[op[1] + '.' + op[2]] += 1
                        continue
                    # end if
                    newPick = OPick(
                        resource_id=OResourceIdentifier(id=self.get_id()),
                        time=UTCDateTime(op[0]),
                        waveform_id=OWaveformStreamID(network_code=op[1],
                                                      station_code=op[2],
                                                      channel_code=op[3]),
                        method_id=OResourceIdentifier('phasepapy/aicd'),
                        backazimuth=op[-1],
                        phase_hint=op[4],
                        evaluation_mode='automatic',
                        comments=op[6],
                        creation_info=ci)

                    newArr = OArrival(
                        resource_id=OResourceIdentifier(
                            id=newPick.resource_id.id + "#"),
                        pick_id=newPick.resource_id,
                        phase=newPick.phase_hint,
                        azimuth=op[-2],
                        distance=op[-3],
                        time_residual=op[5],
                        time_weight=1.,
                        earth_model_id=OResourceIdentifier(
                            'quakeml:ga.gov.au/earthmodel/iasp91'),
                        creation_info=ci)
                    event.picks.append(newPick)
                    event.preferred_origin().arrivals.append(newArr)
                # end for
            # end if

            quality = OOriginQuality(
                associated_phase_count=len(e.preferred_origin.arrival_list) +
                len(self.our_picks.picks[e.public_id]),
                used_phase_count=len(e.preferred_origin.arrival_list) +
                len(self.our_picks.picks[e.public_id]))
            event.preferred_origin().quality = quality
            oEvents.append(event)
        # end for // loop over e

        #print notFound
        print(self.rank, missingStations)

        cat = OCatalog(events=oEvents)
        ofn = self.output_path + '/%d.xml' % (self.rank)
        cat.write(ofn, format='SC3ML')
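# Illustrative sketch (not from the original class): the cull step above keeps
# an arrival only if its catalogued epicentral distance agrees, within 0.5
# degrees, with the great-circle distance recomputed from the relocated
# station coordinates. The helper name and coordinates below are hypothetical.
from obspy.geodetics.base import gps2dist_azimuth
from obspy.geodetics import kilometers2degrees

def distance_is_consistent(origin_lat, origin_lon, sta_lat, sta_lon,
                           catalogued_distance_deg, tol_deg=0.5):
    dist_m, _, _ = gps2dist_azimuth(origin_lat, origin_lon, sta_lat, sta_lon)
    recomputed_deg = kilometers2degrees(dist_m / 1e3)
    return abs(catalogued_distance_deg - recomputed_deg) <= tol_deg

# e.g. distance_is_consistent(-20.0, 135.0, -19.5, 140.2, 4.9)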
Exemplo n.º 23
0
def sdxtoquakeml(sdx_dir, out_xml,
                 time_uncertainties=[0.1, 0.2, 0.5, 0.8, 1.5],
                 catalog_description="", catalog_version="",
                 agency_id="", author="", vel_mod_id=""):
    """
    Convert SDX to QuakeML format using ObsPy inventory structure.
    SDX filename prefix is stored under event description.
    Input parameters:
        - sdx_dir: directory containing sdx files (required)
        - out_xml: Filename of quakeML file (required)
        - time_uncertainties: List containing time uncertainties in seconds
          for mapping from weights 0-4, respectively (optional)
        - catalog_description (optional)
        - catalog_version (optional)
        - agency_id (optional)
        - author (optional)
        - vel_mod_id (optional)
    Output:
        - xml catalog in QuakeML format.
    """

    # Prepare catalog
    cat = Catalog(description=catalog_description,
                  creation_info=CreationInfo(
                      author=author, agency_id=agency_id,
                      version=catalog_version))

    # Read in sdx files in directory, recursively
    files = glob.glob("{:}/**/*.sdx".format(sdx_dir), recursive=True)
    if len(files) == 0:
        print("No SDX files found in path. Exiting")
    for sdx_file_path in files:
        print("Working on ", sdx_file_path.split('/')[-1])

        # Set-up event
        evt_id = (sdx_file_path.split('/')[-1])[:-4]
        event = Event(event_type="earthquake", creation_info=CreationInfo(
            author=author, agency_id=agency_id),
            event_descriptions=[EventDescription(text=evt_id)])

        # Get station details, append to arrays
        sdx_file = open(sdx_file_path, "r")
        stations = []
        for line in sdx_file:
            if line.rstrip() == "station":
                sdxstation = list(islice(sdx_file, 5))
                stations.append([sdxstation[1].split()[0],
                                 float(sdxstation[2].split()[0]),
                                 float(sdxstation[3].split()[0]),
                                 float(sdxstation[4].split()[0])])
        sdx_file.close()

        # Find origin details, append to origin object
        sdx_file = open(sdx_file_path, "r")
        found_origin = False
        for line in sdx_file:
            if line.rstrip() == "origin":
                found_origin = True
                sdxorigin = list(islice(sdx_file, 17))
                orig_time = ("{:}T{:}".
                             format(sdxorigin[1][0:10].replace(".", "-"),
                                    sdxorigin[1][11:23]))
                evt_lat = float(sdxorigin[2].split()[0])
                evt_lon = float(sdxorigin[3].split()[0])
                evt_depth = float(sdxorigin[4].split()[0])
                creation_time = UTCDateTime(
                    "{:}T{:}".format(sdxorigin[16].split()[6][0:10]
                                     .replace(".", "-"),
                                     sdxorigin[16].split()[6][11:23]))
                num_arrivals = int(sdxorigin[12].split()[0])
                num_arrivals_p = (int(sdxorigin[12].split()[0]) -
                                  int(sdxorigin[12].split()[1]))
                min_dist = float(sdxorigin[12].split()[9])
                max_dist = float(sdxorigin[12].split()[10])
                med_dist = float(sdxorigin[12].split()[11])
                max_az_gap = float(sdxorigin[12].split()[6])

                origin = Origin(time=UTCDateTime(orig_time), longitude=evt_lon,
                                latitude=evt_lat, depth=evt_depth*-1000,
                                earth_model_id=vel_mod_id,
                                origin_type="hypocenter",
                                evaluation_mode="manual",
                                evaluation_status="confirmed",
                                method_id=ResourceIdentifier(id="SDX_hypo71"),
                                creation_info=CreationInfo(
                                    creation_time=creation_time, author=author,
                                    agency_id=agency_id),
                                quality=OriginQuality(
                                    associated_phase_count=num_arrivals,
                                    used_phase_count=num_arrivals,
                                    associated_station_count=num_arrivals_p,
                                    used_station_count=num_arrivals_p,
                                    azimuthal_gap=max_az_gap,
                                    minimum_distance=min_dist,
                                    maximum_distance=max_dist,
                                    median_distance=med_dist))
                event.origins.append(origin)

        sdx_file.close()

        # Skip event if no computed origin
        if found_origin is False:
            print("No origin found ... skipping event")
            continue

        # Get pick details, append to pick and arrival objects
        sdx_file = open(sdx_file_path, "r")
        found_pick = False
        for line in sdx_file:
            if line.rstrip() == "pick":
                found_pick = True
                sdxpick = list(islice(sdx_file, 15))
                pick_time = UTCDateTime(
                    "{:}T{:}".format(sdxpick[1][0:10].replace(".", "-"),
                                     sdxpick[1][11:23]))
                network = sdxpick[2].split()[0]
                station = sdxpick[2].split()[1]
                location = sdxpick[2].split()[2]
                if "NOT_SET" in location:
                    location = ""
                channel = sdxpick[2].split()[3]
                onset = sdxpick[8].split()[0]
                if onset == "0":
                    pickonset = "emergent"
                elif onset == "1":
                    pickonset = "impulsive"
                elif onset == "2":
                    pickonset = "questionable"
                phase = sdxpick[9].split()[0]
                polarity = sdxpick[10].split()[0]
                if polarity == "0":
                    pol = "positive"
                elif polarity == "1":
                    pol = "negative"
                elif polarity == "2":
                    pol = "undecidable"
                weight = int(sdxpick[11].split()[0])
                creation_time = UTCDateTime(
                    "{:}T{:}".format(sdxpick[14].split()[6][0:10]
                                     .replace(".", "-"),
                                     sdxpick[14].split()[6][11:23]))
                pick = Pick(time=pick_time,
                            waveform_id=WaveformStreamID(
                                network_code=network, station_code=station,
                                location_code=location, channel_code=channel),
                            time_errors=time_uncertainties[weight],
                            evaluation_mode="manual",
                            evaluation_status="confirmed", onset=pickonset,
                            phase_hint=phase, polarity=pol,
                            method_id=ResourceIdentifier(id="SDX"),
                            creation_info=CreationInfo(
                                creation_time=creation_time))
                event.picks.append(pick)

                # Compute azimuth, distance, append to arrival object
                for i in range(0, len(stations)):
                    if stations[i][0] == station:
                        azimuth = (gps2dist_azimuth(evt_lat, evt_lon,
                                                    stations[i][1],
                                                    stations[i][2])[1])
                        dist_deg = locations2degrees(evt_lat, evt_lon,
                                                     stations[i][1],
                                                     stations[i][2])
                        arrival = Arrival(phase=phase,
                                          pick_id=pick.resource_id,
                                          azimuth=azimuth, distance=dist_deg,
                                          time_weight=1.00)
                        event.origins[0].arrivals.append(arrival)

        # Skip event if no picks
        if found_pick is False:
            print("No picks found ... skipping event")
            continue

        # Set preferred origin and append event to catalogue
        event.preferred_origin_id = event.origins[0].resource_id
        cat.events.append(event)

        sdx_file.close()

    cat.write(out_xml, format="QUAKEML")
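# Hypothetical usage sketch for sdxtoquakeml: the directory, file names and
# metadata below are made up; only the call signature matches the function
# defined above. The converted catalogue is read back with ObsPy as a check.
if __name__ == "__main__":
    sdxtoquakeml("sdx_events", "catalogue.xml",
                 time_uncertainties=[0.1, 0.2, 0.5, 0.8, 1.5],
                 catalog_description="Example conversion",
                 agency_id="XX", author="someone",
                 vel_mod_id="quakeml:example/velmodel/1d")

    from obspy import read_events
    cat_check = read_events("catalogue.xml")
    print(cat_check)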
Exemplo n.º 24
0
AC.decimate(factor=4)
f_cutoff = 1.0

RLAS.filter('lowpass', freq=f_cutoff, corners=2, zerophase=True)
AC.filter('lowpass', freq=f_cutoff, corners=2, zerophase=True)

# event location from event info
source_latitude = event.origins[0].latitude
source_longitude = event.origins[0].longitude

# station location (Wettzell)
station_latitude = 49.144001
station_longitude = 12.8782

# theoretical backazimuth and distance
baz = gps2dist_azimuth(source_latitude, source_longitude, station_latitude, station_longitude)

print('Epicentral distance [m]: ',baz[0])
print('Theoretical azimuth [deg]: ', baz[1])
print('Theoretical backazimuth [deg]: ', baz[2])

# rotate N-E seismometer components to radial-transverse (RT) components using the theoretical back azimuth (BAz)
AC_original = AC.copy()
#normalize 
AC_original.normalize()
RLAS.normalize()
AC.normalize()
AC.rotate(method='NE->RT',back_azimuth=baz[2])

sampling_rate = int(RLAS[0].stats.sampling_rate)
time = np.linspace(0, len(AC[0].data)/sampling_rate,len(AC[0].data))
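# Minimal sketch (not part of the original script) of what the 'NE->RT'
# rotation above does: project the N and E components onto radial and
# transverse directions using the back azimuth. The sign convention follows
# the one commonly used (e.g. ObsPy's rotate_ne_rt); the input arrays in the
# commented call are hypothetical.
def rotate_ne_to_rt(n, e, back_azimuth_deg):
    ba = np.radians(back_azimuth_deg)
    r = -e * np.sin(ba) - n * np.cos(ba)   # radial component
    t = -e * np.cos(ba) + n * np.sin(ba)   # transverse component
    return r, t

# r, t = rotate_ne_to_rt(north_trace_data, east_trace_data, baz[2])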
def fit_spectra(st, origin, kappa=0.035):
    """
    Fit spectra by varying stress_drop for a given kappa.

    Args:
        st (StationStream):
            Stream of data.
        origin (ScalarEvent):
             ScalarEvent object.
        kappa (float):
            Site diminution factor (sec). Typical values are about 0.03-0.04
            for active crustal regions and about 0.006 for stable continental
            regions.

    Returns:
        StationStream with fitted spectra parameters.
    """
    for tr in st:
        # Only do this for horizontal channels for which the smoothed spectra
        # has been computed.
        if ('Z' not in tr.stats['channel'].upper()) & \
                tr.hasParameter('smooth_signal_spectrum'):
            event_mag = origin.magnitude
            event_lon = origin.longitude
            event_lat = origin.latitude
            dist = gps2dist_azimuth(
                lat1=event_lat,
                lon1=event_lon,
                lat2=tr.stats['coordinates']['latitude'],
                lon2=tr.stats['coordinates']['longitude']
            )[0] * M_TO_KM

            # Use the smoothed spectra for fitting
            smooth_signal_dict = tr.getParameter('smooth_signal_spectrum')
            freq = np.array(smooth_signal_dict['freq'])
            obs_spec = np.array(smooth_signal_dict['spec'])

            # Loop over trial stress drops and compute the RMS fit
            # of the spectra
            rms = []
            rms_stress = []
            rms_f0 = []
            for i in range(len(TRIAL_STRESS_DROPS)):
                # Pick min f for cost function that is slightly less than
                # corner frequency.
                f0 = brune_f0(event_mag, TRIAL_STRESS_DROPS[i])
                fmin = FMIN_FAC * f0
                fmax = -np.log(FMAX_FAC) / np.pi / kappa

                rms_f0.append(f0)
                mod_spec = model(
                    freq, dist, kappa,
                    event_mag, TRIAL_STRESS_DROPS[i]
                )

                # Compute RMS fit in log space, append to list
                log_residuals = (
                    np.log(obs_spec[(freq >= fmin) & (freq <= fmax)]) -
                    np.log(mod_spec[(freq >= fmin) & (freq <= fmax)])
                )
                rms.append(np.sqrt(np.mean((log_residuals)**2)))

                # Track the trial stress drop value
                rms_stress.append(TRIAL_STRESS_DROPS[i])

            # Find the kappa-stress pair with best fit
            if not np.all(np.isnan(rms)):
                idx = np.where(rms == np.nanmin(rms))[0][0]
                fit_spectra_dict = {
                    'stress_drop': rms_stress[idx],
                    'epi_dist': dist,
                    'kappa': kappa,
                    'magnitude': event_mag,
                    'f0': rms_f0[idx]
                }
                tr.setParameter('fit_spectra', fit_spectra_dict)

    return st
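# Illustrative sketch (assumptions noted): the stress-drop search above scores
# each trial model by the RMS of log-spectral residuals restricted to the band
# [fmin, fmax]. The helper name and the arrays in the commented example are
# hypothetical stand-ins.
import numpy as np

def log_rms_misfit(freq, obs_spec, mod_spec, fmin, fmax):
    band = (freq >= fmin) & (freq <= fmax)
    residuals = np.log(obs_spec[band]) - np.log(mod_spec[band])
    return np.sqrt(np.mean(residuals ** 2))

# The best trial stress drop is the one minimising this value, e.g.:
# idx = int(np.nanargmin([log_rms_misfit(f, obs, m, fmin, fmax) for m in trial_models]))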
Exemplo n.º 26
0
def Acces_Blindtest():
    # BLINDTEST_MSEED = '/home/nienke/Documents/Applied_geophysics/Thesis/anaconda/Database/data_Nienke/M3.5_8213363_deg_2019-02-15.mseed'
    BLINDTEST_MSEED = '/home/nienke/Documents/Applied_geophysics/Thesis/anaconda/Database/data_Nienke/M5.0_3914855_deg_2019-09-22.mseed'
    BLINDTEST_XML = BLINDTEST_MSEED.replace(".mseed", ".xml")

    # Initiate Parameters:
    get_parameters = Get_Paramters()
    PRIOR = get_parameters.get_prior()
    VALUES = get_parameters.specifications()
    VALUES['npts'] = 30000
    VALUES[
        'directory'] = '/home/nienke/Documents/Applied_geophysics/Thesis/anaconda/Blindtest'

    # st = read(VALUES['directory'] + '/bw.mseed')
    # st_reject = read(VALUES['directory'] + '/bw_reject.mseed')

    # Initiate the databases from instaseis:
    db = instaseis.open_db(PRIOR['VELOC'])
    tr_obs = obspy.read(BLINDTEST_MSEED)
    # tr_obs.plot(outfile=VALUES['directory'] + '/Observed')
    tr_obs.integrate()
    # tr_obs.plot(outfile=VALUES['directory'] + '/Observed_integrated')
    # source = instaseis.Source.parse(BLINDTEST_XML)
    blindtest = Blindtest()
    events = blindtest.get_events(BLINDTEST_XML)
    # get_parameters.get_prior_blindtest(events[0])
    time, depth, la_s, lo_s = blindtest.get_pref_origin(events[0])

    dist, az, baz = gps2dist_azimuth(lat1=la_s,
                                     lon1=lo_s,
                                     lat2=PRIOR['la_r'],
                                     lon2=PRIOR['lo_r'],
                                     a=PRIOR['radius'],
                                     f=0)
    epi = kilometer2degrees(dist, radius=PRIOR['radius'])
    PRIOR['az'] = az
    PRIOR['baz'] = baz
    PRIOR['epi']['range_min'] = epi - 5
    PRIOR['epi']['range_max'] = epi + 5
    PRIOR['epi']['spread'] = 1
    PRIOR['depth']['range_min'] = depth - 10000
    PRIOR['depth']['range_max'] = depth + 10000
    PRIOR['network'] = tr_obs.traces[0].meta.network
    PRIOR['location'] = tr_obs.traces[0].meta.location
    PRIOR['station'] = tr_obs.traces[0].meta.station
    est_noise = Create_observed(PRIOR, db)
    create = Source_code(PRIOR['VELOC_taup'])
    traces_obs, p_obs, s_obs, start_time_p, start_time_s = create.get_window_obspy(
        tr_obs, epi, depth, time, VALUES['npts'])
    PRIOR['var_est'] = est_noise.get_var_data(start_time_p, tr_obs)
    # time_at_receiver = create.get_receiver_time(epi,depth, time)
    plt.figure()

    catalog_path = '/home/nienke/Documents/Applied_geophysics/Thesis/anaconda/Additional_scripts/MQScatalog_withFrequencies/MQS_absolute_withFrequencyInfo.xml'
    catalog = Blindtest()
    events_catalog = catalog.get_events(catalog_path)

    for v in events_catalog:
        t, d, lat_ev, lo_ev = catalog.get_pref_origin(v)
        if time.date == t.date:
            Pick_event = v
            break
    PRIOR['M0'] = catalog.get_pref_scalarmoment(Pick_event)
    picks_surface = get_phase_picks(Pick_event, pick_type='surface')
    R_env_obs, L_env_obs = blindtest.pick_sw(tr_obs,
                                             picks_surface,
                                             epi,
                                             PRIOR,
                                             30000,
                                             VALUES['directory'],
                                             plot_modus=False)

    start_sample = create_starting_sample()
    strike = np.random.uniform(PRIOR['strike']['range_min'],
                               PRIOR['strike']['range_max'])
    dip = np.random.uniform(PRIOR['dip']['range_min'],
                            PRIOR['dip']['range_max'])
    rake = np.random.uniform(PRIOR['rake']['range_min'],
                             PRIOR['rake']['range_max'])
    sample_path = start_sample.get_sample_manual(
        epi, depth, strike, dip, rake,
        VALUES['directory'] + '/Blindtest_trialrun_sample.txt')
    # sample_path = '/home/nienke/Documents/Applied_geophysics/Thesis/anaconda/Blindtest/Blindtest_trialrun_sample.txt'
    mcmc = MCMC_stream(R_env_obs=R_env_obs,
                       L_env_obs=L_env_obs,
                       total_traces_obs=traces_obs,
                       P_traces_obs=p_obs,
                       S_traces_obs=s_obs,
                       PRIOR=PRIOR,
                       db=db,
                       specification_values=VALUES,
                       time_at_receiver=time,
                       start_sample_path=sample_path,
                       picked_events=picks_surface,
                       full_obs_trace=tr_obs,
                       P_start=start_time_p,
                       S_start=start_time_s)

    mcmc.start_MCMC(VALUES['directory'] + '/Blindtest_trialrun.txt')
Exemplo n.º 27
0
def plot_moveout(streams, epilat, epilon, channel, cmap='viridis',
                 figsize=None, file=None, minfontsize=14, normalize=False,
                 scale=1, title=None, xlabel=None, ylabel=None):
    """
    Create moveout plots.

    Args:
        streams (list):
            List of obspy streams of acceleration data with units of gal (cm/s/s).
        epilat (float):
            Epicenter latitude.
        epilon (float):
            Epicenter longitude.
        channel (str):
            Channel code (e.g. 'HNZ') used to select the trace to plot from
            each stream.
        cmap (str):
            Colormap name.
        figsize (tuple):
            Tuple of height and width. Default is None.
        file (str):
            File where the image will be saved. Default is None.
        minfontsize (int):
            Minimum font size. Default is 14.
        normalize (bool):
            Normalize the data. Default is False.
        scale (int, float):
            Value to scale the trace by. Default is 1.
        title (str):
            Title for plot. Default is None.
        xlabel (str):
            Label for x axis. Default is None.
        ylabel (str):
            Label for y axis. Default is None.

    Returns:
        tuple: (Figure, matplotlib.axes._subplots.AxesSubplot)
    """
    if len(streams) < 1:
        raise Exception('No streams provided.')

    colors = cm.get_cmap(cmap)
    color_array = colors(np.linspace(0, 1, len(streams)))
    if figsize is None:
        figsize = (10, len(streams))
    fig, ax = plt.subplots(figsize=figsize)
    for idx, stream in enumerate(streams):
        traces = stream.select(channel=channel)
        if len(traces) > 0:
            trace = traces[0]
            if normalize or scale != 1:
                warnings.filterwarnings("ignore", category=FutureWarning)
                trace.normalize()
            trace.data *= scale
            lat = trace.stats.coordinates['latitude']
            lon = trace.stats.coordinates['longitude']
            distance = gps2dist_azimuth(lat, lon, epilat, epilon)[0] / 1000
            times = []
            start = trace.stats.starttime
            for time in trace.times():
                starttime = start
                td = datetime.timedelta(seconds=time)
                ti = starttime + td
                times += [ti.datetime]
            label = trace.stats.network + '.' + \
                trace.stats.station + '.' + trace.stats.channel
            ax.plot(times, trace.data + distance, label=label,
                    color=color_array[idx])
    ax.invert_yaxis()
    ax.legend(bbox_to_anchor=(1, 1), fontsize=minfontsize)
    if title is None:
        title = ('Event on ' + str(starttime.month) + '/'
                 + str(starttime.day) + '/' + str(starttime.year))
        if scale != 1:
            title += ' scaled by ' + str(scale)
    if xlabel is None:
        xlabel = 'Time (H:M:S)'
    if ylabel is None:
        ylabel = 'Distance (km)'
    ax.set_title(title, fontsize=minfontsize + 4)
    ax.set_xlabel(xlabel, fontsize=minfontsize)
    ax.set_ylabel(ylabel, fontsize=minfontsize)
    ax.xaxis.set_tick_params(labelsize=minfontsize - 2)
    ax.yaxis.set_tick_params(labelsize=minfontsize - 2)
    if file is not None:
        fig.savefig(file, format='png')
    plt.show()
    return (fig, ax)
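# Hypothetical usage sketch (everything here is made up): read a set of
# processed miniSEED files into a list of streams and plot their moveout for a
# vertical channel. Assumes each trace already carries stats.coordinates.
import glob as _glob
from obspy import read as _read

example_streams = [_read(f) for f in _glob.glob("data/*.mseed")]
if example_streams:
    fig, ax = plot_moveout(example_streams, epilat=35.7, epilon=-117.5,
                           channel='HNZ', normalize=True, scale=10,
                           file='moveout.png')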
Exemplo n.º 28
0
def process(asdf_source, event_folder, output_path, min_magnitude, restart,
            save_quality_plots):
    """
    ASDF_SOURCE: Text file containing a list of paths to ASDF files
    EVENT_FOLDER: Path to folder containing event files\n
    OUTPUT_PATH: Output folder \n
    """

    comm = MPI.COMM_WORLD
    nproc = comm.Get_size()
    rank = comm.Get_rank()
    proc_workload = None

    if (rank == 0):

        def outputConfigParameters():
            # output config parameters
            fn = 'pick.%s.cfg' % (datetime.now().strftime('%Y-%m-%d-%H-%M-%S'))
            fn = os.path.join(output_path, fn)

            f = open(fn, 'w+')
            f.write('Parameter Values:\n\n')
            f.write('%25s\t\t: %s\n' % ('ASDF_SOURCE', asdf_source))
            f.write('%25s\t\t: %s\n' % ('EVENT_FOLDER', event_folder))
            f.write('%25s\t\t: %s\n' % ('OUTPUT_PATH', output_path))
            f.write('%25s\t\t: %s\n' % ('MIN_MAGNITUDE', min_magnitude))
            f.write('%25s\t\t: %s\n' %
                    ('RESTART_MODE', 'TRUE' if restart else 'FALSE'))
            f.write('%25s\t\t: %s\n' %
                    ('SAVE_PLOTS', 'TRUE' if save_quality_plots else 'FALSE'))
            f.close()

        # end func

        outputConfigParameters()
    # end if

    # ==================================================
    # Create output-folder for snr-plots
    # ==================================================
    plot_output_folder = None
    if (save_quality_plots):
        plot_output_folder = os.path.join(output_path, 'plots')
        if (rank == 0):
            if (not os.path.exists(plot_output_folder)):
                os.mkdir(plot_output_folder)
        # end if
        comm.Barrier()
    # end if

    # ==================================================
    # Read catalogue and retrieve origin times
    # ==================================================
    cat = CatalogCSV(event_folder)
    events = cat.get_events()
    originTimestamps = cat.get_preferred_origin_timestamps()

    # ==================================================
    # Create lists of pickers for both p- and s-arrivals
    # ==================================================
    sigmalist = np.arange(8, 3, -1)
    pickerlist_p = []
    pickerlist_s = []
    for sigma in sigmalist:
        picker_p = aicdpicker.AICDPicker(t_ma=5,
                                         nsigma=sigma,
                                         t_up=1,
                                         nr_len=5,
                                         nr_coeff=2,
                                         pol_len=10,
                                         pol_coeff=10,
                                         uncert_coeff=3)
        picker_s = aicdpicker.AICDPicker(t_ma=15,
                                         nsigma=sigma,
                                         t_up=1,
                                         nr_len=5,
                                         nr_coeff=2,
                                         pol_len=10,
                                         pol_coeff=10,
                                         uncert_coeff=3)

        pickerlist_p.append(picker_p)
        pickerlist_s.append(picker_s)
    # end for

    # ==================================================
    # Define theoretical model
    # Instantiate data-access object
    # Retrieve estimated workload
    # ==================================================
    taupyModel = TauPyModel(model='iasp91')
    fds = FederatedASDFDataSet(asdf_source, use_json_db=False, logger=None)
    workload = getWorkloadEstimate(fds, originTimestamps)

    # ==================================================
    # Define output header and open output files
    # depending on the mode of operation (fresh/restart)
    # ==================================================
    header = '#eventID originTimestamp mag originLon originLat originDepthKm net sta cha pickTimestamp stationLon stationLat az baz distance ttResidual snr qualityMeasureCWT domFreq qualityMeasureSlope bandIndex nSigma\n'
    ofnp = os.path.join(output_path, 'p_arrivals.%d.txt' % (rank))
    ofns = os.path.join(output_path, 's_arrivals.%d.txt' % (rank))
    ofp = None
    ofs = None
    if (restart == False):
        ofp = open(ofnp, 'w+')
        ofs = open(ofns, 'w+')
        ofp.write(header)
        ofs.write(header)
    else:
        ofp = open(ofnp, 'a+')
        ofs = open(ofns, 'a+')
    # end if

    progTracker = ProgressTracker(output_folder=output_path,
                                  restart_mode=restart)
    totalTraceCount = 0
    for nc, sc, start_time, end_time in fds.local_net_sta_list():
        day = 24 * 3600
        dayCount = 0
        curr = start_time
        traceCountP = 0
        pickCountP = 0
        traceCountS = 0
        pickCountS = 0
        sw_start = datetime.now()
        step = day
        while (curr < end_time):
            if (curr + step > end_time):
                step = end_time - curr
            # end if

            eventIndices = (np.where((originTimestamps >= curr.timestamp) & \
                                     (originTimestamps <= (curr + day).timestamp)))[0]

            if (eventIndices.shape[0] > 0):
                totalTraceCount += 1
                stations = fds.get_stations(curr,
                                            curr + day,
                                            network=nc,
                                            station=sc)
                stations_zch = [s for s in stations
                                if 'Z' in s[3]]  # only Z channels
                stations_nch = [
                    s for s in stations if 'N' in s[3] or '1' in s[3]
                ]  # only N channels
                stations_ech = [
                    s for s in stations if 'E' in s[3] or '2' in s[3]
                ]  # only E channels

                for codes in stations_zch:
                    if (progTracker.increment()): pass
                    else: continue

                    st = fds.get_waveforms(codes[0],
                                           codes[1],
                                           codes[2],
                                           codes[3],
                                           curr,
                                           curr + step,
                                           automerge=True,
                                           trace_count_threshold=200)

                    if (len(st) == 0): continue
                    dropBogusTraces(st)

                    slon, slat = codes[4], codes[5]
                    for ei in eventIndices:
                        event = events[ei]
                        po = event.preferred_origin
                        da = gps2dist_azimuth(po.lat, po.lon, slat, slon)
                        mag = None
                        if (event.preferred_magnitude):
                            mag = event.preferred_magnitude.magnitude_value
                        elif (len(po.magnitude_list)):
                            mag = po.magnitude_list[0].magnitude_value
                        if (mag == None): mag = np.NaN

                        if (np.isnan(mag) or mag < min_magnitude): continue

                        result = extract_p(
                            taupyModel,
                            pickerlist_p,
                            event,
                            slon,
                            slat,
                            st,
                            plot_output_folder=plot_output_folder)
                        if (result):
                            picklist, residuallist, snrlist, bandindex, pickerindex = result

                            arcdistance = kilometers2degrees(da[0] / 1e3)
                            for ip, pick in enumerate(picklist):
                                line = '%s %f %f %f %f %f ' \
                                       '%s %s %s %f %f %f ' \
                                       '%f %f %f ' \
                                       '%f %f %f %f %f '\
                                       '%d %d\n' % (event.public_id, po.utctime.timestamp, mag, po.lon, po.lat, po.depthkm,
                                                    codes[0], codes[1], codes[3], pick.timestamp, slon, slat,
                                                    da[1], da[2], arcdistance,
                                                    residuallist[ip], snrlist[ip, 0], snrlist[ip, 1], snrlist[ip, 2], snrlist[ip, 3],
                                                    bandindex, sigmalist[pickerindex])
                                ofp.write(line)
                            # end for
                            ofp.flush()
                            pickCountP += 1
                        # end if

                        if (len(stations_nch) == 0 and len(stations_ech) == 0):
                            result = extract_s(
                                taupyModel,
                                pickerlist_s,
                                event,
                                slon,
                                slat,
                                st,
                                None,
                                da[2],
                                plot_output_folder=plot_output_folder)
                            if (result):
                                picklist, residuallist, snrlist, bandindex, pickerindex = result

                                arcdistance = kilometers2degrees(da[0] / 1e3)
                                for ip, pick in enumerate(picklist):
                                    line = '%s %f %f %f %f %f ' \
                                           '%s %s %s %f %f %f ' \
                                           '%f %f %f ' \
                                           '%f %f %f %f %f ' \
                                           '%d %d\n' % (event.public_id, po.utctime.timestamp, mag, po.lon, po.lat, po.depthkm,
                                                        codes[0], codes[1], codes[3], pick.timestamp, slon, slat,
                                                        da[1], da[2], arcdistance,
                                                        residuallist[ip], snrlist[ip, 0], snrlist[ip, 1], snrlist[ip, 2], snrlist[ip, 3],
                                                        bandindex, sigmalist[pickerindex])
                                    ofs.write(line)
                                # end for
                                ofs.flush()
                                pickCountS += 1
                            # end if
                        # end if
                    # end for

                    traceCountP += len(st)
                # end for

                if (len(stations_nch) > 0
                        and len(stations_nch) == len(stations_ech)):
                    for codesn, codese in zip(stations_nch, stations_ech):
                        if (progTracker.increment()): pass
                        else: continue

                        stn = fds.get_waveforms(codesn[0],
                                                codesn[1],
                                                codesn[2],
                                                codesn[3],
                                                curr,
                                                curr + step,
                                                automerge=True,
                                                trace_count_threshold=200)
                        ste = fds.get_waveforms(codese[0],
                                                codese[1],
                                                codese[2],
                                                codese[3],
                                                curr,
                                                curr + step,
                                                automerge=True,
                                                trace_count_threshold=200)

                        dropBogusTraces(stn)
                        dropBogusTraces(ste)

                        if (len(stn) == 0): continue
                        if (len(ste) == 0): continue

                        slon, slat = codesn[4], codesn[5]

                        for ei in eventIndices:
                            event = events[ei]
                            po = event.preferred_origin
                            da = gps2dist_azimuth(po.lat, po.lon, slat, slon)

                            mag = None
                            if (event.preferred_magnitude):
                                mag = event.preferred_magnitude.magnitude_value
                            elif (len(po.magnitude_list)):
                                mag = po.magnitude_list[0].magnitude_value
                            if (mag == None): mag = np.NaN

                            if (np.isnan(mag) or mag < min_magnitude): continue

                            result = extract_s(
                                taupyModel,
                                pickerlist_s,
                                event,
                                slon,
                                slat,
                                stn,
                                ste,
                                da[2],
                                plot_output_folder=plot_output_folder)
                            if (result):
                                picklist, residuallist, snrlist, bandindex, pickerindex = result

                                arcdistance = kilometers2degrees(da[0] / 1e3)
                                for ip, pick in enumerate(picklist):
                                    line = '%s %f %f %f %f %f ' \
                                           '%s %s %s %f %f %f ' \
                                           '%f %f %f ' \
                                           '%f %f %f %f %f ' \
                                           '%d %d\n' % (event.public_id, po.utctime.timestamp, mag, po.lon, po.lat, po.depthkm,
                                                        codesn[0], codesn[1], '00T', pick.timestamp, slon, slat,
                                                        da[1], da[2], arcdistance,
                                                        residuallist[ip], snrlist[ip, 0], snrlist[ip, 1], snrlist[ip, 2], snrlist[ip, 3],
                                                        bandindex, sigmalist[pickerindex])
                                    ofs.write(line)
                                # end for
                                ofs.flush()
                                pickCountS += 1
                            # end if
                        # end for

                        traceCountS += (len(stn) + len(ste))
                    # end for
                # end if
            # end if
            curr += step
            dayCount += 1
        # wend
        sw_stop = datetime.now()
        totalTime = (sw_stop - sw_start).total_seconds()

        gc.collect()
        print('(Rank %d: %5.2f%%, %d/%d) Processed %d traces and found %d p-arrivals and %d s-arrivals for '
              'network %s station %s in %f s. Memory usage: %5.2f MB.' %
              (rank, (float(totalTraceCount) / float(workload) * 100) if workload > 0 else 100, totalTraceCount, workload,
               traceCountP + traceCountS, pickCountP, pickCountS, nc, sc, totalTime,
               round(psutil.Process().memory_info().rss / 1024. / 1024., 2)))
    # end for
    ofp.close()
    ofs.close()

    print('Processing complete on rank %d' % (rank))

    del fds
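# Illustrative sketch (not from the original source): the day-loop above picks
# the catalogue events whose preferred-origin timestamps fall inside the
# current waveform window with a vectorised comparison. The timestamps below
# are hypothetical epoch seconds.
import numpy as np

origin_timestamps = np.array([1.60e9, 1.61e9, 1.62e9])
window_start, window_end = 1.605e9, 1.615e9
event_indices = np.where((origin_timestamps >= window_start) &
                         (origin_timestamps <= window_end))[0]
print(event_indices)  # indices of events inside the window, here [1]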
Exemplo n.º 29
0
def plot_moveout(streams,
                 epilat,
                 epilon,
                 orientation=None,
                 max_dist=None,
                 figsize=(10, 15),
                 file=None,
                 minfontsize=14,
                 normalize=True,
                 factor=0.2,
                 alpha=0.25):
    """
    Create moveout plot.

    Args:
        streams (StreamCollection):
            StreamCollection of acceleration data with units of gal (cm/s/s).
        epilat (float):
            Epicenter latitude.
        epilon (float):
            Epicenter longitude.
        orientation (str):
            Orientation code (str) of each stream to view. Default is None.
            If None, then the orientation code with the highest number of
            traces will be used.
        max_dist (float):
            Maximum distance (in km) to plot. Default is None (no distance limit).
        figsize (tuple):
            Tuple of height and width. Default is (10, 15).
        file (str):
            File where the image will be saved. Default is None.
        minfontsize (int):
            Minimum font size. Default is 14.
        normalize (bool):
            Normalize the data. Default is True.
        factor (int, float):
            Factor for scaling the trace. Default is 0.2, meaning that the
            trace with the greatest amplitude variation will occupy 20% of the
            vertical space in the plot.
        alpha (float):
            Alpha value for plotting the traces.

    Returns:
        tuple: (Figure, matplotlib.axes._subplots.AxesSubplot)
    """
    if len(streams) < 1:
        raise Exception('No streams provided.')

    fig, ax = plt.subplots(figsize=figsize)

    # If no channel is given, then find the orientation code with the greatest
    # number of traces
    if orientation is None:
        orientation_codes = []
        for st in streams:
            if st.passed:
                for tr in st:
                    orientation_codes.append(tr.stats.channel[-1])
        for i, code in enumerate(orientation_codes):
            if code == '1':
                orientation_codes[i] = 'N'
            if code == '2':
                orientation_codes[i] = 'E'
            if code == '3':
                orientation_codes[i] = 'Z'
        channel_counter = Counter(orientation_codes)
        if channel_counter:
            orientation = max(channel_counter, key=channel_counter.get)
        else:
            return (fig, ax)

    valid_channels = []
    if orientation in ['N', '1']:
        valid_channels = ['N', '1']
    elif orientation in ['E', '2']:
        valid_channels = ['E', '2']
    elif orientation in ['Z', '3']:
        valid_channels = ['Z', '3']

    # Create a copy of the streams to avoid modifying the data when normalizing
    streams_copy = copy.deepcopy(streams)

    # Determine the distance and amplitude variation for scaling
    distances = []
    max_amp_variation = 0
    for st in streams:
        if st.passed:
            dist = gps2dist_azimuth(st[0].stats.coordinates['latitude'],
                                    st[0].stats.coordinates['longitude'],
                                    epilat, epilon)[0] / 1000
            max_amp_var_st = 0
            for tr in st:
                amp_var_tr = abs(max(tr.data) - min(tr.data))
                if normalize:
                    amp_var_tr *= dist
                if amp_var_tr > max_amp_var_st:
                    max_amp_var_st = amp_var_tr
            if max_dist is not None:
                if dist < max_dist:
                    distances.append(dist)
                    if max_amp_var_st > max_amp_variation:
                        max_amp_variation = max_amp_var_st
            else:
                distances.append(dist)
                if max_amp_var_st > max_amp_variation:
                    max_amp_variation = max_amp_var_st

    if distances:
        scale = max(distances) * factor / max_amp_variation
    else:
        return (fig, ax)

    nplot = 0
    for idx, stream in enumerate(streams_copy):
        if not stream.passed:
            continue
        for trace in stream:
            if trace.stats.channel[-1] not in valid_channels:
                continue
            lat = trace.stats.coordinates['latitude']
            lon = trace.stats.coordinates['longitude']
            distance = gps2dist_azimuth(lat, lon, epilat, epilon)[0] / 1000

            # Don't plot if past the maximum distance
            if max_dist is not None and distance > max_dist:
                continue

            # Multiply by distance to normalize
            if normalize:
                trace.data = trace.data.astype(float) * distance
            trace.data *= scale

            times = []
            start = trace.stats.starttime
            for time in trace.times():
                starttime = start
                td = datetime.timedelta(seconds=time)
                ti = starttime + td
                times += [ti.datetime]
            ax.plot(times, trace.data + distance, c='k', alpha=alpha)
            nplot += 1
    ax.invert_yaxis()
    ax.set_title('Orientation code: %s' % orientation,
                 fontsize=minfontsize + 4)
    ax.set_ylabel('Epicentral distance (km)', fontsize=minfontsize)
    ax.yaxis.set_tick_params(labelsize=minfontsize - 2)
    plt.xticks([])

    # Get the x-coordinate for the time bar
    if nplot > 0:
        xmin, xmax = ax.get_xlim()
        xbar = num2date(xmin + 0.9 * (xmax - xmin))
        xlabel = num2date(xmin + 0.83 * (xmax - xmin))

        # Get the y-coordinates for the time bar and label
        ymax, ymin = ax.get_ylim()
        ybar = 0
        ylabel = 0.05 * (ymax - ymin)

        # Plot the time-scale bar
        plt.errorbar(xbar,
                     ybar,
                     xerr=datetime.timedelta(seconds=15),
                     color='k',
                     capsize=5)
        plt.text(xlabel, ylabel, '30 seconds', fontsize=minfontsize)

    if file is not None:
        fig.savefig(file, format='png')
    plt.show()
    return (fig, ax)
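# Illustrative sketch (assumptions noted): the scaling above makes the trace
# with the largest (optionally distance-normalised) peak-to-peak amplitude
# span `factor` times the maximum plotted distance, so every trace fits within
# the axis. The numbers below are hypothetical.
max_distance_km = 180.0
factor = 0.2
max_amp_variation = 3.5e-2          # largest peak-to-peak amplitude after normalisation
scale = max_distance_km * factor / max_amp_variation
scaled_peak_to_peak = max_amp_variation * scale
print(scaled_peak_to_peak)          # == factor * max_distance_km = 36.0 km of axis range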
Exemplo n.º 30
0
    def _event_station_metrics(self, event):
        self.eventid = event.id
        logging.info('Computing station metrics for event %s...' %
                     self.eventid)
        event_dir = os.path.join(self.gmrecords.data_path, self.eventid)
        workname = os.path.join(event_dir, WORKSPACE_NAME)
        if not os.path.isfile(workname):
            logging.info(
                'No workspace file found for event %s. Please run '
                'subcommand \'assemble\' to generate workspace file.' %
                self.eventid)
            logging.info('Continuing to next event.')
            return event.id

        self.workspace = StreamWorkspace.open(workname)
        self._get_pstreams()

        if not (hasattr(self, 'pstreams') and len(self.pstreams) > 0):
            logging.info('No streams found. Nothing to do. Goodbye.')
            self.workspace.close()
            return event.id

        rupture_file = get_rupture_file(event_dir)
        origin = Origin({
            'id': self.eventid,
            'netid': '',
            'network': '',
            'lat': event.latitude,
            'lon': event.longitude,
            'depth': event.depth_km,
            'locstring': '',
            'mag': event.magnitude,
            'time': event.time
        })
        self.origin = origin
        rupture = get_rupture(origin, rupture_file)

        sta_lats = []
        sta_lons = []
        sta_elev = []
        self.sta_repi = []
        self.sta_rhyp = []
        self.sta_baz = []
        for st in self.pstreams:
            sta_lats.append(st[0].stats.coordinates.latitude)
            sta_lons.append(st[0].stats.coordinates.longitude)
            sta_elev.append(st[0].stats.coordinates.elevation)
            geo_tuple = gps2dist_azimuth(st[0].stats.coordinates.latitude,
                                         st[0].stats.coordinates.longitude,
                                         origin.lat, origin.lon)
            self.sta_repi.append(geo_tuple[0] / M_PER_KM)
            self.sta_baz.append(geo_tuple[1])
            self.sta_rhyp.append(
                distance(st[0].stats.coordinates.longitude,
                         st[0].stats.coordinates.latitude,
                         -st[0].stats.coordinates.elevation / M_PER_KM,
                         origin.lon, origin.lat, origin.depth))

        if isinstance(rupture, PointRupture):
            self._get_ps2ff_splines()
            rjb_hat = self.rjb_spline(self.sta_repi)
            rjb_mean = rjb_hat[0]
            rjb_var = rjb_hat[1]
            rrup_hat = self.rrup_spline(self.sta_repi)
            rrup_mean = rrup_hat[0]
            rrup_var = rrup_hat[1]
            gc2_rx = np.full_like(rjb_mean, np.nan)
            gc2_ry = np.full_like(rjb_mean, np.nan)
            gc2_ry0 = np.full_like(rjb_mean, np.nan)
            gc2_U = np.full_like(rjb_mean, np.nan)
            gc2_T = np.full_like(rjb_mean, np.nan)
        else:
            logging.info('******************************')
            logging.info('* Found rupture              *')
            logging.info('******************************')
            sta_lons = np.array(sta_lons)
            sta_lats = np.array(sta_lats)
            elev = np.full_like(sta_lons, ELEVATION_FOR_DISTANCE_CALCS)
            rrup_mean, rrup_var = rupture.computeRrup(sta_lons, sta_lats, elev)
            rjb_mean, rjb_var = rupture.computeRjb(sta_lons, sta_lats, elev)
            rrup_var = np.full_like(rrup_mean, np.nan)
            rjb_var = np.full_like(rjb_mean, np.nan)
            gc2_dict = rupture.computeGC2(sta_lons, sta_lats, elev)
            gc2_rx = gc2_dict['rx']
            gc2_ry = gc2_dict['ry']
            gc2_ry0 = gc2_dict['ry0']
            gc2_U = gc2_dict['U']
            gc2_T = gc2_dict['T']

            # If we don't have a point rupture, then back azimuth needs
            # to be calculated to the closest point on the rupture
            self.sta_baz = []
            for i in range(len(self.pstreams)):
                dists = []
                bazs = []
                for quad in rupture._quadrilaterals:
                    P0, P1, P2, P3 = quad
                    for point in [P0, P1]:
                        dist, az, baz = gps2dist_azimuth(
                            point.y, point.x, sta_lats[i], sta_lons[i])
                        dists.append(dist)
                        bazs.append(baz)
                self.sta_baz.append(bazs[np.argmin(dists)])

        for i, stream in enumerate(self.pstreams):
            logging.info('Calculating station metrics for %s...' %
                         stream.get_id())
            summary = StationSummary.from_config(stream,
                                                 event=event,
                                                 config=self.gmrecords.conf,
                                                 calc_waveform_metrics=False,
                                                 calc_station_metrics=False,
                                                 rupture=rupture,
                                                 vs30_grids=self.vs30_grids)

            summary._distances = {
                'epicentral': self.sta_repi[i],
                'hypocentral': self.sta_rhyp[i],
                'rupture': rrup_mean[i],
                'rupture_var': rrup_var[i],
                'joyner_boore': rjb_mean[i],
                'joyner_boore_var': rjb_var[i],
                'gc2_rx': gc2_rx[i],
                'gc2_ry': gc2_ry[i],
                'gc2_ry0': gc2_ry0[i],
                'gc2_U': gc2_U[i],
                'gc2_T': gc2_T[i]
            }
            summary._back_azimuth = self.sta_baz[i]
            if self.vs30_grids is not None:
                for vs30_name in self.vs30_grids.keys():
                    tmpgrid = self.vs30_grids[vs30_name]
                    summary._vs30[vs30_name] = {
                        'value':
                        tmpgrid['grid_object'].getValue(
                            float(sta_lats[i]), float(sta_lons[i])),
                        'column_header':
                        tmpgrid['column_header'],
                        'readme_entry':
                        tmpgrid['readme_entry'],
                        'units':
                        tmpgrid['units']
                    }

            xmlstr = summary.get_station_xml()
            metricpath = '/'.join([
                format_netsta(stream[0].stats),
                format_nslit(stream[0].stats, stream.get_inst(), self.eventid)
            ])
            self.workspace.insert_aux(xmlstr,
                                      'StationMetrics',
                                      metricpath,
                                      overwrite=self.gmrecords.args.overwrite)
            logging.info('Added station metrics to workspace files '
                         'with tag \'%s\'.' % self.gmrecords.args.label)

        self.workspace.close()
        return event.id
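A minimal sketch of the per-station geometry computed in the loop above, assuming only ObsPy and NumPy; the hypocentral distance here is a flat-earth Pythagorean approximation rather than the `distance` helper used in the code, and the coordinates are hypothetical.

from obspy.geodetics.base import gps2dist_azimuth
import numpy as np

M_PER_KM = 1000.0

def station_geometry(sta_lat, sta_lon, sta_elev_m, eq_lat, eq_lon, eq_depth_km):
    """Return (repi_km, rhyp_km, back_azimuth_deg) for one station."""
    dist_m, az_sta_to_eq, _ = gps2dist_azimuth(sta_lat, sta_lon, eq_lat, eq_lon)
    repi = dist_m / M_PER_KM
    # depth measured from the station elevation down to the hypocenter
    dz = eq_depth_km + sta_elev_m / M_PER_KM
    rhyp = np.sqrt(repi ** 2 + dz ** 2)
    return repi, rhyp, az_sta_to_eq

print(station_geometry(35.0, -118.0, 250.0, 34.5, -117.5, 10.0))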
Exemplo n.º 31
0
def dump_picks(event_log,vel_model,gf_list,out_file):
    '''  
    Dump P and S picks to a file
    '''
    
    from obspy.taup import TauPyModel
    from obspy.geodetics.base import gps2dist_azimuth
    from obspy.geodetics import locations2degrees
    from numpy import genfromtxt,zeros,array,ones
    
    
    #Read station locations (unicode dtype so station names format cleanly under Python 3)
    sta=genfromtxt(gf_list,usecols=0,dtype='U')
    lonlat=genfromtxt(gf_list,usecols=[1,2])
    
    #Load velocity model for ray tracing
    velmod = TauPyModel(vel_model)
    
    # Get hypocenter
    f=open(event_log,'r')
    loop_go=True
    while loop_go:
        line=f.readline()
        if 'Hypocenter (lon,lat,z[km])' in line:
            s=line.split(':')[-1].replace('(','').replace(')','')
            hypo=array(s.split(',')).astype('float')
            loop_go=False

    #compute station to hypo distances
    d=zeros(len(lonlat))
    for k in range(len(lonlat)):
        d[k],az,baz=gps2dist_azimuth(lonlat[k,1],lonlat[k,0],hypo[1],hypo[0])
        d[k]=d[k]/1000
        

    f=open(out_file,'w')
    f.write('# sta,lon,lat,ptime(s),stime(s)\n')
    
    for k in range(len(sta)):
        
        
        # Ray trace
        deg=locations2degrees(hypo[1],hypo[0],lonlat[k,1],lonlat[k,0])
        try:
            arrivals = velmod.get_travel_times(source_depth_in_km=hypo[2],distance_in_degree=deg,phase_list=['P','Pn','S','Sn','p','s'])
        except:
            arrivals = velmod.get_travel_times(source_depth_in_km=hypo[2]-1.056,distance_in_degree=deg,phase_list=['P','Pn','S','Sn','p','s'])

        ptime=1e6
        stime=1e6
            
        #Determine P and S arrivals
        for kphase in range(len(arrivals)):
            if arrivals[kphase].name in ('P', 'Pn', 'p') and arrivals[kphase].time < ptime:
                ptime = arrivals[kphase].time
            if arrivals[kphase].name in ('S', 'Sn', 's') and arrivals[kphase].time < stime:
                stime = arrivals[kphase].time
            
        lon=lonlat[k,0]
        lat=lonlat[k,1] 
        station=sta[k]       
        line='%s\t%.4f\t%.4f\t%10.4f\t%10.4f\n' % (station,lon,lat,ptime,stime)
        f.write(line)
        
    f.close()
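The pick selection above boils down to taking the earliest arrival in each phase group returned by ObsPy's TauP interface; a compact, self-contained equivalent (model name, depth and coordinates are placeholders):

from obspy.taup import TauPyModel
from obspy.geodetics import locations2degrees

velmod = TauPyModel('iasp91')
deg = locations2degrees(0.0, 0.0, 2.0, 2.0)
arrivals = velmod.get_travel_times(source_depth_in_km=15.0, distance_in_degree=deg,
                                   phase_list=['P', 'Pn', 'p', 'S', 'Sn', 's'])
ptime = min((a.time for a in arrivals if a.name in ('P', 'Pn', 'p')), default=None)
stime = min((a.time for a in arrivals if a.name in ('S', 'Sn', 's')), default=None)
print(ptime, stime)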
Exemplo n.º 32
0
def main():
    parser = argparse.ArgumentParser(
        description='Create a 1D initial model'
        ' based on CRUST 1.0 and MEAN reference model.')
    parser.add_argument('file_station',
                        help='file for stations in the subarray')
    parser.add_argument('--zmax',
                        default=200.0,
                        type=float,
                        help='zmax of target model')
    parser.add_argument('--nc',
                        default=10,
                        type=int,
                        help='number of layers for the crust')
    parser.add_argument('--nm',
                        default=10,
                        type=int,
                        help='number of layers for the mantle')
    args = parser.parse_args()
    file_station = args.file_station
    zmax = args.zmax
    nc = args.nc
    nm = args.nm

    crust_model = load_crust1()
    mean_model = load_mean_model()
    stations = []
    with open(file_station, 'r') as fp:
        for line in fp:
            stations.append(np.array(line.split()[1:]).astype(float))
    stations = np.asarray(stations)

    c_lat, c_lon = np.mean(stations, axis=0)
    # plt.figure()
    # plt.plot(c_lon, c_lat, 'ro')
    # plt.plot(stations[:, 1], stations[:, 0], 'k.')
    # plt.xlabel('longitude')
    # plt.ylabel('latitude')
    # plt.tight_layout()
    # plt.show()

    dists = []
    for lat, lon in stations:
        dist, _, _ = gps2dist_azimuth(lat, lon, c_lat, c_lon)
        dists.append(dist)
    weight1 = 1.0 / np.asarray(dists)

    cms = []
    weight = []
    for i, (lat, lon) in enumerate(stations):
        cm, ice_exist = find_crust(crust_model, lat, lon, nc)
        if cm is None:
            continue
        weight.append(weight1[i])
        plt.step(cm[:, 1], cm[:, 0])
        dmax_crust = cm[-1, 0]
        itm = np.argwhere(mean_model[:, 0] > dmax_crust)[0][0]
        ibm = np.argwhere(mean_model[:, 0] <= 60.0)[-1][0]
        if ibm > itm:
            cm_ext = mean_model[itm:ibm, :]
            cm = np.vstack([cm, cm_ext])
        cm2 = extend_model(cm)
        cm = interp_model(cm2, nc, ice_exist)
        cms.append(cm)
    plt.gca().invert_yaxis()
    cms = np.asarray(cms)
    cm = np.zeros((cms.shape[1], cms.shape[2]))
    cm[:, 0] = cms[0, :, 0]
    weight = np.asarray(weight)
    weight = weight / weight.sum()
    for i in range(3):
        cm[:, 1 + i] = np.average(cms[:, :, 1 + i], weights=weight, axis=0)

    itm = np.argwhere(mean_model[:, 0] <= 60.0)[-1][0]
    ibm = np.argwhere(mean_model[:, 0] < zmax * 1.2)[-1][0]
    mm = mean_model[itm:ibm, :]
    mm = extend_model(mm)
    mm = interp_model(mm, nm)
    model_new = np.vstack([cm, mm])

    with open('model_init.txt', 'w') as fp:
        for i, row in enumerate(model_new):
            fp.write(('{:5d}' + '{:9.2f}' * 4 + '\n').format(i + 1, *row))

    plt.figure()
    vs = model_new[:, 2]
    z = model_new[:, 0]
    plt.step(vs, z)
    plt.ylim([0, zmax])
    plt.xlabel('Vs (km/s)')
    plt.ylabel('Depth (km)')
    plt.gca().invert_yaxis()
    plt.show()
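The station averaging above is an inverse-distance weighting of per-station profiles; a stand-alone sketch with made-up numbers showing the same np.average call:

import numpy as np

# three hypothetical Vs columns sampled at the same depths, and station-to-centre distances (m)
profiles = np.array([[3.2, 3.5, 3.9],
                     [3.0, 3.4, 3.8],
                     [3.4, 3.6, 4.0]])
dists = np.array([10000.0, 25000.0, 40000.0])

weights = 1.0 / dists
weights /= weights.sum()
print(np.average(profiles, weights=weights, axis=0))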
Exemplo n.º 33
0
def proceve(eve, sta, debug=False):
    
    try:
        coords = sp.get_coordinates(net + '.' + sta + '.00.LHZ', eve.origins[0].time)
    except:
        return
    (dis, azi, bazi) = gps2dist_azimuth(coords['latitude'], coords['longitude'], 
                                            eve.origins[0].latitude,eve.origins[0].longitude)
        
    # Now in km
    dis *= 1./1000.
    disdeg = dis*0.0089932  # km to degrees (~1/111.19 km per degree)
    # Check for events way outside of our interested window
    if disdeg <=50. or disdeg >=120.:
        return
    fstring = 'NEWRESULTS2/' + net + '_' + sta + '_' + str(eve.origins[0].time.year) + '_' + str(eve.origins[0].time.julday).zfill(3) + \
            '_' + str(eve.origins[0].time.hour).zfill(2) + '_' + str(eve.origins[0].time.minute).zfill(2) + \
            '_Results.csv'
    feve = open(fstring,'w')
    feve.write('sta, loc, year, day, f0, distance, azimuth, corr, mag, mHV, stdHV, phase, pV, pRV, PR, pNV, PN, pEV, pE \n')
    if debug:
        print('Distance: ' + str(dis))
        print('Azimuth: ' + str(azi))
    # compute arrival start and end times 620 s window
    astime = eve.origins[0].time + int(dis/4.)-30.
    aetime = astime + window-30.

    # Grab the data now have trimmed RT data in velocity with filter
    locs = glob.glob('/msd/' + net + '_' + sta + '/' + str(astime.year) + '/'
                    + '/' + str(astime.julday).zfill(3) + '/*LHZ*')
    locs = [(loc.split('/')[-1]).split('_')[0] for loc in locs]
    for loc in locs:
        try:
        #if True:
            if debug:
                print('Grabbing the event data')
            st = grabdata(astime, aetime, bazi, sp, sta, net, loc)
            
        except:
            print('No data for: ' + sta)
            continue
        if debug:
            print(Noisest)
            print(st)
        
        for f0 in f0s:

            st2 = finalfilter(st,0.,bazi,astime,aetime,True)
            # Here is our data in the ZNE directions so no rotation
            st2ZNE = finalfilter(st,0.,0.,astime,aetime, False)
            st2 += st2ZNE
            st2.merge()
            
            if st2[0].stats.npts < .9*window:
                continue
            # We now have a good event with high SNR
            
            mHV, stdHV, phase, med = HVscheme2(st, f0, bazi, astime, aetime, disdeg, eve)
            #if ((mHV == 0) & (stdHV == 0) & (phase == 0) & (med == 0)):
            #    continue
            print('Here is mHV:' + str(mHV))
            pV = np.fft.rfft(st2.select(component="Z")[0].data)
            freqmin = f0/np.sqrt(2.)
            freqmax = f0*np.sqrt(2.)
            freqs = np.fft.rfftfreq(st2[0].stats.npts)
            pV = pV[(freqs <= freqmax) & (freqs >= freqmin)]
            feve.write(sta + ', ' + loc + ', ' + str(eve.origins[0].time.year) +', ' + 
                            str(eve.origins[0].time.julday).zfill(3) + ', ' + str(f0) + ', ' + 
                            str(disdeg) + ', ' + str(azi) + ', ' + str(eve.magnitudes[0].mag)
                            + ', ' + str(mHV) + ', ' + str(med) + ', ' + str(stdHV) + ', '+ str(phase) + ', ' + str(np.mean(np.real(pV))))
            for comp in ['R', 'N', 'E']:
                pC = np.fft.rfft(st2.select(component=comp)[0].data)
                pC = pC[(freqs <= freqmax) & (freqs >= freqmin)]
                pM = np.mean(np.real(pC))
                pCV = np.mean(np.abs(pC)/np.abs(np.real(pV)))
                feve.write( ', ' + str(pCV) + ', ' + str(pM))
            feve.write(' \n')
                            
    feve.close()        
    num_lines = sum(1 for line in open(fstring))
    if num_lines <= 1:
        os.remove(fstring)
    return
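The factor 0.0089932 deg/km used above is simply 1/111.19; ObsPy's kilometer2degrees helper gives the same conversion, shown here for comparison with an arbitrary distance:

from obspy.geodetics import kilometer2degrees

dis_km = 6500.0
print(dis_km * 0.0089932)         # factor used in proceve
print(kilometer2degrees(dis_km))  # equivalent ObsPy helper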
Exemplo n.º 34
0
def sdxtoquakeml(sdx_dir,
                 out_xml,
                 time_uncertainties=[0.1, 0.2, 0.5, 0.8, 1.5],
                 catalog_description="",
                 catalog_version="",
                 agency_id="",
                 author="",
                 vel_mod_id=""):
    """
    Convert SDX to QuakeML format using ObsPy inventory structure.
    SDX filename prefix is stored under event description.
    Input parameters:
        - sdx_dir: directory containing sdx files (required)
        - out_xml: Filename of quakeML file (required)
        - time_uncertainties: List containing time uncertainties in seconds
          for mapping from weights 0-4, respectively (optional)
        - catalog_description (optional)
        - catalog_version (optional)
        - agency_id (optional)
        - author (optional)
        - vel_mod_id (optional)
    Output:
        - xml catalog in QuakeML format.
    """

    # Prepare catalog
    cat = Catalog(description=catalog_description,
                  creation_info=CreationInfo(author=author,
                                             agency_id=agency_id,
                                             version=catalog_version))

    # Read in sdx files in directory, recursively
    files = glob.glob("{:}/**/*.sdx".format(sdx_dir), recursive=True)
    if len(files) == 0:
        print("No SDX files found in path. Exiting")
    for sdx_file_path in files:
        print("Working on ", sdx_file_path.split('/')[-1])

        # Set-up event
        evt_id = (sdx_file_path.split('/')[-1])[:-4]
        event = Event(event_type="earthquake",
                      creation_info=CreationInfo(author=author,
                                                 agency_id=agency_id),
                      event_descriptions=[EventDescription(text=evt_id)])

        # Get station details, append to arrays
        sdx_file = open(sdx_file_path, "r")
        stations = []
        for line in sdx_file:
            if line.rstrip() == "station":
                sdxstation = list(islice(sdx_file, 5))
                stations.append([
                    sdxstation[1].split()[0],
                    float(sdxstation[2].split()[0]),
                    float(sdxstation[3].split()[0]),
                    float(sdxstation[4].split()[0])
                ])
        sdx_file.close()

        # Find origin details, append to origin object
        sdx_file = open(sdx_file_path, "r")
        found_origin = False
        for line in sdx_file:
            if line.rstrip() == "origin":
                found_origin = True
                sdxorigin = list(islice(sdx_file, 17))
                orig_time = ("{:}T{:}".format(
                    sdxorigin[1][0:10].replace(".", "-"), sdxorigin[1][11:23]))
                evt_lat = float(sdxorigin[2].split()[0])
                evt_lon = float(sdxorigin[3].split()[0])
                evt_depth = float(sdxorigin[4].split()[0])
                creation_time = UTCDateTime("{:}T{:}".format(
                    sdxorigin[16].split()[6][0:10].replace(".", "-"),
                    sdxorigin[16].split()[6][11:23]))
                num_arrivals = int(sdxorigin[12].split()[0])
                num_arrivals_p = (int(sdxorigin[12].split()[0]) -
                                  int(sdxorigin[12].split()[1]))
                min_dist = float(sdxorigin[12].split()[9])
                max_dist = float(sdxorigin[12].split()[10])
                med_dist = float(sdxorigin[12].split()[11])
                max_az_gap = float(sdxorigin[12].split()[6])

                origin = Origin(time=UTCDateTime(orig_time),
                                longitude=evt_lon,
                                latitude=evt_lat,
                                depth=evt_depth * -1000,
                                earth_model_id=vel_mod_id,
                                origin_type="hypocenter",
                                evaluation_mode="manual",
                                evaluation_status="confirmed",
                                method_id=ResourceIdentifier(id="SDX_hypo71"),
                                creation_info=CreationInfo(
                                    creation_time=creation_time,
                                    author=author,
                                    agency_id=agency_id),
                                quality=OriginQuality(
                                    associated_phase_count=num_arrivals,
                                    used_phase_count=num_arrivals,
                                    associated_station_count=num_arrivals_p,
                                    used_station_count=num_arrivals_p,
                                    azimuthal_gap=max_az_gap,
                                    minimum_distance=min_dist,
                                    maximum_distance=max_dist,
                                    median_distance=med_dist))
                event.origins.append(origin)

        sdx_file.close()

        # Skip event if no computed origin
        if found_origin is False:
            print("No origin found ... skipping event")
            continue

        # Get pick details, append to pick and arrival objects
        sdx_file = open(sdx_file_path, "r")
        found_pick = False
        for line in sdx_file:
            if line.rstrip() == "pick":
                found_pick = True
                sdxpick = list(islice(sdx_file, 15))
                pick_time = UTCDateTime("{:}T{:}".format(
                    sdxpick[1][0:10].replace(".", "-"), sdxpick[1][11:23]))
                network = sdxpick[2].split()[0]
                station = sdxpick[2].split()[1]
                location = sdxpick[2].split()[2]
                if "NOT_SET" in location:
                    location = ""
                channel = sdxpick[2].split()[3]
                onset = sdxpick[8].split()[0]
                if onset == "0":
                    pickonset = "emergent"
                elif onset == "1":
                    pickonset = "impulsive"
                elif onset == "2":
                    pickonset = "questionable"
                phase = sdxpick[9].split()[0]
                polarity = sdxpick[10].split()[0]
                if polarity == "0":
                    pol = "positive"
                elif polarity == "1":
                    pol = "negative"
                elif polarity == "2":
                    pol = "undecidable"
                weight = int(sdxpick[11].split()[0])
                creation_time = UTCDateTime("{:}T{:}".format(
                    sdxpick[14].split()[6][0:10].replace(".", "-"),
                    sdxpick[14].split()[6][11:23]))
                pick = Pick(
                    time=pick_time,
                    waveform_id=WaveformStreamID(network_code=network,
                                                 station_code=station,
                                                 location_code=location,
                                                 channel_code=channel),
                    time_errors=time_uncertainties[weight],
                    evaluation_mode="manual",
                    evaluation_status="confirmed",
                    onset=pickonset,
                    phase_hint=phase,
                    polarity=pol,
                    method_id=ResourceIdentifier(id="SDX"),
                    creation_info=CreationInfo(creation_time=creation_time))
                event.picks.append(pick)

                # Compute azimuth, distance, append to arrival object
                for i in range(0, len(stations)):
                    if stations[i][0] == station:
                        azimuth = (gps2dist_azimuth(evt_lat, evt_lon,
                                                    stations[i][1],
                                                    stations[i][2])[1])
                        dist_deg = locations2degrees(evt_lat, evt_lon,
                                                     stations[i][1],
                                                     stations[i][2])
                        arrival = Arrival(phase=phase,
                                          pick_id=pick.resource_id,
                                          azimuth=azimuth,
                                          distance=dist_deg,
                                          time_weight=1.00)
                        event.origins[0].arrivals.append(arrival)

        # Skip event if no picks
        if found_pick is False:
            print("No picks found ... skipping event")
            continue

        # Set preferred origin and append event to catalogue
        event.preferred_origin_id = event.origins[0].resource_id
        cat.events.append(event)

        sdx_file.close()

    cat.write(out_xml, format="QUAKEML")
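A hypothetical invocation of the converter above, assuming a directory of .sdx files exists at the given path (all paths and identifiers are placeholders):

sdxtoquakeml('picks/sdx', 'catalog.xml',
             time_uncertainties=[0.1, 0.2, 0.5, 0.8, 1.5],
             catalog_description='Local network picks',
             catalog_version='1.0',
             agency_id='XX',
             author='analyst',
             vel_mod_id='vmodel_1d')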
Exemplo n.º 35
0
def fit_spectra(
    st,
    origin,
    kappa=0.035,
    RP=0.55,
    VHC=0.7071068,
    FSE=2.0,
    density=2.8,
    shear_vel=3.7,
    R0=1.0,
    moment_factor=100,
    min_stress=0.1,
    max_stress=10000,
):
    """
    Fit spectra varying stress_drop and moment.

    Args:
        st (StationStream):
            Stream of data.
        origin (ScalarEvent):
             ScalarEvent object.
        kappa (float):
            Site diminution factor (sec). Typical value for active crustal
            regions is about 0.03-0.04, and stable continental regions is about
            0.006.
        RP (float):
            Partition of shear-wave energy into horizontal components.
        VHC (float):
            Partition of shear-wave energy into horizontal components
            1 / np.sqrt(2.0).
        FSE (float):
            Free surface effect.
        density (float):
            Density at source (gm/cc).
        shear_vel (float):
            Shear-wave velocity at source (km/s).
        R0 (float):
            Reference distance (km).
        moment_factor (float):
            Multiplicative factor for setting bounds on moment, where the
            moment (from the catalog moment magnitude) is multiplied and
            divided by `moment_factor` to set the bounds for the spectral
            optimization.
        min_stress (float):
            Min stress for fit search (bars).
        max_stress (float):
            Max stress for fit search (bars).

    Returns:
        StationStream with fitted spectra parameters.
    """
    for tr in st:
        # Only do this for horizontal channels for which the smoothed spectra
        # has been computed.
        if tr.hasCached("smooth_signal_spectrum") and tr.hasParameter(
                "corner_frequencies"):
            event_mag = origin.magnitude
            event_lon = origin.longitude
            event_lat = origin.latitude
            dist = (gps2dist_azimuth(
                lat1=event_lat,
                lon1=event_lon,
                lat2=tr.stats["coordinates"]["latitude"],
                lon2=tr.stats["coordinates"]["longitude"],
            )[0] * M_TO_KM)

            # Use the smoothed spectra for fitting
            smooth_signal_dict = tr.getCached("smooth_signal_spectrum")
            freq = np.array(smooth_signal_dict["freq"])
            obs_spec = np.array(smooth_signal_dict["spec"])

            # -----------------------------------------------------------------
            # INITIAL VALUES
            # Need an approximate stress drop as initial guess
            stress_0 = np.sqrt(min_stress * max_stress)
            moment_0 = moment_from_magnitude(event_mag)

            # Array of initial values
            x0 = (np.log(moment_0), np.log(stress_0))

            # Bounds
            stress_bounds = (np.log(min_stress), np.log(max_stress))

            # multiplicative factor for moment bounds
            moment_bounds = (
                x0[0] - np.log(moment_factor),
                x0[0] + np.log(moment_factor),
            )

            bounds = (moment_bounds, stress_bounds)

            # Frequency limits for cost function
            freq_dict = tr.getParameter("corner_frequencies")
            fmin = freq_dict["highpass"]
            fmax = freq_dict["lowpass"]

            # -----------------------------------------------------------------
            # CONSTANT ARGUMENTS

            cargs = (
                freq,
                obs_spec,
                fmin,
                fmax,
                dist,
                kappa,
                RP,
                VHC,
                FSE,
                shear_vel,
                density,
                R0,
            )

            result = minimize(
                spectrum_cost,
                x0,
                args=cargs,
                method="L-BFGS-B",
                jac=False,
                bounds=bounds,
                tol=1e-4,
                options={"disp": False},
            )

            moment_fit = np.exp(result.x[0])
            magnitude_fit = magnitude_from_moment(moment_fit)
            stress_drop_fit = np.exp(result.x[1])
            f0_fit = brune_f0(moment_fit, stress_drop_fit)

            # Hessian (H) is in terms of normalized moment and stress drop
            # Covariance matrix is sigma^2 * H^-1.
            inv_hess = result.hess_inv.todense()

            # Estimate of sigma^2 is sum of squared residuals / (n - p)
            # NOTE: we are NOT accounting for the correlation across
            # frequencies and so we are underestimating the variance.
            SSR = result.fun
            sigma2 = SSR / (len(freq) - len(result.x))
            COV = sigma2 * inv_hess
            sd = np.sqrt(np.diagonal(COV))

            # mag_lower = magnitude_from_moment(np.exp(result.x[0]-sd[0]))
            # mag_upper = magnitude_from_moment(np.exp(result.x[0]+sd[0]))
            # stress_drop_lower = np.exp(result.x[1]-sd[1])
            # stress_drop_upper = np.exp(result.x[1]+sd[1])

            # Get the fitted spectrum and then calculate the goodness-of-fit
            # metrics
            fit_spec = model((moment_fit, stress_drop_fit), freq, dist, kappa)
            mean_squared_error = np.mean((obs_spec - fit_spec)**2)

            # R^2 (Coefficient of Determination) is defined as 1 minus the
            # residual sum of squares (SSR) divided by the total sum of squares
            # (SST)
            ssr = np.sum((obs_spec - fit_spec)**2)
            sst = np.sum((obs_spec - np.mean(obs_spec))**2)
            r_squared = 1 - (ssr / sst)

            fit_spectra_dict = {
                "stress_drop": stress_drop_fit,
                "stress_drop_lnsd": sd[1],
                "epi_dist": dist,
                "kappa": kappa,
                "moment": moment_fit,
                "moment_lnsd": sd[0],
                "magnitude": magnitude_fit,
                "f0": f0_fit,
                "minimize_message": result.message,
                "minimize_success": result.success,
                "mean_squared_error": mean_squared_error,
                "R2": r_squared,
            }
            tr.setParameter("fit_spectra", fit_spectra_dict)

    return st
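The helper functions referenced above (moment_from_magnitude, magnitude_from_moment, brune_f0) are assumed to follow the standard Hanks & Kanamori (1979) and Brune (1970) relations; a stand-alone sketch in those units (dyne-cm, bars, km/s):

import numpy as np

def moment_from_magnitude(mw):
    return 10.0 ** (1.5 * mw + 16.05)      # seismic moment in dyne-cm

def magnitude_from_moment(m0):
    return (np.log10(m0) - 16.05) / 1.5

def brune_f0(moment, stress_drop, shear_vel=3.7):
    # f0 = 4.9e6 * Vs * (stress_drop / M0)**(1/3); Vs in km/s, stress in bars, M0 in dyne-cm
    return 4.9e6 * shear_vel * (stress_drop / moment) ** (1.0 / 3.0)

m0 = moment_from_magnitude(6.0)
print(m0, brune_f0(m0, stress_drop=100.0))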
Exemplo n.º 36
0
# Lets start by using a station list and then move to a different way
for sta in stations:
    cursta = sta.strip()
    if debug:
        print('Current station:' + cursta)
    # Time to split the cursta into its network and current station 
    cursta = sta.split()
    net = cursta[0]
    cursta = cursta[1]

    # Now we get the data for the event

    if parserval.trigger:
        lat,lon = getlatlon(cursta, eventtime, sp)
        dist = gps2dist_azimuth(float(cmtlat), float(cmtlon), lat, lon)
        dist = dist[0]/1000.
        if debug:
            print('Distance of station to event: ' + str(dist) + ' km')
            print('Here is our mag distance: ' + str(-415. + mag*190.))
        if dist > 900.:
            try:
                st = getdata(net, cursta, eventtime, lents, dataloc, True)
                print('Got trigger data for ' + cursta + ' which is out of range: ' + str(dist) + ' km')
            except:
                continue
        elif (dist >= 110.) and (dist <= -415. + mag*190.):
            try:
                st = getdata(net, cursta, eventtime, lents, dataloc, True)
                print(st)
            except:
Exemplo n.º 37
0
def RGF_from_SW4(path_to_green=".",
                 t0=0,
                 file_name=None,
                 origin_time=None,
                 event_lat=None,
                 event_lon=None,
                 depth=None,
                 station_name=None,
                 station_lat=None,
                 station_lon=None,
                 output_directory="sw4out"):
    """
    Function to convert reciprocal Green's functions from SW4 to tensor format
    
    Reads the reciprocal Green's functions (displacement/unit force) from SW4 and
    performs the summation to get the Green's function tensor.
    RGFs from SW4 are oriented north, east and positive down by setting az=0.
    
    Assumes the following file structure:
    f[x,y,z]/station_name/event_name.[x,y,z]
    """

    import os
    from obspy.core import read, Stream
    from obspy.geodetics.base import gps2dist_azimuth
    from obspy.core.util.attribdict import AttribDict

    # Defined variables (do not change)
    dirs = ["fz", "fx", "fy"]  # directory to displacement per unit force
    du = [
        "duxdx", "duydy", "duzdz", "duydx", "duxdy", "duzdx", "duxdz", "duzdy",
        "duydz"
    ]
    orientation = ["Z", "N", "E"]  # set az=0 in SW4 so x=north, y=east
    cmpaz = [0, 0, 90]
    cmpinc = [0, 90, 90]

    # Create a new output directory under path_to_green
    dirout = "%s/%s" % (path_to_green, output_directory)
    if os.path.exists(dirout):
        print("Warning: output directory '%s' already exists." % dirout)
    else:
        print("Creating output directory '%s'." % dirout)
        os.mkdir(dirout)

    # Loop over each directory fx, fy, fz
    nsta = len(station_name)
    for i in range(3):
        # Set headers according to the orientation
        if dirs[i][-1].upper() == "Z":
            scale = -1  # change to positive up
        else:
            scale = 1

        # Loop over each station
        for j in range(nsta):
            station = station_name[j]
            stlo = station_lon[j]
            stla = station_lat[j]
            dirin = "%s/%s/%s" % (path_to_green, dirs[i], station)
            print("Reading RGFs from %s:" % (dirin))
            st = Stream()
            for gradient in du:
                fname = "%s/%s.%s" % (dirin, file_name, gradient)
                st += read(fname, format="SAC")

            # Set station headers
            starttime = origin_time - t0
            dist, az, baz = gps2dist_azimuth(event_lat, event_lon, stla, stlo)

            # SAC headers
            sacd = AttribDict()
            sacd.stla = stla
            sacd.stlo = stlo
            sacd.evla = event_lat
            sacd.evlo = event_lon
            sacd.az = az
            sacd.baz = baz
            sacd.dist = dist / 1000  # convert to kilometers
            sacd.o = 0
            sacd.b = -1 * t0
            sacd.cmpaz = cmpaz[i]
            sacd.cmpinc = cmpinc[i]
            sacd.kstnm = station

            # Update start time
            for tr in st:
                tr.stats.starttime = starttime
                tr.stats.distance = dist
                tr.stats.back_azimuth = baz

            # Sum displacement gradients to get reciprocal Green's functions
            tensor = Stream()
            for gradient, element in zip(["duxdx", "duydy", "duzdz"],
                                         ["XX", "YY", "ZZ"]):
                trace = st.select(channel=gradient)[0].copy()
                trace.stats.channel = "%s%s" % (orientation[i], element)
                tensor += trace

            trace = st.select(channel="duydx")[0].copy()
            trace.data += st.select(channel="duxdy")[0].data
            trace.stats.channel = "%s%s" % (orientation[i], "XY")
            tensor += trace

            trace = st.select(channel="duzdx")[0].copy()
            trace.data += st.select(channel="duxdz")[0].data
            trace.stats.channel = "%s%s" % (orientation[i], "XZ")
            tensor += trace

            trace = st.select(channel="duzdy")[0].copy()
            trace.data += st.select(channel="duydz")[0].data
            trace.stats.channel = "%s%s" % (orientation[i], "YZ")
            tensor += trace

            # Set sac headers before saving
            print("    Saving GFs to %s" % dirout)
            for tr in tensor:
                tr.trim(origin_time, tr.stats.endtime)
                tr.data = scale * tr.data
                tr.stats.sac = sacd
                sacout = "%s/%s.%.4f.%s" % (dirout, station, depth,
                                            tr.stats.channel)
                #print("Writing %s to file."%sacout)
                tr.write(sacout, format="SAC")
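A hypothetical call, assuming the f[x,y,z]/station/event.[gradient] directory tree described in the docstring exists (all paths, names and coordinates are placeholders):

from obspy import UTCDateTime

RGF_from_SW4(path_to_green='greens/run01',
             t0=10.0,
             file_name='event001',
             origin_time=UTCDateTime('2020-01-01T00:00:00'),
             event_lat=35.0, event_lon=-118.0, depth=8.0,
             station_name=['STA1', 'STA2'],
             station_lat=[35.4, 34.8],
             station_lon=[-117.6, -118.3],
             output_directory='sw4out')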
Exemplo n.º 38
0
def pretty_plot_small(st, stack, eve, not_used, comp, inv, paramdic):
    st2 = st.select(component=comp)
    st2 = st2.copy()

    diss = []
    # compute distances
    for tr in st2:
        coors = inv.get_coordinates(tr.id[:-1] + 'Z')
        (dis, azi, bazi) = gps2dist_azimuth(coors['latitude'],
                                            coors['longitude'],
                                            eve.origins[0].latitude,
                                            eve.origins[0].longitude)
        disdeg = kilometer2degrees(dis / 1000.)
        diss.append(disdeg)
    mdiss = min(diss)
    Mdiss = max(diss)

    diss = np.arange(float(len(st2)))
    for tr in st2:
        tr.data /= np.max(np.abs(stack))
    stack /= np.max(np.abs(stack))
    ptp = np.ptp(stack)

    ran = 1.

    fig = plt.figure(1, figsize=(16, 12))
    tithand = st[0].stats.network + ' ' + paramdic['phase'] + '-Wave '
    if comp == 'R':
        tithand += ' Radial '
    elif comp == 'Z':
        tithand += ' Vertical '
    elif comp == 'T':
        tithand += ' Transverse '
    tithand += str(eve['origins'][0]['time'].year) + ' '
    tithand += str(eve['origins'][0]['time'].julday) + ' '
    tithand += str(eve['origins'][0]['time'].hour).zfill(2) + ':' + str(
        eve['origins'][0]['time'].minute).zfill(2)
    mag = eve.magnitudes[0].mag
    magstr = eve.magnitudes[0].magnitude_type
    if 'Lg' in magstr:
        magstr = 'mb_{Lg}'
    tithand += ' $' + magstr + '$=' + str(mag)
    plt.title(tithand)
    labs = []
    for pair in zip(diss, st2):
        labs.append((pair[1].id).replace('.', ' '))
        t = pair[1].times()
        if pair[1].max() > np.max(np.abs(stack)) * 3.:
            p = plt.plot(t, (pair[1].data) / (np.max(np.abs(stack)) * 3.) +
                         pair[0])
            plt.text(min(t) + 1.,
                     pair[0] + .2,
                     (pair[1].id)[:-4].replace('.', ' ') + ' gain',
                     color=p[0].get_color())
        else:
            p = plt.plot(t, pair[1].data / ran + pair[0])
            plt.text(min(t) + 1.,
                     pair[0] - .2, (pair[1].id)[:-4].replace('.', ' '),
                     color=p[0].get_color())
        plt.plot(t, stack / ran + pair[0], color='k', alpha=0.5, linewidth=3)
    plt.yticks(diss, labs)
    plt.plot([10., 10.], [-1000., 1000.], color='k', linewidth=3)

    plt.ylim((min(diss) - 1, max(diss) + 1))
    plt.xlim((min(t), max(t)))
    plt.xlabel('Time (s)')
    plt.ylabel('Station index')
    if not os.path.exists(st[0].stats.network + '_results'):
        os.mkdir(st[0].stats.network + '_results')
    plt.savefig(st[0].stats.network + '_results/' + st[0].stats.network + '_' +
                comp + '_' + str(eve['origins'][0]['time'].year) +
                str(eve['origins'][0]['time'].julday) + '_' +
                str(eve['origins'][0]['time'].hour).zfill(2) +
                str(eve['origins'][0]['time'].minute).zfill(2) + '.png',
                format='PNG',
                dpi=400)

    plt.clf()
    plt.close()
    return
Exemplo n.º 39
0
def signal_end(
    st,
    event_time,
    event_lon,
    event_lat,
    event_mag,
    method=None,
    vmin=None,
    floor=None,
    model=None,
    epsilon=2.0,
):
    """
    Estimate end of signal by using a model of the 5-95% significant
    duration, and adding this value to the "signal_split" time. This probably
    only works well when the split is estimated with a p-wave picker since
    the velocity method often ends up with split times that are well before
    signal actually starts.

    Args:
        st (StationStream):
            Stream of data.
        event_time (UTCDateTime):
            Event origin time.
        event_mag (float):
            Event magnitude.
        event_lon (float):
            Event longitude.
        event_lat (float):
            Event latitude.
        method (str):
            Method for estimating signal end time. Either 'velocity'
            or 'model'.
        vmin (float):
            Velocity (km/s) for estimating end of signal. Only used if
            method="velocity".
        floor (float):
            Minimum duration (sec) applied along with vmin.
        model (str):
            Short name of duration model to use. Must be defined in the
            gmprocess/data/modules.yml file.
        epsilon (float):
            Number of standard deviations; if epsilon is 1.0, then the signal
            window duration is the mean Ds + 1 standard deviation. Only used
            for method="model".

    Returns:
        StationStream with each trace updated to include a
        'signal_end' parameter dictionary (set via tr.setParameter).

    """
    # Load openquake stuff if method="model"
    if method == "model":
        dmodel = load_model(model)

        # Set some "conservative" inputs (in that they will tend to give
        # larger durations).
        rctx = RuptureContext()
        rctx.mag = event_mag
        rctx.rake = -90.0
        rctx.vs30 = np.array([180.0])
        rctx.z1pt0 = np.array([0.51])
        dur_imt = imt.from_string("RSD595")
        stddev_types = [const.StdDev.TOTAL]

    for tr in st:
        if not tr.hasParameter("signal_split"):
            continue
        if method == "velocity":
            if vmin is None:
                raise ValueError('Must specify vmin if method is "velocity".')
            if floor is None:
                raise ValueError('Must specify floor if method is "velocity".')
            epi_dist = (gps2dist_azimuth(
                lat1=event_lat,
                lon1=event_lon,
                lat2=tr.stats["coordinates"]["latitude"],
                lon2=tr.stats["coordinates"]["longitude"],
            )[0] / 1000.0)
            end_time = event_time + max(floor, epi_dist / vmin)
        elif method == "model":
            if model is None:
                raise ValueError('Must specify model if method is "model".')
            epi_dist = (gps2dist_azimuth(
                lat1=event_lat,
                lon1=event_lon,
                lat2=tr.stats["coordinates"]["latitude"],
                lon2=tr.stats["coordinates"]["longitude"],
            )[0] / 1000.0)
            # Repi >= Rrup, so substitution here should be conservative
            # (leading to larger durations).
            rctx.rrup = np.array([epi_dist])
            rctx.sids = np.array(range(np.size(rctx.rrup)))
            lnmu, lnstd = dmodel.get_mean_and_stddevs(rctx, rctx, rctx,
                                                      dur_imt, stddev_types)
            duration = np.exp(lnmu + epsilon * lnstd[0])
            # Get split time
            split_time = tr.getParameter("signal_split")["split_time"]
            end_time = split_time + float(duration)
        else:
            raise ValueError('method must be either "velocity" or "model".')
        # Update trace params
        end_params = {
            "end_time": end_time,
            "method": method,
            "vsplit": vmin,
            "floor": floor,
            "model": model,
            "epsilon": epsilon,
        }
        tr.setParameter("signal_end", end_params)

    return st
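For the "velocity" option the end time is just the origin time plus the larger of the duration floor and the epicentral distance divided by vmin; a stand-alone illustration with made-up coordinates and limits:

from obspy import UTCDateTime
from obspy.geodetics.base import gps2dist_azimuth

event_time = UTCDateTime('2021-07-08T22:49:48')
epi_dist_km = gps2dist_azimuth(33.14, -115.64, 33.5, -116.0)[0] / 1000.0
vmin, floor = 1.0, 120.0   # km/s and seconds, illustrative values only
end_time = event_time + max(floor, epi_dist_km / vmin)
print(end_time)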
Exemplo n.º 40
0
according to the documentation of the functions in noise.py.
"""

import numpy as np
import noise
from obspy import read
from obspy.geodetics.base import gps2dist_azimuth
import matplotlib.pyplot as plt

ref_curve = np.loadtxt("Average_phase_velocity_rayleigh")

tr1 = read("preprocessed_data/SULZ.LHZ.CH.2013.219.processed.SAC")[0]
tr2 = read("preprocessed_data/VDL.LHZ.CH.2013.219.processed.SAC")[0]
# bad example with only one day of correlation

dist, az, baz = gps2dist_azimuth(tr1.stats.sac.stla, tr1.stats.sac.stlo,
                                 tr2.stats.sac.stla, tr2.stats.sac.stlo)

freq, xcorr, n_corr_wins = noise.noisecorr(tr1,
                                           tr2,
                                           window_length=3600.,
                                           overlap=0.5)

smoothed = noise.velocity_filter(freq,
                                 xcorr,
                                 dist / 1000.,
                                 velband=(6.0, 5.0, 1.5, 0.5),
                                 return_all=False)

crossings,phase_vel = noise.extract_phase_velocity(freq,smoothed,dist/1000.,ref_curve,\
                         freqmin=0.004,freqmax=0.25, min_vel=1.5, max_vel=5.0,min_amp=0.0,\
                         horizontal_polarization=False, smooth_spectrum=False,plotting=True)
Exemplo n.º 41
0
def trim_multiple_events(
    st,
    origin,
    catalog,
    travel_time_df,
    pga_factor,
    pct_window_reject,
    gmpe,
    site_parameters,
    rupture_parameters,
):
    """
    Uses a catalog (list of ScalarEvents) to handle cases where a trace might
    contain signals from multiple events. The catalog should contain events
    down to a low enough magnitude in relation to the events of interest.
    Overall, the algorithm is as follows:

    1) For each earthquake in the catalog, get the P-wave travel time
       and estimated PGA at this station.

    2) Compute the PGA (of the as-recorded horizontal channels).

    3) Select the P-wave arrival times across all events for this record
       that are (a) within the signal window, and (b) the predicted PGA is
       greater than pga_factor times the recorded PGA from step #2.

    4) If any P-wave arrival times match the above criteria, then if any of
       the arrival times fall within in the first pct_window_reject*100%
       of the signal window, then reject the record. Otherwise, trim the
       record such that the end time does not include any of the arrivals
       selected in step #3.

    Args:
        st (StationStream):
            Stream of data.
        origin (ScalarEvent):
            ScalarEvent object associated with the StationStream.
        catalog (list):
            List of ScalarEvent objects.
        travel_time_df (DataFrame):
            A pandas DataFrame that contains the travel time information
            (obtained from
             gmprocess.waveform_processing.phase.create_travel_time_dataframe).
            The columns in the DataFrame are the station ids and the indices
            are the earthquake ids.
        pga_factor (float):
            A decimal factor used to determine whether the predicted PGA
            from an event arrival is significant enough that it should be
            considered for removal.
        pct_window_reject (float):
           A decimal from 0.0 to 1.0 used to determine if an arrival should
            be trimmed from the record, or if the entire record should be
            rejected. If the arrival falls within the first
            pct_window_reject * 100% of the signal window, then the entire
            record will be rejected. Otherwise, the record will be trimmed
            appropriately.
        gmpe (str):
            Short name of the GMPE to use. Must be defined in the modules file.
        site_parameters (dict):
            Dictionary of site parameters to input to the GMPE.
        rupture_parameters:
            Dictionary of rupture parameters to input to the GMPE.

    Returns:
        StationStream: Processed stream.

    """

    if not st.passed:
        return st

    # Check that we know the signal split for each trace in the stream
    for tr in st:
        if not tr.hasParameter("signal_split"):
            return st

    signal_window_starttime = st[0].getParameter("signal_split")["split_time"]

    arrivals = travel_time_df[st[0].stats.network + "." + st[0].stats.station]
    arrivals = arrivals.sort_values()

    # Filter by any arrival times that appear in the signal window
    arrivals = arrivals[(arrivals > signal_window_starttime)
                        & (arrivals < st[0].stats.endtime)]

    # Make sure we remove the arrival that corresponds to the event of interest
    if origin.id in arrivals.index:
        arrivals.drop(index=origin.id, inplace=True)

    if arrivals.empty:
        return st

    # Calculate the recorded PGA for this record
    stasum = StationSummary.from_stream(st, ["ROTD(50.0)"], ["PGA"])
    recorded_pga = stasum.get_pgm("PGA", "ROTD(50.0)")

    # Load the GMPE model
    gmpe = load_model(gmpe)

    # Generic context
    rctx = RuptureContext()

    # Make sure that site parameter values are converted to numpy arrays
    site_parameters_copy = site_parameters.copy()
    for k, v in site_parameters_copy.items():
        site_parameters_copy[k] = np.array([site_parameters_copy[k]])
    rctx.__dict__.update(site_parameters_copy)

    # Filter by arrivals that have significant expected PGA using GMPE
    is_significant = []
    for eqid, arrival_time in arrivals.items():
        event = next(event for event in catalog if event.id == eqid)

        # Set rupture parameters
        rctx.__dict__.update(rupture_parameters)
        rctx.mag = event.magnitude

        # TODO: distances should be calculated when we refactor to be
        # able to import distance calculations
        rctx.repi = np.array([
            gps2dist_azimuth(
                st[0].stats.coordinates.latitude,
                st[0].stats.coordinates.longitude,
                event.latitude,
                event.longitude,
            )[0] / 1000
        ])
        rctx.rjb = rctx.repi
        rctx.rhypo = np.sqrt(rctx.repi**2 + event.depth_km**2)
        rctx.rrup = rctx.rhypo
        rctx.sids = np.array(range(np.size(rctx.rrup)))
        pga, sd = gmpe.get_mean_and_stddevs(rctx, rctx, rctx, imt.PGA(), [])

        # Convert from ln(g) to %g
        predicted_pga = 100 * np.exp(pga[0])
        if predicted_pga > (pga_factor * recorded_pga):
            is_significant.append(True)
        else:
            is_significant.append(False)

    significant_arrivals = arrivals[is_significant]
    if significant_arrivals.empty:
        return st

    # Check if any of the significant arrivals occur within the first
    # pct_window_reject fraction of the signal window
    signal_length = st[0].stats.endtime - signal_window_starttime
    cutoff_time = signal_window_starttime + pct_window_reject * (signal_length)
    if (significant_arrivals < cutoff_time).any():
        for tr in st:
            tr.fail("A significant arrival from another event occurs within "
                    "the first %s percent of the signal window" %
                    (100 * pct_window_reject))

    # Otherwise, trim the stream at the first significant arrival
    else:
        for tr in st:
            signal_end = tr.getParameter("signal_end")
            signal_end["end_time"] = significant_arrivals[0]
            signal_end["method"] = "Trimming before right another event"
            tr.setParameter("signal_end", signal_end)
        cut(st)

    return st
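The final accept/trim/reject decision above reduces to comparing the significant arrival times against a cutoff inside the signal window; a minimal sketch with hypothetical times:

import pandas as pd
from obspy import UTCDateTime

signal_start = UTCDateTime('2021-07-08T22:50:00')
record_end = UTCDateTime('2021-07-08T22:52:00')
pct_window_reject = 0.5

arrivals = pd.Series({'eq1': UTCDateTime('2021-07-08T22:51:40')})
cutoff = signal_start + pct_window_reject * (record_end - signal_start)
if (arrivals < cutoff).any():
    print('reject record')
else:
    print('trim record end to', arrivals.min())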
Exemplo n.º 42
0
    def writeStations(self):
        '''Write station information to file, which can be loaded as a pandas dataframe'''

        ofname = 'Stations_%s_%s_%s_%s_%s_%s_mag_%s-%s_depth_%s-%s_km.dat' %(self.starttime,self.endtime,self.minlatitude,\
         self.minlongitude,self.maxlatitude,self.maxlongitude,self.minmag,self.maxmag,self.mindepth,self.maxdepth)

        outfile = open(ofname, 'w')

        try:

            for network in self.inventory:

                netname = network.code

                for station in network:

                    code = station.code
                    lat = station.latitude
                    lon = station.longitude
                    ele = station.elevation
                    stdate = station.start_date

                    if self.station_autoselect_flag:
                        #EK added 04/2019 to write only stations that we will later download
                        cnt = 0.

                        for event in self.quake_cat:

                            time = event.origins[0].time
                            evlat = event.origins[0].latitude
                            evlon = event.origins[0].longitude
                            dep = event.origins[0].depth / 1000.
                            mag = event.magnitudes[0].mag

                            ddeg = locations2degrees(evlat, evlon, lat, lon)
                            distance_m, az, baz = gps2dist_azimuth(
                                evlat, evlon, lat, lon)

                            theta = np.arctan2(distance_m, dep * 1000.)

                            if theta <= np.pi / 4:

                                arrivals = self.vmodel.get_travel_times(
                                    source_depth_in_km=dep,
                                    distance_in_degree=ddeg,
                                    phase_list=["s", "S"])

                                if len(arrivals) > 0:
                                    cnt = cnt + 1
                        if cnt > 0:

                            outfile.write(
                                "%s %s %s %s %s %s\n" %
                                (lon, lat, ele, netname, code, stdate))
                    else:
                        outfile.write("%s %s %s %s %s %s\n" %
                                      (lon, lat, ele, netname, code, stdate))

            outfile.close()

        except:

            print("Need to run fetchInventory before writing stations")
            sys.exit(1)
Exemplo n.º 43
0
stla = 0.0
Tmin = 10.0 #minimum period
Tmax = 200.0 #maximum period
nT = 100
noise_level = 0.10
#insta_db_name = '/home/romaguir/Documents/10s_PREM_ANI_FORCES'
insta_db_name = '/media/romaguir/wdseis/instaseis_databases/aicehot_5km_prem/aicehot_5km_prem_database/'
#window_min = 600.0
#window_max = 1600.0
window_min = 400.0
window_max = 3600.0
m_rr = 3.98e13
m_pp = 3.98e13
planet_radius = 1565.0
#planet_radius = 6371.0
distaz = gps2dist_azimuth(lat1=evla, lon1=evlo, lat2=stla, lon2=stlo, a=planet_radius*1000.0, f=0.0) # no flattening
gcarc_m = distaz[0]
dist_km = gcarc_m / 1000.0


#get instaseis seismogram
instaseis_db = instaseis.open_db(insta_db_name)
source = instaseis.Source(latitude=evla,
                          longitude=evlo,
                          depth_in_m=evdp*1000.,
                          m_rr = m_rr,
                          m_pp = m_pp)
receiver = instaseis.Receiver(latitude=stla,
                              longitude=stlo)
stream = instaseis_db.get_seismograms(source=source,
                                      receiver=receiver,
Exemplo n.º 44
0
def proceve(eve, sta, debug=False):

    try:
        coords = sp.get_coordinates(net + '.' + sta + '.00.LHZ',
                                    eve.origins[0].time)
    except:
        return
    (dis, azi, bazi) = gps2dist_azimuth(coords['latitude'],
                                        coords['longitude'],
                                        eve.origins[0].latitude,
                                        eve.origins[0].longitude)

    # Now in km
    dis *= 1. / 1000.
    disdeg = dis * 0.0089932  # km to degrees (~1/111.19 km per degree)
    # Check for events way outside of our interested window
    if disdeg <= 50. or disdeg >= 120.:
        return
    fstring = 'Results/JUNK' + net + '_' + sta + '_' + str(eve.origins[0].time.year) + '_' + str(eve.origins[0].time.julday).zfill(3) + \
            '_' + str(eve.origins[0].time.hour).zfill(2) + '_' + str(eve.origins[0].time.minute).zfill(2) + \
            '_Results.csv'
    feve = open(fstring, 'w')
    feve.write(
        'sta, loc, year, day, f0, distance, azimuth, corr, mag, mHV, stdHV, pRV, pNV, pEV \n'
    )
    if debug:
        print('Distance: ' + str(dis))
        print('Azimuth: ' + str(azi))
    # compute arrival start and end times 620 s window
    astime = eve.origins[0].time + int(dis / 4.) - 30.
    aetime = astime + window - 30.

    # Grab the data now have trimmed RT data in velocity with filter
    locs = glob.glob('/msd/' + net + '_' + sta + '/' + str(astime.year) + '/' +
                     '/' + str(astime.julday).zfill(3) + '/*LHZ*')
    locs = [(loc.split('/')[-1]).split('_')[0] for loc in locs]
    for loc in locs:
        try:
            #if True:
            if debug:
                print('Grabbing the event data')
            st = grabdata(astime, aetime, bazi, sp, sta, net, loc)

        except:
            print('No data for: ' + sta)
            continue
        if debug:
            print(Noisest)
            print(st)

        for f0 in f0s:

            st2 = finalfilter(st, 0., bazi, astime, aetime, True)
            # Here is our data in the ZNE directions so no rotation
            st2ZNE = finalfilter(st, 0., 0., astime, aetime, False)
            st2 += st2ZNE
            st2.merge()

            if st2[0].stats.npts < .9 * window:
                continue
            # We now have a good event with high SNR

            #mHV, stdHV, corr = HVscheme2(st, f0, bazi, astime, aetime, disdeg, eve)

            st2 = finalfilter(st, f0, bazi, astime, aetime, True)
            corrs = []
            win = int(round(1. / f0, 0))
            for window2 in st2.slide(window_length=win,
                                     step=int(round(win / 16., 0))):
                HilbertV = np.imag(
                    hilbert(window2.select(component="Z")[0].data))
                lag, corr = xcorr(HilbertV,
                                  window2.select(component="R")[0].data,
                                  5,
                                  full_xcorr=False)
                #corr = pearsonr(HilbertV, window.select(component="R")[0].data)
                corrs.append(corr)
            corr = corrs
            HilbertV = np.imag(hilbert(st2.select(component="Z")[0].data))
            oldx = np.asarray(range(
                len(corr))) / (float(len(corr)) / float(len(HilbertV)))
            corr = np.interp(range(len(HilbertV)), oldx, corr)
            env = envelope(
                st2.select(component="R")[0].data) * envelope(HilbertV)
            HV = envelope(
                st2.select(component="R")[0].data) / envelope(HilbertV)
            env *= 1. / np.max(np.abs(env))
            t = np.asarray(range(len(HilbertV)))

            lim = t[(corr >= .90)]
            HV2 = HV[(corr >= .90)]
            lim = lim[(HV2 <= np.mean(HV2) + 3. * np.std(HV2))
                      & (HV2 >= np.mean(HV2) - 3. * np.std(HV2))]
            HV2 = HV2[(HV2 <= np.mean(HV2) + 3. * np.std(HV2))
                      & (HV2 >= np.mean(HV2) - 3. * np.std(HV2))]

            mHV = np.mean(HV2)
            stdHV = np.std(HV2)
            stdHVL = np.std(np.log10(HV2))
            try:
                #if True:
                fig = plt.figure(1, figsize=(12, 12))
                plt.subplots_adjust(hspace=0.001)
                plt.subplot(211)
                plt.title(st[0].stats.network + ' ' + st[0].stats.station +
                          ' ' + ' Period: ' + str(int(round(1. / f0, 0))) +
                          ' s Distance: ' + str(round(disdeg, 0)) + ' degrees')
                plt.plot(t,
                         HilbertV * 10**9,
                         linewidth=2.5,
                         label=' Shifted Vertical: ' + loc)
                plt.xlim((min(t), max(t)))
                plt.plot(t,
                         st2.select(component="R")[0].data * 10**9,
                         linewidth=2.5,
                         label='Radial: ' + loc)
                plt.ylabel('Velocity (nm/s)')
                plt.xticks([])
                plt.legend(loc=1)
                plt.subplot(212)
                plt.plot(t,
                         np.log10(HV),
                         label=loc + ' (H/V=' + str(round(np.log10(mHV), 2)) +
                         r'$\pm$' + str(round(stdHVL, 2)) + ')',
                         linewidth=2.5)
                #plt.plot(t, corr, label=loc + ' Characteristic Function')
                plt.ylim((-0.6, .6))
                plt.yticks([-0.3, 0., 0.3])
                plt.ylabel('H/V Ratio')
                plt.axvspan(min(lim), max(lim), 0., 2., alpha=.3, color='.5')
                plt.xlim((min(t), max(t)))
                plt.xlabel('Time (s)')
                plt.legend(loc=1)
            except:
                print('Problem making the plot; continuing')
            #plt.show()
            #plt.clf()

            #pV = np.fft.rfft(st2.select(component="Z")[0].data)
            #freqmin = f0/np.sqrt(2.)
            #freqmax = f0*np.sqrt(2.)
            #freqs = np.fft.rfftfreq(st2[0].stats.npts)
            #pV = pV[(freqs <= freqmax) & (freqs >= freqmin)]
            #feve.write(sta + ', ' + loc + ', ' + str(eve.origins[0].time.year) +', ' +
            #str(eve.origins[0].time.julday).zfill(3) + ', ' + str(f0) + ', ' +
            #str(disdeg) + ', ' + str(azi) + ', ' + str(eve.magnitudes[0].mag)
            #+ ', ' + str(mHV) + ', ' + str(stdHV))
            #for comp in ['R', 'N', 'E']:
            #pC = np.fft.rfft(st2.select(component=comp)[0].data)
            #pC = pC[(freqs <= freqmax) & (freqs >= freqmin)]
            #pCV = np.mean(np.abs(pC)/np.abs(pV))
            #feve.write( ', ' + str(pCV) )
            #feve.write(' \n')
    plt.savefig('BOTHPLT_' + st[0].stats.network + '_' + st[0].stats.station +
                '_' + st[0].stats.location + '_' + str(eve.origins[0].time.year) +
                '_' + str(eve.origins[0].time.julday).zfill(3) +
                '_' + str(eve.origins[0].time.hour).zfill(2) +
                '_' + str(eve.origins[0].time.minute).zfill(2) +
                '_' + str(int(round(1. / f0, 0))) + '.pdf',
                format='PDF', dpi=400)
    plt.clf()
    feve.close()
    num_lines = sum(1 for line in open(fstring))
    if num_lines <= 1:
        os.remove(fstring)
    return
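
# Standalone, minimal sketch (not part of the routine above) of the measurement idea
# it relies on: for retrograde Rayleigh waves the radial component is ~90 degrees out
# of phase with the vertical, so the Hilbert-transformed vertical correlates with the
# radial and the ratio of their envelopes gives H/V. The signal and every name below
# are synthetic/hypothetical.
import numpy as np
from scipy.signal import hilbert

fs = 20.0                                       # assumed sampling rate (Hz)
t = np.arange(0.0, 200.0, 1.0 / fs)
f0 = 0.05                                       # assumed target frequency (Hz)
vertical = np.cos(2.0 * np.pi * f0 * t)
radial = 0.7 * np.sin(2.0 * np.pi * f0 * t)     # 90-degree phase shift, true H/V = 0.7

shifted_vertical = np.imag(hilbert(vertical))   # 90-degree phase-shifted vertical
corr = np.corrcoef(shifted_vertical, radial)[0, 1]
hv = np.mean(np.abs(hilbert(radial)) / np.abs(hilbert(vertical)))
print('correlation: %.2f  H/V: %.2f' % (corr, hv))
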
Exemplo n.º 45
0
def process(data_source1,
            data_source2,
            output_path,
            interval_seconds,
            window_seconds,
            window_overlap,
            window_buffer_length,
            resample_rate=None,
            taper_length=0.05,
            nearest_neighbours=1,
            fmin=None,
            fmax=None,
            netsta_list1='*',
            netsta_list2='*',
            pairs_to_compute=None,
            start_time='1970-01-01T00:00:00',
            end_time='2100-01-01T00:00:00',
            instrument_response_inventory=None,
            instrument_response_output='vel',
            water_level=50,
            clip_to_2std=False,
            whitening=False,
            whitening_window_frequency=0,
            one_bit_normalize=False,
            read_buffer_size=10,
            ds1_zchan=None,
            ds1_nchan=None,
            ds1_echan=None,
            ds2_zchan=None,
            ds2_nchan=None,
            ds2_echan=None,
            corr_chan=None,
            envelope_normalize=False,
            ensemble_stack=False,
            restart=False,
            dry_run=False,
            no_tracking_tag=False):
    """
    :param data_source1: Text file containing paths to ASDF files
    :param data_source2: Text file containing paths to ASDF files
    :param output_path: Output folder
    :param interval_seconds: Length of time window (s) over which to compute cross-correlations; e.g. 86400 for 1 day
    :param window_seconds: Length of stacking window (s); e.g. 3600 for an hour. interval_seconds must be a multiple of \
                    window_seconds; no stacking is performed if they are of the same size.
    """
    read_buffer_size *= interval_seconds
    if (os.path.exists(netsta_list1)):
        netsta_list1 = ' '.join(open(netsta_list1).readlines()).replace(
            '\n', ' ').strip()
    # end if
    if (os.path.exists(netsta_list2)):
        netsta_list2 = ' '.join(open(netsta_list2).readlines()).replace(
            '\n', ' ').strip()
    # end if

    comm = MPI.COMM_WORLD
    nproc = comm.Get_size()
    rank = comm.Get_rank()

    ds1 = Dataset(data_source1, netsta_list1)
    ds2 = Dataset(data_source2, netsta_list2)

    proc_stations = []
    time_tag = None
    if (rank == 0):
        # Register time tag with high resolution, since queued jobs can readily
        # commence around the same time.

        if (no_tracking_tag):
            time_tag = None
        else:
            time_tag = UTCDateTime.now().strftime("%y-%m-%d.T%H.%M.%S.%f")

        def outputConfigParameters():
            # output config parameters
            fn = 'correlator.%s.cfg' % (
                time_tag) if time_tag else 'correlator.cfg'
            fn = os.path.join(output_path, fn)

            f = open(fn, 'w+')
            f.write('Parameters Values:\n\n')
            f.write('%25s\t\t\t: %s\n' % ('DATA_SOURCE1', data_source1))
            f.write('%25s\t\t\t: %s\n' % ('DATA_SOURCE2', data_source2))
            f.write('%25s\t\t\t: %s\n' % ('OUTPUT_PATH', output_path))
            f.write('%25s\t\t\t: %s\n' %
                    ('INTERVAL_SECONDS', interval_seconds))
            f.write('%25s\t\t\t: %s\n\n' % ('WINDOW_SECONDS', window_seconds))
            f.write('%25s\t\t\t: %s\n\n' % ('WINDOW_OVERLAP', window_overlap))

            f.write('%25s\t\t\t: %s\n' %
                    ('--window-buffer-length', window_buffer_length))
            f.write('%25s\t\t\t: %s\n' % ('--resample-rate', resample_rate))
            f.write('%25s\t\t\t: %s\n' % ('--taper-length', taper_length))
            f.write('%25s\t\t\t: %s\n' %
                    ('--nearest-neighbours', nearest_neighbours))
            f.write('%25s\t\t\t: %s\n' % ('--fmin', fmin))
            f.write('%25s\t\t\t: %s\n' % ('--fmax', fmax))
            f.write('%25s\t\t\t: %s\n' % ('--station-names1', netsta_list1))
            f.write('%25s\t\t\t: %s\n' % ('--station-names2', netsta_list2))
            f.write('%25s\t\t\t: %s\n' % ('--start-time', start_time))
            f.write('%25s\t\t\t: %s\n' % ('--end-time', end_time))
            f.write('%25s\t\t\t: %s\n' % ('--instrument-response-inventory',
                                          instrument_response_inventory))
            f.write(
                '%25s\t\t\t: %s\n' %
                ('--instrument-response-output', instrument_response_output))
            f.write('%25s\t\t\t: %s\n' % ('--corr-chan', corr_chan))
            f.write('%25s\t\t\t: %s\n' % ('--water-level', water_level))
            f.write('%25s\t\t\t: %s\n' % ('--clip-to-2std', clip_to_2std))
            f.write('%25s\t\t\t: %s\n' %
                    ('--one-bit-normalize', one_bit_normalize))
            f.write('%25s\t\t\t: %s\n' %
                    ('--read-buffer-size', read_buffer_size))
            f.write('%25s\t\t\t: %s\n' %
                    ('--envelope-normalize', envelope_normalize))
            f.write('%25s\t\t\t: %s\n' % ('--whitening', whitening))
            if (whitening):
                f.write('%25s\t\t\t: %s\n' % ('--whitening-window-frequency',
                                              whitening_window_frequency))
            f.write('%25s\t\t\t: %s\n' % ('--ensemble-stack', ensemble_stack))
            f.write('%25s\t\t\t: %s\n' %
                    ('--restart', 'TRUE' if restart else 'FALSE'))
            f.write(
                '%25s\t\t\t: %s\n' %
                ('--no-tracking-tag', 'TRUE' if no_tracking_tag else 'FALSE'))

            f.close()

        # end func

        def cull_pairs(pairs, keep_list_fn):
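            # Note: keep_list_fn is expected to contain one station pair per line in
            # the form NET1.STA1.NET2.STA2; a pair is kept regardless of the order in
            # which the two stations are listed.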
            result = set()
            pairs_set = set()

            for pair in pairs:
                pairs_set.add('%s.%s' % (pair[0], pair[1]))
            # end for

            keep_list = open(keep_list_fn, 'r').readlines()
            for keep_pair in keep_list:
                keep_pair = keep_pair.strip()
                if (len(keep_pair)):
                    knet1, ksta1, knet2, ksta2 = keep_pair.split('.')

                    keep_pair_alt = '%s.%s.%s.%s' % (knet2, ksta2, knet1,
                                                     ksta1)

                    if (keep_pair in pairs_set or keep_pair_alt in pairs_set):
                        result.add(('%s.%s' % (knet1, ksta1),
                                    '%s.%s' % (knet2, ksta2)))
                # end if
            # end for

            return list(result)

        # end func

        outputConfigParameters()

        pairs = ds1.get_unique_station_pairs(ds2, nn=nearest_neighbours)
        if (pairs_to_compute):
            # only keep pairs provided in the text file, given they exist in the data-sets
            pairs = cull_pairs(pairs, pairs_to_compute)
        # end if

        # print out station-pairs for dry runs
        if (dry_run):
            print('Computing %d station-pairs: ' % (len(pairs)))
            for pair in pairs:
                print('.'.join(pair))
            # end for
        # end if

        # Use nproc as the seed so that the shuffle produces the same ordering
        # when jobs are restarted.
        random.Random(nproc).shuffle(pairs)
        proc_stations = split_list(pairs, npartitions=nproc)
    # end if

    if (dry_run):
        # nothing more to do for dry runs
        return
    # end if

    # broadcast workload to all procs
    proc_stations = comm.bcast(proc_stations, root=0)
    time_tag = comm.bcast(time_tag, root=0)

    # read inventory
    inv = None
    stationInvCache = defaultdict(list)
    if (instrument_response_inventory):
        try:
            inv = read_inventory(instrument_response_inventory)
        except Exception as e:
            print(e)
        # end try
    # end if

    # Progress tracker
    progTracker = ProgressTracker(output_folder=output_path,
                                  restart_mode=restart)

    startTime = UTCDateTime(start_time)
    endTime = UTCDateTime(end_time)
    for pair in proc_stations[rank]:
        netsta1, netsta2 = pair

        if (progTracker.increment()):
            pass
        else:
            print(('Found results for station-pair: %s.%s. Moving along..' %
                   (netsta1, netsta2)))
            continue
        # end if

        netsta1inv, stationInvCache = getStationInventory(
            inv, stationInvCache, netsta1)
        netsta2inv, stationInvCache = getStationInventory(
            inv, stationInvCache, netsta2)

        def evaluate_channels(cha1, cha2):
            result = []
            for netsta, cha, ds in zip((netsta1, netsta2), (cha1, cha2),
                                       (ds1, ds2)):
                if ('*' not in cha):  # no wildcard in this channel code; use it as-is
                    result.append(cha)
                else:
                    cha = cha.replace(
                        '*', '.*')  # hack to capture simple regex comparisons

                    net, sta = netsta.split('.')
                    stations = ds.fds.get_stations(start_time, end_time, net,
                                                   sta)
                    for item in stations:
                        if (re.match(cha, item[3])):
                            result.append(item[3])
                            break
                        # end if
                    # end for
                # end if
            # end for

            return result

        # end func

        corr_chans = []
        if (corr_chan == 'z'):
            corr_chans = evaluate_channels(ds1_zchan, ds2_zchan)
        elif (corr_chan == 'n'):
            corr_chans = evaluate_channels(ds1_nchan, ds2_nchan)
        elif (corr_chan == 'e'):
            corr_chans = evaluate_channels(ds1_echan, ds2_echan)
        elif (corr_chan == 't'):
            corr_chans = ['00T', '00T']
        else:
            raise ValueError('Invalid corr-chan')

        if (len(corr_chans) < 2):
            print((
                'Either required channels are not found for station %s or %s, '
                'or no overlapping data exists..') % (netsta1, netsta2))
            continue
        # end if

        baz_netsta1 = None
        baz_netsta2 = None
        if (corr_chan == 't'):
            try:
                sta1_lon, sta1_lat = ds1.fds.unique_coordinates[netsta1]
                sta2_lon, sta2_lat = ds2.fds.unique_coordinates[netsta2]
                _, baz_netsta2, baz_netsta1 = gps2dist_azimuth(
                    sta1_lat, sta1_lon, sta2_lat, sta2_lon)
            except Exception as e:
                print(e)
                print((
                    'Failed to compute back-azimuth for station-pairs; skipping %s.%s; '
                    % (netsta1, netsta2)))
                continue
            # end try
        # end if

        x, xCorrResDict, wcResDict = IntervalStackXCorr(
            ds1.fds, ds2.fds, startTime, endTime, netsta1, netsta2, netsta1inv,
            netsta2inv, instrument_response_output, water_level, corr_chans[0],
            corr_chans[1], baz_netsta1, baz_netsta2, resample_rate,
            taper_length, read_buffer_size, interval_seconds, window_seconds,
            window_overlap, window_buffer_length, fmin, fmax, clip_to_2std,
            whitening, whitening_window_frequency, one_bit_normalize,
            envelope_normalize, ensemble_stack, output_path, 2, time_tag)
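
# Standalone sketch (not part of the pipeline above) showing how the inter-station
# back-azimuths needed for transverse ('t') correlations can be obtained with
# ObsPy's gps2dist_azimuth; the coordinates below are made up for illustration.
from obspy.geodetics.base import gps2dist_azimuth

sta1_lat, sta1_lon = -20.0, 134.0   # hypothetical station 1
sta2_lat, sta2_lon = -25.0, 140.0   # hypothetical station 2
dist_m, az_1_to_2, az_2_to_1 = gps2dist_azimuth(sta1_lat, sta1_lon, sta2_lat, sta2_lon)
print('separation: %.1f km' % (dist_m / 1000.0))
print('azimuth station1 -> station2: %.1f deg' % az_1_to_2)
print('azimuth station2 -> station1: %.1f deg' % az_2_to_1)
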
Exemplo n.º 46
0
def stream_add_stats(data_stream,
                     inv,
                     evt,
                     write_sac=False,
                     rotate_in_obspy=False,
                     verbose=True):
    for net in inv:
        for sta in net:
            str1 = data_stream.select(network=net.code, station=sta.code)
            #            print(str(net.code),str(sta.code),len(str1))
            if len(str1) == 0:
                if verbose:
                    print(
                        'Skipping ' + str(net.code) + '.' + str(sta.code) +
                        ':', str1)
                continue
            elif len(str1) % 3 != 0:
                print(
                    'Deleting traces: ' + str(net.code) + '.' + str(sta.code) +
                    ':', str1)
                for tr in str1:
                    data_stream.remove(tr)
                continue
            elif verbose:
                print('adding traces for ' + str(net.code) + '.' +
                      str(sta.code) + ': ' + str(len(str1)))
            # update in future to deal with multiple channel (total_number_of channels)
#            print(len(str1),str1)
#            if len(str1) % 3 !=0:
#                print('Problem: missing components', str1); exit()

            for tr in str1:
                for chan in sta:
                    if tr.stats.channel == chan.code and tr.stats.location == chan.location_code:
                        break
                else:
                    print('Problem finding channel in inventory', tr)
                    exit()
                tr.stats.coordinates = {
                    'latitude': chan.latitude,
                    'longitude': chan.longitude
                }
                (tr.stats.distance, tr.stats.azimuth,
                 tr.stats.back_azimuth) = gps2dist_azimuth(
                     chan.latitude, chan.longitude, evt.origins[0].latitude,
                     evt.origins[0].longitude)
                #print('Adding sac statistics for ', tr)
                if write_sac:
                    sac = AttribDict()
                    sac.kstnm = str(sta.code)
                    sac.knetwk = str(net.code)
                    sac.kcmpnm = str(chan.code)
                    sac.khole = str(chan.location_code)
                    sac.stla = chan.latitude
                    sac.stlo = chan.longitude
                    sac.stel = chan.elevation
                    sac.evla = evt.origins[0].latitude
                    sac.evlo = evt.origins[0].longitude
                    sac.evdp = evt.origins[0].depth / 1000.  # in km
                    sac.mag = evt.magnitudes[0].mag
                    time = evt.origins[0].time

                    sac.nzyear, sac.nzjday, sac.nzhour = time.year, time.julday, time.hour
                    sac.nzmin, sac.nzsec = time.minute, time.second
                    sac.nzmsec = time.microsecond // 1000  # SAC expects integer milliseconds
                    sac.o = 0.
                    sac.b = tr.stats.starttime - time  # trace begin time relative to the event origin; essential for correct SAC timing
                    sac.kevnm = str(time)
                    sac.cmpaz = chan.azimuth
                    # dip is from horizontal downward; inc is from vertical downward
                    sac.cmpinc = chan.dip + 90
                    sac.gcarc = locations2degrees(evt.origins[0].latitude,
                                                  evt.origins[0].longitude,
                                                  chan.latitude,
                                                  chan.longitude)
                    sac.dist, sac.az, sac.baz = tr.stats.distance / 1000, tr.stats.azimuth, tr.stats.back_azimuth
                    tr.stats.sac = sac
                    tr_name = sta.code + '.' + net.code + '.' + chan.location_code + '.' + chan.code + '.sac'
                    tr.write(tr_name, format='SAC')
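
# Hedged usage sketch for stream_add_stats; the file names below are hypothetical:
#   from obspy import read, read_inventory, read_events
#   st = read('waveforms.mseed')
#   inv = read_inventory('stations.xml')
#   evt = read_events('event.qml')[0]
#   stream_add_stats(st, inv, evt, write_sac=True)
# Each matching trace then carries tr.stats.coordinates, tr.stats.distance (m),
# tr.stats.azimuth and tr.stats.back_azimuth and, with write_sac=True, a populated
# tr.stats.sac header written out as a SAC file.
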
def vespagram(
    stream,
    ev,
    inv,
    method,
    scale,
    nthroot=4,
    static3D=False,
    vel_corr=4.8,
    sl=(0.0, 10.0, 0.1),
    plot_trace=True,
    phase_shift=0,
    phase=["PP"],
    plot_max_beam_trace=False,
    save_fig=False,
    plot_circle_path=False,
    plot_stations=False,
    vespagram_show=True,
    vespa_iter=0,
    name_vespa=True,
    static_correction=False,
):
    """
    :param stream: Waveforms for the array processing.
    :type stream: :class:`obspy.core.stream.Stream`
    :param inv: Station metadata for the waveforms.
    :type inv: :class:`obspy.core.inventory.inventory.Inventory`
    :param method: Method used for the array analysis
        (one of "DLS": Delay and Sum, "PWS": Phase Weighted Stack).
    :type method: str
    :param scale: Scale factor applied to the beam traces when plotting.
    :type scale: float
    :param nthroot: N-th root used when forming the beam stack.
    :type nthroot: int
    :param static3D: static correction of topography using `vel_corr` as
        velocity (slow!)
    :type static3D: bool
    :param vel_corr: Correction velocity for static topography correction in
        km/s.
    :type vel_corr: float
    :param sl: Minimum, maximum and step width of the slowness grid for the
        analysis, in s/deg.
    :type sl: (float, float, float)
    :param plot_trace: If True, plot the vespagram as a wiggle plot; if False, as a
        density map.
    :type plot_trace: bool
    :param phase_shift: Time shift applied when plotting the theoretical TauP
        travel-time phases.
    :type phase_shift: float
    """

    # choose the latest of the start times
    starttime = max([tr.stats.starttime for tr in stream])
    # choose the earliest of the end times
    endtime = min([tr.stats.endtime for tr in stream])
    # keep only the common time window shared by all seismograms
    stream.trim(starttime, endtime)
    # remove the trend
    # stream.detrend('simple')

    # print in the screen
    # print(starttime)
    # print(endtime)
    # closeInput = raw_input("Press ENTER to exit")

    org = ev.preferred_origin() or ev.origins[0]
    ev_lat = org.latitude
    ev_lon = org.longitude
    ev_depth = org.depth / 1000.0  # in km
    ev_otime = org.time

    # print(org)
    # print(ev_lat)
    # print(ev_lon)
    # print(ev_depth)
    # print(ev_otime)
    # closeInput = raw_input("Press ENTER to exit")

    sll, slm, sls = sl
    # print sl
    # print sll
    # print slm
    # print sls
    # closeInput = raw_input("Press ENTER to exit")

    sll /= KM_PER_DEG
    slm /= KM_PER_DEG
    sls /= KM_PER_DEG
    center_lon = 0.0
    center_lat = 0.0
    center_elv = 0.0
    seismo = stream
    # seismo.attach_response(inv)
    # seismo.merge()
    sz = Stream()
    i = 0
    for tr in seismo:
        for station in inv[0].stations:
            if tr.stats.station == station.code:
                # print(tr.stats.station)
                # print(station.code)
                tr.stats.coordinates = AttribDict(
                    {
                        "latitude": station.latitude,
                        "longitude": station.longitude,
                        "elevation": station.elevation,
                        "name": station.code,
                    }
                )
                center_lon += station.longitude
                center_lat += station.latitude
                center_elv += station.elevation
                i += 1
                # print(station.network)
        sz.append(tr)

    # use the code of the last network in the inventory as the array name
    for network in inv:
        array_name = network.code

    # print(array_name)
    # print(type(array_name))

    if i == 0:
        msg = "No matching stations could be found in the inventory!"
        raise ValueError(msg)

    # sz.plot()
    # stream.plot()

    center_lon /= float(i)
    center_lat /= float(i)
    center_elv /= float(i)

    # calculate the back azimuth
    great_circle_dist, baz, az2 = gps2dist_azimuth(center_lat, center_lon, ev_lat, ev_lon)
    great_circle_dist_deg = great_circle_dist / (1000 * KM_PER_DEG)
    # print("baz")
    # print(baz)
    # print("az2")
    # print(az2)

    if plot_circle_path:
        plot_great_circle_path(ev_lon, ev_lat, ev_depth, center_lon, center_lat, baz, great_circle_dist, model)

    if plot_stations:
        plot_array_stations(sz, center_lon, center_lat, array_name)

    # print(center_lon)
    # print(center_lat)
    # print(center_elv)

    # closeInput = raw_input("Press ENTER to exit")

    # trim it again?!?!
    stt = starttime
    e = endtime
    nut = 0.0
    max_amp = 0.0
    # sz.trim(stt, e)
    # sz.detrend('simple')
    # print sz

    # compute the number of traces in the vespagram
    nbeam = int((slm - sll) / sls + 0.5) + 1
    # print("nbeam")
    # print(nbeam)

    # arguments to compute the vespagram
    kwargs = dict(
        # slowness grid: X min, X max, Y min, Y max, Slow Step
        sll=sll,
        slm=slm,
        sls=sls,
        baz=baz,
        stime=starttime,
        etime=endtime,
        source_depth=ev_depth,
        distance=great_circle_dist_deg,
        static_correction=static_correction,
        phase=phase,
        method=method,
        nthroot=nthroot,
        correct_3dplane=False,
        static_3D=static3D,
        vel_cor=vel_corr,
    )

    # date to compute total time in routine
    start = UTCDateTime()

    # compute the vespagram
    slow, beams, max_beam, beam_max, mini, maxi = vespagram_baz(sz, **kwargs)
    # print slow
    # print total time in routine
    print("Total time in routine: %.2f\n" % (UTCDateTime() - start))

    # Plot the seismograms
    # sampling rate
    df = sz[0].stats.sampling_rate
    npts = len(beams[0])
    # print("npts")
    # print(npts)
    # time vector
    T = np.arange(0, npts / df, 1 / df)
    # reconvert slowness to degrees
    sll *= KM_PER_DEG
    slm *= KM_PER_DEG
    sls *= KM_PER_DEG
    # slowness vector
    slow = np.arange(sll, slm, sls)
    max_amp = np.max(beams[:, :])
    # min_amp = np.min(beams[:, :])
    scale *= sls

    # initialize the figure
    fig = plt.figure(figsize=(12, 8))

    #    print("sl")
    #    print(sl)
    #    print("sll")
    #    print(sll)
    #    print("slm")
    #    print(slm)
    #    print("sls")
    #    print(sls)

    # get taup points for ploting the phases
    phase_name_info, phase_slowness_info, phase_time_info = get_taupy_points(
        center_lat,
        center_lon,
        ev_lat,
        ev_lon,
        ev_depth,
        starttime,
        endtime,
        mini,
        maxi,
        ev_otime,
        phase_shift,
        sll,
        slm,
    )

    # print(phase_name_info)
    # print(phase_slowness_info)
    # print(phase_time_info)

    if plot_trace:
        ax1 = fig.add_axes([0.1, 0.1, 0.85, 0.85])
        for i in range(nbeam):
            if plot_max_beam_trace:
                if i == max_beam:
                    ax1.plot(T, sll + scale * beams[i] / max_amp + i * sls, "r", zorder=1)
                else:
                    ax1.plot(T, sll + scale * beams[i] / max_amp + i * sls, "k", zorder=-1)
            else:
                ax1.plot(T, sll + scale * beams[i] / max_amp + i * sls, "k", zorder=-1)
        ax1.set_xlabel("Time (s)")
        ax1.set_ylabel("slowness (s/deg)")
        ax1.set_xlim(T[0], T[-1])
        data_minmax = ax1.yaxis.get_data_interval()
        minmax = [min(slow[0], data_minmax[0]), max(slow[-1], data_minmax[1])]
        ax1.set_ylim(*minmax)
        # plot the phase info
        ax1.scatter(phase_time_info, phase_slowness_info, s=2000, marker=u"|", lw=2, color="g")
        for i, txt in enumerate(phase_name_info):
            ax1.annotate(txt, (phase_time_info[i], phase_slowness_info[i]), fontsize=18, color="r")
    #####
    else:
        # step = (max_amp - min_amp)/100.
        # level = np.arange(min_amp, max_amp, step)
        # beams = beams.transpose()
        cmap = cm.hot_r
        # cmap = cm.rainbow

        ax1 = fig.add_axes([0.1, 0.1, 0.85, 0.85])
        # ax1.contour(slow,T,beams,level)
        # extent = (slow[0], slow[-1], \
        #               T[0], T[-1])
        extent = (T[0], T[-1], slow[0] - sls * 0.5, slow[-1] + sls * 0.5)

        ax1.set_ylabel("slowness (s/deg)")
        ax1.set_xlabel("T (s)")
        beams = np.flipud(beams)
        ax1.imshow(beams, cmap=cmap, interpolation="nearest", extent=extent, aspect="auto")
        # plot the phase info
        ax1.scatter(phase_time_info, phase_slowness_info, s=2000, marker=u"|", lw=2, color="g")
        for i, txt in enumerate(phase_name_info):
            ax1.annotate(txt, (phase_time_info[i], phase_slowness_info[i]), fontsize=18, color="r")

    ####
    result = "BAZ: %.2f Time: %s" % (baz, stt)
    ax1.set_title(result)

    if vespagram_show:
        plt.show()

    # save the figure
    if save_fig:
        fig_name = "vespagram_%s.pdf" % vespa_iter
        plt.savefig(fig_name, format="pdf", dpi=None)

    return slow, beams, max_beam, beam_max
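
# Hedged usage sketch for vespagram; the file names, array inventory and event below
# are hypothetical and must match your own data:
#   from obspy import read, read_inventory, read_events
#   st = read('array_waveforms.mseed')
#   inv = read_inventory('array_stations.xml')
#   ev = read_events('event.qml')[0]
#   slow, beams, max_beam, beam_max = vespagram(st, ev, inv, method='DLS',
#                                               scale=0.2, sl=(0.0, 10.0, 0.1),
#                                               phase=['PP'], plot_trace=True)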